diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/30fcd23745efe32ce681__mypyc.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/30fcd23745efe32ce681__mypyc.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..a898493b9 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/30fcd23745efe32ce681__mypyc.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/INSTALLER b/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/INSTALLER new file mode 100644 index 000000000..ce3133dc1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/INSTALLER @@ -0,0 +1 @@ +Poetry 2.2.1 \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/LICENSE b/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/LICENSE new file mode 100644 index 000000000..424419946 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2017, Hsiaoming Yang +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/METADATA b/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/METADATA new file mode 100644 index 000000000..53052b367 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/METADATA @@ -0,0 +1,109 @@ +Metadata-Version: 2.1 +Name: Authlib +Version: 0.15.6 +Summary: The ultimate Python library in building OAuth and OpenID Connect servers. 
+Home-page: https://authlib.org/ +Author: Hsiaoming Yang +Author-email: me@lepture.com +License: BSD-3-Clause +Project-URL: Documentation, https://docs.authlib.org/ +Project-URL: Commercial License, https://authlib.org/plans +Project-URL: Bug Tracker, https://github.com/lepture/authlib/issues +Project-URL: Source Code, https://github.com/lepture/authlib +Project-URL: Blog, https://blog.authlib.org/ +Project-URL: Donate, https://lepture.com/donate +Platform: any +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Console +Classifier: Environment :: Web Environment +Classifier: Framework :: Flask +Classifier: Framework :: Django +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Dist: cryptography +Provides-Extra: client +Requires-Dist: requests ; extra == 'client' + +Authlib +======= + +The ultimate Python library in building OAuth and OpenID Connect servers. +JWS, JWK, JWA, JWT are included. + +Useful Links +------------ + +1. Homepage: https://authlib.org/ +2. Documentation: https://docs.authlib.org/ +3. Purchase Commercial License: https://authlib.org/plans +4. Blog: https://blog.authlib.org/ +5. More Repositories: https://github.com/authlib +6. Twitter: https://twitter.com/authlib +7. Donate: https://www.patreon.com/lepture + +Specifications +-------------- + +- RFC5849: The OAuth 1.0 Protocol +- RFC6749: The OAuth 2.0 Authorization Framework +- RFC6750: The OAuth 2.0 Authorization Framework: Bearer Token Usage +- RFC7009: OAuth 2.0 Token Revocation +- RFC7515: JSON Web Signature +- RFC7516: JSON Web Encryption +- RFC7517: JSON Web Key +- RFC7518: JSON Web Algorithms +- RFC7519: JSON Web Token +- RFC7521: Assertion Framework for OAuth 2.0 Client Authentication and Authorization Grants +- RFC7523: JSON Web Token (JWT) Profile for OAuth 2.0 Client Authentication and Authorization Grants +- RFC7591: OAuth 2.0 Dynamic Client Registration Protocol +- RFC7636: Proof Key for Code Exchange by OAuth Public Clients +- RFC7638: JSON Web Key (JWK) Thumbprint +- RFC7662: OAuth 2.0 Token Introspection +- RFC8037: CFRG Elliptic Curve Diffie-Hellman (ECDH) and Signatures in JSON Object Signing and Encryption (JOSE) +- RFC8414: OAuth 2.0 Authorization Server Metadata +- RFC8628: OAuth 2.0 Device Authorization Grant +- OpenID Connect 1.0 +- OpenID Connect Discovery 1.0 + +Implementations +--------------- + +- Requests OAuth 1 Session +- Requests OAuth 2 Session +- Requests Assertion Session +- HTTPX OAuth 1 Session +- HTTPX OAuth 2 Session +- HTTPX Assertion Session +- Flask OAuth 1/2 Client +- Django OAuth 1/2 Client +- Starlette OAuth 1/2 Client +- Flask OAuth 1.0 Server +- Flask OAuth 2.0 Server +- Flask OpenID Connect 1.0 +- Django OAuth 1.0 Server +- Django OAuth 2.0 Server +- Django OpenID Connect 1.0 + +License +------- + +Authlib is licensed under BSD. 
Please see LICENSE for licensing details. + +If this license does not fit your company, consider to purchase a commercial +license. Find more information on `Authlib Plans`_. + +.. _`Authlib Plans`: https://authlib.org/plans + + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/RECORD b/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/RECORD new file mode 100644 index 000000000..f11d36ec0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/RECORD @@ -0,0 +1,183 @@ +authlib/__init__.py,sha256=CoObQJQX-YGSJy-HWbJPtK6XbpfKDBc21DJwjhLnIcM,476 +authlib/consts.py,sha256=3YPs-THHdx4SpUFMrEgVfqHgq6kJVbEpdK3a1SJ5i0M,313 +authlib/deprecate.py,sha256=GkYSPN_Tsx6FvPf1I8lWg8MtFjq2la66UlyuRAJHZX0,499 +authlib/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +authlib/common/encoding.py,sha256=YrDqWLo_60BLEzl8VEurkkwf2EBXqh_9NLWnlt0VsLs,2108 +authlib/common/errors.py,sha256=31LVEXiqySK8ydbtBip64qxtnJk5V24eN8GPlO_9dP8,2210 +authlib/common/security.py,sha256=2xcxtJWVE26kosNJTWtnN3skeSzm3Jjtpm4wxoTCBYs,493 +authlib/common/urls.py,sha256=dfUNEI8Y6kR8mT8zZ2UWryc942-Z-vhDIDupX8u3TPQ,4973 +authlib/integrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +authlib/integrations/django_helpers.py,sha256=iGLEVnPmAEE_eJxJVtOltVE3NGL3klDRj1nKn9tSFck,1876 +authlib/integrations/flask_helpers.py,sha256=3SV9MbI2B0dw-0TkNYwzH7rOrGjqYc2-goGlC9DVVCM,749 +authlib/integrations/base_client/__init__.py,sha256=xHA18_6VJt2TxV3F70orbo6jQSTOEEgSNbDTPd7RdzY,586 +authlib/integrations/base_client/async_app.py,sha256=ogzUUp38e1MrQ4yXhLMy564lhqhDLqhxRpfVBfNjge8,7944 +authlib/integrations/base_client/base_app.py,sha256=ja7UgyWQMgG6cPXXeU4KYFY32svrpsjkgx3D_1i-cbg,9551 +authlib/integrations/base_client/base_oauth.py,sha256=cZxSJ24Hqg7pYTxKgeS9ZqjFCusdtKGfkKFlKvEo1U0,3948 +authlib/integrations/base_client/errors.py,sha256=fwXW7ldF-TeCIHeANGWYqv5hhaFpXzLLsGRUBwgcy4c,632 +authlib/integrations/base_client/framework_integration.py,sha256=TibSZ2cC0vFRZhwTWTqh57Ecyzgzak3JZ6I2gx25ZWs,782 +authlib/integrations/base_client/remote_app.py,sha256=zdYWbm7yT0FcSWMS_mzGmmoIUzqeL06JWrgmc_4qib8,7941 +authlib/integrations/django_client/__init__.py,sha256=opYrgbuD8-hl5tzFp9Ehrb1UN7BzIpuc8ARxrwRdycU,360 +authlib/integrations/django_client/integration.py,sha256=nNXuELMzkVFarM5eJMnqotEozL2xgJNrkBuqXXS4g0o,2659 +authlib/integrations/django_oauth1/__init__.py,sha256=yp66WLC43YdsICGgVDbu6AfIRyPR17M43umIUcxjH10,221 +authlib/integrations/django_oauth1/authorization_server.py,sha256=UAfP61s-OW0MkVVdq2ld-jjksdPVL5ui_j10sLq3N14,4446 +authlib/integrations/django_oauth1/nonce.py,sha256=auueaxe_Hn9gftyqcjs7jzMxOJVw-xfe3hdG9NyEB6Y,420 +authlib/integrations/django_oauth1/resource_protector.py,sha256=o7VhpkCHJ7z90gdDYdQ--YOkQWhXBeOAV2y4HrYUDUs,2435 +authlib/integrations/django_oauth2/__init__.py,sha256=HGqxRud5D9EGZIthXmySYhB7d-90qzZlXZDrd4mPHnQ,278 +authlib/integrations/django_oauth2/authorization_server.py,sha256=TAA4D-xX6GeI39-Q_wkVonBphhYKAaVxi0Ju8jbkOs8,5457 +authlib/integrations/django_oauth2/endpoints.py,sha256=ljbhHpMlf5PnbxSXUWYE5gNyqXYypd_WO7jyZAaS200,1964 +authlib/integrations/django_oauth2/resource_protector.py,sha256=QdCxym3kPakT-QPmKDVD7TTxyMW_OqTSkk20EgVqNbk,2722 +authlib/integrations/django_oauth2/signals.py,sha256=8SlnOsi1IuBPmrCi7dOLXK70N_m9y6B3msjMCtBMnSk,236 +authlib/integrations/flask_client/__init__.py,sha256=QVjHRttNnYBJpDALAnJ-pZmF2THks2hvAvh61_Awz3w,282 
+authlib/integrations/flask_client/integration.py,sha256=HBoSF7ugPoulPyla4JagLqqJcDCSA1kC72-mDsYJmDQ,1975 +authlib/integrations/flask_client/oauth_registry.py,sha256=-H2jqDgQsEW56U5YEN8CsALVpkdFNV5HPWs5ZZMhPXA,3743 +authlib/integrations/flask_client/remote_app.py,sha256=4zoL16AbBnTVRG_mq6NtSIHylqOkuMrsk0u4ho53jfU,2949 +authlib/integrations/flask_oauth1/__init__.py,sha256=PGDVdNJ9oGs5bYJr7oGdpQX0tE2nQJNcHJ8t-SxEAEY,260 +authlib/integrations/flask_oauth1/authorization_server.py,sha256=2FB54nMUXBGIlOLHcB_zEc5tLPvUc8p0QoQi0iBHwYY,6320 +authlib/integrations/flask_oauth1/cache.py,sha256=MtmYy-Zr_OgJoEngS2vOiuYTketegsFZDzlE6rOw1b4,3020 +authlib/integrations/flask_oauth1/resource_protector.py,sha256=95Z7Dx-fwTkdslIgOs9Sok51AjVYh8T3yhCKv2QftHU,4031 +authlib/integrations/flask_oauth2/__init__.py,sha256=8xa6R7Otk9DSHzKFcyGeDv-H7OLUbFwtF73ybpe9jNY,243 +authlib/integrations/flask_oauth2/authorization_server.py,sha256=1ehIZmVtJJdwdbA8vW55PB32C7v_JiuryDnyDklWeV4,8011 +authlib/integrations/flask_oauth2/errors.py,sha256=mqpfJ_iTi0yJZCT_sl6sSDScQ4BRoVFED53Px6m3ZYA,1094 +authlib/integrations/flask_oauth2/resource_protector.py,sha256=fmyL4gml_dvnX6NVDRAiATGatx_RI08VkzRPXmUM5_4,4079 +authlib/integrations/flask_oauth2/signals.py,sha256=CKao8F778CkUzl7mKjF96smr7WKJ0nfxBT0onVFq10Y,341 +authlib/integrations/httpx_client/__init__.py,sha256=zuO_FIAdLEJ9Ch25kTa-Nsyi2gqkPuAsXdHAeYc0nBI,804 +authlib/integrations/httpx_client/assertion_client.py,sha256=9iZXZXmAtIzEZvRh_elH144qcNX-bNCcDANRo0lezXc,3394 +authlib/integrations/httpx_client/oauth1_client.py,sha256=4HEX5Pr3dYmCl8KyRn-X5sHLPOyG92FzATyysK7Es54,4045 +authlib/integrations/httpx_client/oauth2_client.py,sha256=zHvPkxiRFWOZ-JSivxJeEJDdQXPnrHs8LOaOebHcOQA,8395 +authlib/integrations/httpx_client/utils.py,sha256=R1ZUGKhTeMSdB08Sz7p7D6CxmxDTmUJqMaWv44lW-1g,404 +authlib/integrations/requests_client/__init__.py,sha256=Nco-Q1_wOswQ9qCgBgh0rVWPYO9ddEKnlnnJ9rGZE_U,652 +authlib/integrations/requests_client/assertion_session.py,sha256=TuyzkN59OvTCJFB8TW2giSPlJ0VdNjJoefX4xVbUGxg,1827 +authlib/integrations/requests_client/oauth1_session.py,sha256=LXMIaWciRUjQd2ma4CJbF3dr60orItQOZ6n4e4IiZo4,2142 +authlib/integrations/requests_client/oauth2_session.py,sha256=7bZbK3Eg40GH7uVLCAaIlr8eTcd15NzRPuS7vinNluQ,4782 +authlib/integrations/sqla_oauth1/__init__.py,sha256=6TIiiz_YtxpULt_Gf8qNgmYCreAbMxXHCK4VHUqIArM,416 +authlib/integrations/sqla_oauth1/functions.py,sha256=DLi0HbRx2IGUFO2kUH1QZ1WlHgkhAkSdTp1MUTTqDWQ,5254 +authlib/integrations/sqla_oauth1/mixins.py,sha256=SxSM12UTEXW3pV01gCWhjwAMyOas7cmsj7NbFCvgM34,2795 +authlib/integrations/sqla_oauth2/__init__.py,sha256=d8g3ipiiPtyk1JnmTDzeAKCdm-ozjNoERDWLEkpBgL8,548 +authlib/integrations/sqla_oauth2/client_mixin.py,sha256=uem_suLalYLeCu-szePyT8WsZAjewHXeQKrtzh1f_U0,3986 +authlib/integrations/sqla_oauth2/functions.py,sha256=7RejfsKhlmioD4qHb4v-8a-iOlYxV9vcwEatKHGlvGg,3210 +authlib/integrations/sqla_oauth2/tokens_mixins.py,sha256=VXdtQ69jhxGtVMWxSkWiISU7avd--yvGv9JjDkLEc6E,1678 +authlib/integrations/starlette_client/__init__.py,sha256=vFPq0GQEzIWPjeLrNyzNnfUXVrCLOZvyVJpR2TO6A3c,557 +authlib/integrations/starlette_client/integration.py,sha256=MY1nhdozsGZKD-j7-v5_2igpbeGCy0wQ1vqkU0BS92w,2608 +authlib/jose/__init__.py,sha256=-FGxiCk9Q_ZcLIn2BBN_VLrncRqOy9FCoLyyfWCabWQ,1853 +authlib/jose/errors.py,sha256=mVxr-NznmqEicn2xxnjeBDKD1RnnzoFWC9qa9NiEVAg,2262 +authlib/jose/jwk.py,sha256=Ge-UMTx7sk6ccl1BcA8CHxSfYtJiaUYx5hd_icb3ZHc,408 +authlib/jose/util.py,sha256=SDqaGQD64M6uQ0PcLHPIhxo5cpiUHKqB_TfxBB0paqI,699 
+authlib/jose/drafts/__init__.py,sha256=kRzfvWoD5kRl2d8CEIYgH5fdW6xj32sNORuxWgd7X98,88 +authlib/jose/drafts/_jwe_enc_cryptography.py,sha256=631oL-4cTVygvocbgZkKVYqzq3rEd_xMH60k01ycv5Q,1806 +authlib/jose/rfc7515/__init__.py,sha256=0NhWGkry69LiJH6cAkwIUNmQUpuGh463TNwIpoQjtE4,360 +authlib/jose/rfc7515/jws.py,sha256=mA-qIdJPVuPWnpKSG-slT3DHgWfaNnvhfm5AfXClJwI,11524 +authlib/jose/rfc7515/models.py,sha256=JPUPg7pHW-_aztpfX517y6SWBZI5_AfrNxUxwUhrilM,2483 +authlib/jose/rfc7516/__init__.py,sha256=d3i3YFUn5AuIjOfBJfRuogFocP2C0i4w5vt8aOK1VD0,387 +authlib/jose/rfc7516/jwe.py,sha256=Ik4FSMSxPAGt1jkyTauv5eGW0Df0mMkhPQ8bWHxVNY0,7345 +authlib/jose/rfc7516/models.py,sha256=KaGK3YaQQ0ZxBfneR0tG2skIfZh0NcsQvLXSLWNnI24,2023 +authlib/jose/rfc7517/__init__.py,sha256=CzzNku-UcE9IUabtQIlvApTXlLPrslbPL3z02IW-8-U,343 +authlib/jose/rfc7517/_cryptography_key.py,sha256=1-EQ1YD7ZR7Gp7FBntrWEN-76Su_vlunuzC77VPJ_sg,1257 +authlib/jose/rfc7517/jwk.py,sha256=O7LNBPeQVZEQPMOnGJuU0knIoixuFy7rvyMjkbMPKKM,2024 +authlib/jose/rfc7517/models.py,sha256=z8eh-znjpsRKwVZLiyNU5eyFKm1_fXdR2aZ2QUqsT7E,4576 +authlib/jose/rfc7518/__init__.py,sha256=mO7V2nXWRIL_w-HEIXHl3E0yS5jfMksDGjt1jjKao8s,431 +authlib/jose/rfc7518/jwe_algorithms.py,sha256=FCQWLbHjLmU5hGhVkbsTOPmiypgwKvIu7PTDc-h0Xno,1556 +authlib/jose/rfc7518/jws_algorithms.py,sha256=8myW2tgFqI9UtD00zDWkCNcTcXUVHXQn6aPuK9W1mbU,2003 +authlib/jose/rfc7518/oct_key.py,sha256=NX-B4x7NJTfpzCzggskKG9ah_yBrINVs8TtgOo79FQY,1426 +authlib/jose/rfc7518/util.py,sha256=LpOgX10QHuqss6x015QPgApTlnYJ_cQyd0hzeYALnaE,265 +authlib/jose/rfc7518/_cryptography_backends/__init__.py,sha256=fONfDQ5SmC7DNgX4kY4oUXa2HcmWuaT9U2QecmkmVFQ,214 +authlib/jose/rfc7518/_cryptography_backends/_jwe_alg.py,sha256=gabtQSsWXgK9yAY2yRpFdWAut7Bo6H6X0l_I32akEXE,8917 +authlib/jose/rfc7518/_cryptography_backends/_jwe_enc.py,sha256=QRtPVbspgqniWOziDRCUNrdTLe1sewxS1AQTX21wCfo,5093 +authlib/jose/rfc7518/_cryptography_backends/_jws.py,sha256=MJwKMOkAfey7Lua2XTu4rtnjEhC1XQOVeXVQp-AT63Q,4908 +authlib/jose/rfc7518/_cryptography_backends/_keys.py,sha256=p9K-1O_3LDn-bKgVnRapjSM1EuWUsKF40w1vMLBjwNk,10237 +authlib/jose/rfc7519/__init__.py,sha256=CORkKBZ135xSAR-wQJob9P5EfdmyRkJ58MM6EOGvCAE,333 +authlib/jose/rfc7519/claims.py,sha256=QSIjbifJpUFCrW8eAZPNWYBZ4nzM4cVOu6TJZWznMok,8146 +authlib/jose/rfc7519/jwt.py,sha256=dCjgVXAa67o2KL5cqkR72dTgZOTh9-uqIiSSe2SDEvY,4846 +authlib/jose/rfc8037/__init__.py,sha256=FfxjfiZ2H0UviK6V9cu-be_-lYzfZ5RuV_1srzOosf4,127 +authlib/jose/rfc8037/_jws_cryptography.py,sha256=3wj6AjgE_YZcRSATyds6DVc4bqA3uy8X277ExJjvkfA,939 +authlib/jose/rfc8037/okp_key.py,sha256=s51o1WI8hBFUPF3OYOO0wi-e-TEgXNmVEv6T9Pj70vE,4187 +authlib/oauth1/__init__.py,sha256=YrqE1RUU0Dyqf5whN5vWiGLgpdhUSU_LKEswHbWEHJs,752 +authlib/oauth1/client.py,sha256=GQ7w2t9iHjWrHO6pO0hm-m0fti9mui9ToI0AdOmbmMI,7116 +authlib/oauth1/errors.py,sha256=pg0NaUgENjfTN_ba50_yQB9aSNe5Mte5MDlikFuypBY,46 +authlib/oauth1/rfc5849/__init__.py,sha256=S4rdtQiDd83QsR1hBqdsvuI-VgmgoG7rFuJH2fLP_K4,1036 +authlib/oauth1/rfc5849/authorization_server.py,sha256=slLBnoxt-OECk67DrGTG_Las-ady5pKsmtgfRtyDDFg,13880 +authlib/oauth1/rfc5849/base_server.py,sha256=0rewbcJmcQ9G-MIjOgv2nezDBlZefuK0Yf6mjs4Mppk,3857 +authlib/oauth1/rfc5849/client_auth.py,sha256=7aVbcU5j3MU5XAOKluI8fLDCIDcJHTxViv-z6CbulJE,6892 +authlib/oauth1/rfc5849/errors.py,sha256=cT5PASueBNDWmD3TTZYMwXS9Nr4y4khJ5kF58pmoKso,2597 +authlib/oauth1/rfc5849/models.py,sha256=9MQ1hHVOsoyKr0RAR5uMnhZPjEfzWHUo3lq5tb_x-tA,3435 +authlib/oauth1/rfc5849/parameters.py,sha256=XQMgNFrl9NkUSddnVEIO0mXproIySqFMyb7PY8chY5Q,3508 
+authlib/oauth1/rfc5849/resource_protector.py,sha256=v4EsTBnqYAQMarwukpYmgiyT3J9khnp8Apj6n_RRabI,1258 +authlib/oauth1/rfc5849/rsa.py,sha256=z2cx8e-p2pdvzx_WQm9mcMzkGfsBJniWoy6SLn6XZa4,896 +authlib/oauth1/rfc5849/signature.py,sha256=G5-iy8Oa8paMnTRyxQ1u7a9xCKHDufkDMS1zuDz1hjs,14159 +authlib/oauth1/rfc5849/util.py,sha256=89kP-xwQHop8SaBjCEkppxzYO7GQFbmi3ekVyI-zFvE,136 +authlib/oauth1/rfc5849/wrapper.py,sha256=qXqW9_UHLkHQ7R6Mb2PgiUNtjh_fpdBQcFiN6gPbJrY,3931 +authlib/oauth2/__init__.py,sha256=QbT0eXEAwNUr9RbRGRHIw6kw-pUnFXbfcbZJZ5_ynmc,423 +authlib/oauth2/auth.py,sha256=zM9AX-4jOSoJrsr_OQsU5n8V1JJVcAQsfahfez2piZg,3481 +authlib/oauth2/base.py,sha256=0Iw_UabgRNpsmyzI4jpxEakAuyg7duRlGBJPFv0xg1E,1069 +authlib/oauth2/client.py,sha256=yt7dHA8xu55pA-NoxrdC7igfUHSWFiUa4AAhLfd22CU,16656 +authlib/oauth2/rfc6749/__init__.py,sha256=LFNVOywMSdTtssoQ1ZLpfzsvaBBHgk0I1dY4NlZtCHI,2129 +authlib/oauth2/rfc6749/authenticate_client.py,sha256=nGgHKc3qEZIhTzqX4R1IEu3oyO0cKa74o8twzXQpTwQ,4132 +authlib/oauth2/rfc6749/authorization_server.py,sha256=fL1cWhVq1Qm_4dJ88byb6a-cxwn8qzFeVK3-GcbRwxM,9284 +authlib/oauth2/rfc6749/errors.py,sha256=cMhJI0YP9_DK9sjv5wKMmDwG4mCqyoioaypIyjVi6QQ,6314 +authlib/oauth2/rfc6749/models.py,sha256=0EStfaA7dYa2xPWDDmjJn9p-EUz6EhUrHZHQWhTg-Y4,6813 +authlib/oauth2/rfc6749/parameters.py,sha256=YCDcu9BammKPLEM5cOoZ0fwF2pTl9lpYYMC97ZUwxQM,8270 +authlib/oauth2/rfc6749/resource_protector.py,sha256=6oH0DjNbFojoP9tPVE1vd8eENO-VCxRuVgTBvOeJ9hY,1223 +authlib/oauth2/rfc6749/token_endpoint.py,sha256=iVeh_uYIXLAsvfWLdyqm1cF5ZFCh37CaTJGz4U7asDs,1143 +authlib/oauth2/rfc6749/util.py,sha256=xzebTUJciyJ9qy1jrYqiXfNBq6GfqCkb9Ok0XMoPWFc,1122 +authlib/oauth2/rfc6749/wrappers.py,sha256=7vDgNMHAB6bopYhOBxspkCjJY4QyYEoSGwSxZGN0QhA,2663 +authlib/oauth2/rfc6749/grants/__init__.py,sha256=jJZOtFgyBztuIGQxcuK1qtvnR3hWRDU-G59RuyWmqKI,1314 +authlib/oauth2/rfc6749/grants/authorization_code.py,sha256=NbDz5Y7AiWi2V6gcbo-gGD0upZ0fhEkSe1hYfhOLo-U,15797 +authlib/oauth2/rfc6749/grants/base.py,sha256=85k98YZsYGJ0eQ3Tsbnp4nZYFIFduoA8jb-MtjgVVTA,5007 +authlib/oauth2/rfc6749/grants/client_credentials.py,sha256=pf3tfaiSVeuCudaqZGKpFU1OVBWkSOTB8SErEjJQWvY,3924 +authlib/oauth2/rfc6749/grants/implicit.py,sha256=qNBYKp8GG3sNLZSfC1VL6xRiloTXDl3c1qPXsGf3XF8,9297 +authlib/oauth2/rfc6749/grants/refresh_token.py,sha256=uAphAHXiNAbuwbv4v-aJWocgWQY0OG-Kl4o-UZ3HMDE,6503 +authlib/oauth2/rfc6749/grants/resource_owner_password_credentials.py,sha256=_wkF0JsdnqMLnPJ7wY_Y70P0nYmvsYCX3maM7LzAa8Q,5763 +authlib/oauth2/rfc6750/__init__.py,sha256=0DgFrRaX5hJWPIzt6ZUSNTyPOU3ROT4hG5IwuG5rCaU,609 +authlib/oauth2/rfc6750/errors.py,sha256=vLqIAdBBy0wQIT5c-kg9kREfKR8ZdVSUC9hUyjoDo3c,3238 +authlib/oauth2/rfc6750/parameters.py,sha256=WfNCvwARR8dIzIyhNnatnP_6hlogYoWVEbmwe60tZCM,1212 +authlib/oauth2/rfc6750/validator.py,sha256=Bh0xnI_yvcpZjddeQia0BqoD8avsQ59FBXcAG4yEmlk,3271 +authlib/oauth2/rfc6750/wrappers.py,sha256=vQ7Z5RN_repa8pCX399-rInrL9lkpysar-7kj7yATyc,3733 +authlib/oauth2/rfc7009/__init__.py,sha256=P0aYdtZ-B5Nj8mUkEqS-snMvJ8hLTDR5unZKAZ7KGLo,377 +authlib/oauth2/rfc7009/parameters.py,sha256=klTaHudte5Oncfw6M5Mr6LL9Y-cVnN29Th1ix-uVYAU,854 +authlib/oauth2/rfc7009/revocation.py,sha256=sU4_hrtADGvlMig4kBVRASxjspa4UkGxiWJHROUHqeQ,3715 +authlib/oauth2/rfc7521/__init__.py,sha256=sI-EfGOAZTLM-LxfBfYEXU_74cd3ib63g8fkGu39PJA,67 +authlib/oauth2/rfc7521/client.py,sha256=1PrWuGraLfzCVL0cpZLnvVFdfcHUT2hkE6-dbrraiCs,2481 +authlib/oauth2/rfc7523/__init__.py,sha256=AY1iuAHaOFfwYI3WwkUZCov2SMe_7jNAwWAvBh4gqZQ,769 
+authlib/oauth2/rfc7523/assertion.py,sha256=kvlc5rj7N3A1Xl5Cl2cW2mPM0f3SsREN3ULYO044kQQ,2140 +authlib/oauth2/rfc7523/auth.py,sha256=3i5vt_ZV-DOjww4d55b3VHmMqhxGpCNTS_KxJbp2R4M,3869 +authlib/oauth2/rfc7523/client.py,sha256=UmVfZdBxqimCvx7oQwTErQzPdjKaqqs7FjTvPSMHkBY,4393 +authlib/oauth2/rfc7523/jwt_bearer.py,sha256=-6QbwuKsAhsQZLlHp8F7xzopPMIXZZsfytvjDAnH7bw,5417 +authlib/oauth2/rfc7591/__init__.py,sha256=Wb4PkmuGy8makvgceRVV1472cGL1udCS2bD9SGbqJyg,667 +authlib/oauth2/rfc7591/claims.py,sha256=4eMNWjY0Osh4IvdxAZE-bHXf0gqfk0VoERxtkSL07AQ,9809 +authlib/oauth2/rfc7591/endpoint.py,sha256=bJMaKIY-ZsIzGcPohMOKxGOaCkFRN5PjtJjF_xBBqGs,7509 +authlib/oauth2/rfc7591/errors.py,sha256=nx_1-wyT1kznHXh1R91uwa1gbRJ5YKUxWY1SiFVRTWg,1098 +authlib/oauth2/rfc7592/__init__.py,sha256=4du9AjzDNRXbmtkTWBPbpXz5ivugseKIuXtp3oAsk4w,315 +authlib/oauth2/rfc7592/endpoint.py,sha256=FldOO50vPPpXjic7mGXuQROR5C7QHj0ehCvty4_Czxc,6662 +authlib/oauth2/rfc7636/__init__.py,sha256=TIKOEYQWQqOw6bFU9Kf2-sZFSWR-e_iJGnRiowhybD0,364 +authlib/oauth2/rfc7636/challenge.py,sha256=Og_V80QCDT1v6tq186FPoEwKRieLXvHEM8lnJhJFSMM,5126 +authlib/oauth2/rfc7662/__init__.py,sha256=qOguHj3TeVYLrdf9qvxe5HMyDDv_t7A5FZ6hKA1hCw0,365 +authlib/oauth2/rfc7662/introspection.py,sha256=GMjfNQFGxpoqOM2Bv7gK2CusTMS_r1dMlxaFgPCpVDU,4788 +authlib/oauth2/rfc7662/models.py,sha256=vlmy-_UJmzy4rFjOjLsKpybpodoMsxt4FRi-URDoCKQ,868 +authlib/oauth2/rfc8414/__init__.py,sha256=G_tQ8TI6qGx1Udk5ogjd7pP-Q15k0X7tPVN4IyUUoTA,385 +authlib/oauth2/rfc8414/models.py,sha256=k_Bl8dJu7HxLob7c9R288Z3FtQAV9zTZlrzEBYii_xI,17490 +authlib/oauth2/rfc8414/well_known.py,sha256=PW0DH8-CrRehqy9kmm_iaeViOl7lFgMqWG_TZWq6NJs,745 +authlib/oauth2/rfc8628/__init__.py,sha256=oQSZsFfTr-BhdrdFhXW31FfgW3Qn5PjClNq3k1GF3z0,702 +authlib/oauth2/rfc8628/device_code.py,sha256=GnsNFrs-3OaSEdo7g1SlUCWmaq-35LIxH-gPiIH7fyg,8165 +authlib/oauth2/rfc8628/endpoint.py,sha256=PYcYyCzf8l9Rn53eJh5qfs09ms7DEio84gKNWCi7Bfw,6254 +authlib/oauth2/rfc8628/errors.py,sha256=c761U-GWU58bmfyEPw1VveaOkmOgXElBVlWaE-tXMTg,919 +authlib/oauth2/rfc8628/models.py,sha256=J2AWUdxaTScJuevuqzHE_xs5ufIGmANsIxrxsboj03A,610 +authlib/oauth2/rfc8693/__init__.py,sha256=MiJMzPYCjHeCqCZmIUrEoUfif7UVIbKGS_tjBXTPN-w,206 +authlib/oidc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +authlib/oidc/core/__init__.py,sha256=ncECH1dHLHLM1k7f2a8_nM32RextszP0SvYRGk4Q4ww,622 +authlib/oidc/core/claims.py,sha256=R4B-hPqaVxjQisSDf3PWbLOz9-Ak63f6ny8oX1XyvIU,10218 +authlib/oidc/core/errors.py,sha256=2nZdVIlTweNwXapWa7QodgCR0_5neGlCbGB2U4xUT6o,2883 +authlib/oidc/core/models.py,sha256=eYGC-NcIuu1m50h5o1bJFwUjwGF9l-AytTic8B07zmU,413 +authlib/oidc/core/util.py,sha256=J0x7rU_X35h2fLN3D49G3-F9wN3WAamFpsRPIclarK0,390 +authlib/oidc/core/grants/__init__.py,sha256=uREj2j3vw3szffWCVLCGn6aCXt91ZnwNKSzGL_bYKH8,194 +authlib/oidc/core/grants/code.py,sha256=Dpf5w55SR44mp2A5t-Vdfk7epmwBAFic_AAhRJAiILw,4071 +authlib/oidc/core/grants/hybrid.py,sha256=mDF66-sX40Qz1trtBg0xDLZMeh17dpiUy71jtjS67Ik,3649 +authlib/oidc/core/grants/implicit.py,sha256=ADCMVuGrOrUVSR57S4z9Pe2D6rUwKC3Qn5qb5-kqepg,5326 +authlib/oidc/core/grants/util.py,sha256=r40yHBeE63fC1_kbrCXKT5ftTojF19TOOenE_-uDehs,4785 +authlib/oidc/discovery/__init__.py,sha256=sI40mT-HXoRPqxAE5UfPg-r3xa_wrQZ8jIuN5n5nm70,321 +authlib/oidc/discovery/models.py,sha256=dcb7fprftGNQtlDKOBUxTf3fVtDj8CpmocBM1PjtRBc,12611 +authlib/oidc/discovery/well_known.py,sha256=3VmfTsVReChVVxBuN1eoL7fIRHL0-tN0aXH2XVKeiDM,574 +Authlib-0.15.6.dist-info/LICENSE,sha256=jhtIUY3pxs0Ay0jH_luAI_2Q1VUsoS6-c2Kg3zDdvkU,1514 
+Authlib-0.15.6.dist-info/METADATA,sha256=OgNfwUB8LEJyNRPDi8oh5sn9GdmC0O5ovLv85PfAq5Q,3778 +Authlib-0.15.6.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110 +Authlib-0.15.6.dist-info/top_level.txt,sha256=Rj3mJn0jhRuCs6x7ysI6hYE2PePbuxey6y6jswadAEY,8 +Authlib-0.15.6.dist-info/INSTALLER,sha256=sz0Tnp99msIbbLB51q7U9nA6AvRKcsOeUKhrHH0Ulg4,12 +Authlib-0.15.6.dist-info/RECORD,, diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/WHEEL b/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/WHEEL new file mode 100644 index 000000000..0b18a2811 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/top_level.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/top_level.txt new file mode 100644 index 000000000..b91e7e46b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Authlib-0.15.6.dist-info/top_level.txt @@ -0,0 +1 @@ +authlib diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/AES.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/AES.py new file mode 100644 index 000000000..40441f4c5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/AES.py @@ -0,0 +1,234 @@ +# -*- coding: utf-8 -*- +# +# Cipher/AES.py : AES +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +import sys + +from Crypto.Cipher import _create_cipher +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + c_size_t, c_uint8_ptr) + +from Crypto.Util import _cpu_features +from Crypto.Random import get_random_bytes + +MODE_ECB = 1 #: Electronic Code Book (:ref:`ecb_mode`) +MODE_CBC = 2 #: Cipher-Block Chaining (:ref:`cbc_mode`) +MODE_CFB = 3 #: Cipher Feedback (:ref:`cfb_mode`) +MODE_OFB = 5 #: Output Feedback (:ref:`ofb_mode`) +MODE_CTR = 6 #: Counter mode (:ref:`ctr_mode`) +MODE_OPENPGP = 7 #: OpenPGP mode (:ref:`openpgp_mode`) +MODE_CCM = 8 #: Counter with CBC-MAC (:ref:`ccm_mode`) +MODE_EAX = 9 #: :ref:`eax_mode` +MODE_SIV = 10 #: Synthetic Initialization Vector (:ref:`siv_mode`) +MODE_GCM = 11 #: Galois Counter Mode (:ref:`gcm_mode`) +MODE_OCB = 12 #: Offset Code Book (:ref:`ocb_mode`) + + +_cproto = """ + int AES_start_operation(const uint8_t key[], + size_t key_len, + void **pResult); + int AES_encrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int AES_decrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int AES_stop_operation(void *state); + """ + + +# Load portable AES +_raw_aes_lib = load_pycryptodome_raw_lib("Crypto.Cipher._raw_aes", + _cproto) + +# Try to load AES with AES NI instructions +try: + _raw_aesni_lib = None + if _cpu_features.have_aes_ni(): + _raw_aesni_lib = load_pycryptodome_raw_lib("Crypto.Cipher._raw_aesni", + _cproto.replace("AES", + "AESNI")) +# _raw_aesni may not have been compiled in +except OSError: + pass + + +def _create_base_cipher(dict_parameters): + """This method instantiates and returns a handle to a low-level + base cipher. It will absorb named parameters in the process.""" + + use_aesni = dict_parameters.pop("use_aesni", True) + + try: + key = dict_parameters.pop("key") + except KeyError: + raise TypeError("Missing 'key' parameter") + + if len(key) not in key_size: + raise ValueError("Incorrect AES key length (%d bytes)" % len(key)) + + if use_aesni and _raw_aesni_lib: + start_operation = _raw_aesni_lib.AESNI_start_operation + stop_operation = _raw_aesni_lib.AESNI_stop_operation + else: + start_operation = _raw_aes_lib.AES_start_operation + stop_operation = _raw_aes_lib.AES_stop_operation + + cipher = VoidPointer() + result = start_operation(c_uint8_ptr(key), + c_size_t(len(key)), + cipher.address_of()) + if result: + raise ValueError("Error %X while instantiating the AES cipher" + % result) + return SmartPointer(cipher.get(), stop_operation) + + +def _derive_Poly1305_key_pair(key, nonce): + """Derive a tuple (r, s, nonce) for a Poly1305 MAC. + + If nonce is ``None``, a new 16-byte nonce is generated. + """ + + if len(key) != 32: + raise ValueError("Poly1305 with AES requires a 32-byte key") + + if nonce is None: + nonce = get_random_bytes(16) + elif len(nonce) != 16: + raise ValueError("Poly1305 with AES requires a 16-byte nonce") + + s = new(key[:16], MODE_ECB).encrypt(nonce) + return key[16:], s, nonce + + +def new(key, mode, *args, **kwargs): + """Create a new AES cipher. + + Args: + key(bytes/bytearray/memoryview): + The secret key to use in the symmetric cipher. + + It must be 16 (*AES-128)*, 24 (*AES-192*) or 32 (*AES-256*) bytes long. + + For ``MODE_SIV`` only, it doubles to 32, 48, or 64 bytes. + mode (a ``MODE_*`` constant): + The chaining mode to use for encryption or decryption. + If in doubt, use ``MODE_EAX``. 
+ + Keyword Args: + iv (bytes/bytearray/memoryview): + (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, + and ``MODE_OPENPGP`` modes). + + The initialization vector to use for encryption or decryption. + + For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 16 bytes long. + + For ``MODE_OPENPGP`` mode only, + it must be 16 bytes long for encryption + and 18 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + + If not provided, a random byte string is generated (you must then + read its value with the :attr:`iv` attribute). + + nonce (bytes/bytearray/memoryview): + (Only applicable for ``MODE_CCM``, ``MODE_EAX``, ``MODE_GCM``, + ``MODE_SIV``, ``MODE_OCB``, and ``MODE_CTR``). + + A value that must never be reused for any other encryption done + with this key (except possibly for ``MODE_SIV``, see below). + + For ``MODE_EAX``, ``MODE_GCM`` and ``MODE_SIV`` there are no + restrictions on its length (recommended: **16** bytes). + + For ``MODE_CCM``, its length must be in the range **[7..13]**. + Bear in mind that with CCM there is a trade-off between nonce + length and maximum message size. Recommendation: **11** bytes. + + For ``MODE_OCB``, its length must be in the range **[1..15]** + (recommended: **15**). + + For ``MODE_CTR``, its length must be in the range **[0..15]** + (recommended: **8**). + + For ``MODE_SIV``, the nonce is optional, if it is not specified, + then no nonce is being used, which renders the encryption + deterministic. + + If not provided, for modes other than ``MODE_SIV``, a random + byte string of the recommended length is used (you must then + read its value with the :attr:`nonce` attribute). + + segment_size (integer): + (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext + are segmented in. It must be a multiple of 8. + If not specified, it will be assumed to be 8. + + mac_len (integer): + (Only ``MODE_EAX``, ``MODE_GCM``, ``MODE_OCB``, ``MODE_CCM``) + Length of the authentication tag, in bytes. + + It must be even and in the range **[4..16]**. + The recommended value (and the default, if not specified) is **16**. + + msg_len (integer): + (Only ``MODE_CCM``). Length of the message to (de)cipher. + If not specified, ``encrypt`` must be called with the entire message. + Similarly, ``decrypt`` can only be called once. + + assoc_len (integer): + (Only ``MODE_CCM``). Length of the associated data. + If not specified, all associated data is buffered internally, + which may represent a problem for very large messages. + + initial_value (integer or bytes/bytearray/memoryview): + (Only ``MODE_CTR``). + The initial value for the counter. If not present, the cipher will + start counting from 0. The value is incremented by one for each block. + The counter number is encoded in big endian mode. + + counter (object): + (Only ``MODE_CTR``). + Instance of ``Crypto.Util.Counter``, which allows full customization + of the counter block. This parameter is incompatible to both ``nonce`` + and ``initial_value``. + + use_aesni: (boolean): + Use Intel AES-NI hardware extensions (default: use if available). + + Returns: + an AES object, of the applicable mode. 
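Reviewer note: the docstring above documents the standard pycryptodome AES.new() entry point. A minimal usage sketch of that API, assuming the recommended MODE_EAX and using illustrative key and message values that are not part of the vendored file:

    from Crypto.Cipher import AES
    from Crypto.Random import get_random_bytes

    key = get_random_bytes(16)                    # AES-128 key
    cipher = AES.new(key, AES.MODE_EAX)           # a random nonce is generated automatically
    ciphertext, tag = cipher.encrypt_and_digest(b"secret data")

    # Decryption re-creates the cipher with the same key and nonce,
    # then verifies the authentication tag.
    decipher = AES.new(key, AES.MODE_EAX, nonce=cipher.nonce)
    plaintext = decipher.decrypt_and_verify(ciphertext, tag)
    assert plaintext == b"secret data"

The same encrypt_and_digest / decrypt_and_verify pattern applies to the other authenticated modes listed above (GCM, CCM, OCB).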
+ """ + + kwargs["add_aes_modes"] = True + return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) + + +# Size of a data block (in bytes) +block_size = 16 +# Size of a key (in bytes) +key_size = (16, 24, 32) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/AES.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/AES.pyi new file mode 100644 index 000000000..125e3e248 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/AES.pyi @@ -0,0 +1,156 @@ +from typing import Dict, Optional, Tuple, Union, overload +from typing_extensions import Literal + +Buffer=bytes|bytearray|memoryview + +from Crypto.Cipher._mode_ecb import EcbMode +from Crypto.Cipher._mode_cbc import CbcMode +from Crypto.Cipher._mode_cfb import CfbMode +from Crypto.Cipher._mode_ofb import OfbMode +from Crypto.Cipher._mode_ctr import CtrMode +from Crypto.Cipher._mode_openpgp import OpenPgpMode +from Crypto.Cipher._mode_ccm import CcmMode +from Crypto.Cipher._mode_eax import EaxMode +from Crypto.Cipher._mode_gcm import GcmMode +from Crypto.Cipher._mode_siv import SivMode +from Crypto.Cipher._mode_ocb import OcbMode + +MODE_ECB: Literal[1] +MODE_CBC: Literal[2] +MODE_CFB: Literal[3] +MODE_OFB: Literal[5] +MODE_CTR: Literal[6] +MODE_OPENPGP: Literal[7] +MODE_CCM: Literal[8] +MODE_EAX: Literal[9] +MODE_SIV: Literal[10] +MODE_GCM: Literal[11] +MODE_OCB: Literal[12] + +# MODE_ECB +@overload +def new(key: Buffer, + mode: Literal[1], + use_aesni : bool = ...) -> \ + EcbMode: ... + +# MODE_CBC +@overload +def new(key: Buffer, + mode: Literal[2], + iv : Optional[Buffer] = ..., + use_aesni : bool = ...) -> \ + CbcMode: ... + +@overload +def new(key: Buffer, + mode: Literal[2], + IV : Optional[Buffer] = ..., + use_aesni : bool = ...) -> \ + CbcMode: ... + +# MODE_CFB +@overload +def new(key: Buffer, + mode: Literal[3], + iv : Optional[Buffer] = ..., + segment_size : int = ..., + use_aesni : bool = ...) -> \ + CfbMode: ... + +@overload +def new(key: Buffer, + mode: Literal[3], + IV : Optional[Buffer] = ..., + segment_size : int = ..., + use_aesni : bool = ...) -> \ + CfbMode: ... + +# MODE_OFB +@overload +def new(key: Buffer, + mode: Literal[5], + iv : Optional[Buffer] = ..., + use_aesni : bool = ...) -> \ + OfbMode: ... + +@overload +def new(key: Buffer, + mode: Literal[5], + IV : Optional[Buffer] = ..., + use_aesni : bool = ...) -> \ + OfbMode: ... + +# MODE_CTR +@overload +def new(key: Buffer, + mode: Literal[6], + nonce : Optional[Buffer] = ..., + initial_value : Union[int, Buffer] = ..., + counter : Dict = ..., + use_aesni : bool = ...) -> \ + CtrMode: ... + +# MODE_OPENPGP +@overload +def new(key: Buffer, + mode: Literal[7], + iv : Optional[Buffer] = ..., + use_aesni : bool = ...) -> \ + OpenPgpMode: ... + +@overload +def new(key: Buffer, + mode: Literal[7], + IV : Optional[Buffer] = ..., + use_aesni : bool = ...) -> \ + OpenPgpMode: ... + +# MODE_CCM +@overload +def new(key: Buffer, + mode: Literal[8], + nonce : Optional[Buffer] = ..., + mac_len : int = ..., + assoc_len : int = ..., + use_aesni : bool = ...) -> \ + CcmMode: ... + +# MODE_EAX +@overload +def new(key: Buffer, + mode: Literal[9], + nonce : Optional[Buffer] = ..., + mac_len : int = ..., + use_aesni : bool = ...) -> \ + EaxMode: ... + +# MODE_GCM +@overload +def new(key: Buffer, + mode: Literal[10], + nonce : Optional[Buffer] = ..., + use_aesni : bool = ...) -> \ + SivMode: ... 
+ +# MODE_SIV +@overload +def new(key: Buffer, + mode: Literal[11], + nonce : Optional[Buffer] = ..., + mac_len : int = ..., + use_aesni : bool = ...) -> \ + GcmMode: ... + +# MODE_OCB +@overload +def new(key: Buffer, + mode: Literal[12], + nonce : Optional[Buffer] = ..., + mac_len : int = ..., + use_aesni : bool = ...) -> \ + OcbMode: ... + + +block_size: int +key_size: Tuple[int, int, int] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ARC2.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ARC2.py new file mode 100644 index 000000000..0ba7e3353 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ARC2.py @@ -0,0 +1,175 @@ +# -*- coding: utf-8 -*- +# +# Cipher/ARC2.py : ARC2.py +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +""" +Module's constants for the modes of operation supported with ARC2: + +:var MODE_ECB: :ref:`Electronic Code Book (ECB) ` +:var MODE_CBC: :ref:`Cipher-Block Chaining (CBC) ` +:var MODE_CFB: :ref:`Cipher FeedBack (CFB) ` +:var MODE_OFB: :ref:`Output FeedBack (OFB) ` +:var MODE_CTR: :ref:`CounTer Mode (CTR) ` +:var MODE_OPENPGP: :ref:`OpenPGP Mode ` +:var MODE_EAX: :ref:`EAX Mode ` +""" + +import sys + +from Crypto.Cipher import _create_cipher +from Crypto.Util.py3compat import byte_string +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + c_size_t, c_uint8_ptr) + +_raw_arc2_lib = load_pycryptodome_raw_lib( + "Crypto.Cipher._raw_arc2", + """ + int ARC2_start_operation(const uint8_t key[], + size_t key_len, + size_t effective_key_len, + void **pResult); + int ARC2_encrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int ARC2_decrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int ARC2_stop_operation(void *state); + """ + ) + + +def _create_base_cipher(dict_parameters): + """This method instantiates and returns a handle to a low-level + base cipher. 
It will absorb named parameters in the process.""" + + try: + key = dict_parameters.pop("key") + except KeyError: + raise TypeError("Missing 'key' parameter") + + effective_keylen = dict_parameters.pop("effective_keylen", 1024) + + if len(key) not in key_size: + raise ValueError("Incorrect ARC2 key length (%d bytes)" % len(key)) + + if not (40 <= effective_keylen <= 1024): + raise ValueError("'effective_key_len' must be at least 40 and no larger than 1024 " + "(not %d)" % effective_keylen) + + start_operation = _raw_arc2_lib.ARC2_start_operation + stop_operation = _raw_arc2_lib.ARC2_stop_operation + + cipher = VoidPointer() + result = start_operation(c_uint8_ptr(key), + c_size_t(len(key)), + c_size_t(effective_keylen), + cipher.address_of()) + if result: + raise ValueError("Error %X while instantiating the ARC2 cipher" + % result) + + return SmartPointer(cipher.get(), stop_operation) + + +def new(key, mode, *args, **kwargs): + """Create a new RC2 cipher. + + :param key: + The secret key to use in the symmetric cipher. + Its length can vary from 5 to 128 bytes; the actual search space + (and the cipher strength) can be reduced with the ``effective_keylen`` parameter. + :type key: bytes, bytearray, memoryview + + :param mode: + The chaining mode to use for encryption or decryption. + :type mode: One of the supported ``MODE_*`` constants + + :Keyword Arguments: + * **iv** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, + and ``MODE_OPENPGP`` modes). + + The initialization vector to use for encryption or decryption. + + For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 8 bytes long. + + For ``MODE_OPENPGP`` mode only, + it must be 8 bytes long for encryption + and 10 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + + If not provided, a random byte string is generated (you must then + read its value with the :attr:`iv` attribute). + + * **nonce** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_EAX`` and ``MODE_CTR``). + + A value that must never be reused for any other encryption done + with this key. + + For ``MODE_EAX`` there are no + restrictions on its length (recommended: **16** bytes). + + For ``MODE_CTR``, its length must be in the range **[0..7]**. + + If not provided for ``MODE_EAX``, a random byte string is generated (you + can read it back via the ``nonce`` attribute). + + * **effective_keylen** (*integer*) -- + Optional. Maximum strength in bits of the actual key used by the ARC2 algorithm. + If the supplied ``key`` parameter is longer (in bits) of the value specified + here, it will be weakened to match it. + If not specified, no limitation is applied. + + * **segment_size** (*integer*) -- + (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext + are segmented in. It must be a multiple of 8. + If not specified, it will be assumed to be 8. + + * **mac_len** : (*integer*) -- + (Only ``MODE_EAX``) + Length of the authentication tag, in bytes. + It must be no longer than 8 (default). + + * **initial_value** : (*integer*) -- + (Only ``MODE_CTR``). The initial value for the counter within + the counter block. By default it is **0**. + + :Return: an ARC2 object, of the applicable mode. 
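Reviewer note: for the RC2 docstring above, a minimal CBC-mode sketch of the same pycryptodome API (key, IV and message values are illustrative, not part of the vendored file):

    from Crypto.Cipher import ARC2
    from Crypto.Random import get_random_bytes

    key = get_random_bytes(16)
    cipher = ARC2.new(key, ARC2.MODE_CBC)         # a random 8-byte IV is generated
    message = b"0123456789abcdef"                 # CBC input must be a multiple of the 8-byte block
    ciphertext = cipher.encrypt(message)

    decipher = ARC2.new(key, ARC2.MODE_CBC, iv=cipher.iv)
    assert decipher.decrypt(ciphertext) == message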
+ """ + + return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) + +MODE_ECB = 1 +MODE_CBC = 2 +MODE_CFB = 3 +MODE_OFB = 5 +MODE_CTR = 6 +MODE_OPENPGP = 7 +MODE_EAX = 9 + +# Size of a data block (in bytes) +block_size = 8 +# Size of a key (in bytes) +key_size = range(5, 128 + 1) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ARC2.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ARC2.pyi new file mode 100644 index 000000000..771c6dc85 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ARC2.pyi @@ -0,0 +1,35 @@ +from typing import Union, Dict, Iterable, Optional + +Buffer = bytes|bytearray|memoryview + +from Crypto.Cipher._mode_ecb import EcbMode +from Crypto.Cipher._mode_cbc import CbcMode +from Crypto.Cipher._mode_cfb import CfbMode +from Crypto.Cipher._mode_ofb import OfbMode +from Crypto.Cipher._mode_ctr import CtrMode +from Crypto.Cipher._mode_openpgp import OpenPgpMode +from Crypto.Cipher._mode_eax import EaxMode + +ARC2Mode = int + +MODE_ECB: ARC2Mode +MODE_CBC: ARC2Mode +MODE_CFB: ARC2Mode +MODE_OFB: ARC2Mode +MODE_CTR: ARC2Mode +MODE_OPENPGP: ARC2Mode +MODE_EAX: ARC2Mode + +def new(key: Buffer, + mode: ARC2Mode, + iv : Optional[Buffer] = ..., + IV : Optional[Buffer] = ..., + nonce : Optional[Buffer] = ..., + segment_size : int = ..., + mac_len : int = ..., + initial_value : Union[int, Buffer] = ..., + counter : Dict = ...) -> \ + Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode, OpenPgpMode]: ... + +block_size: int +key_size: Iterable[int] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ARC4.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ARC4.py new file mode 100644 index 000000000..97dc22978 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ARC4.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +# +# Cipher/ARC4.py : ARC4 +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, + create_string_buffer, get_raw_buffer, + SmartPointer, c_size_t, c_uint8_ptr) + + +_raw_arc4_lib = load_pycryptodome_raw_lib("Crypto.Cipher._ARC4", """ + int ARC4_stream_encrypt(void *rc4State, const uint8_t in[], + uint8_t out[], size_t len); + int ARC4_stream_init(uint8_t *key, size_t keylen, + void **pRc4State); + int ARC4_stream_destroy(void *rc4State); + """) + + +class ARC4Cipher: + """ARC4 cipher object. Do not create it directly. Use + :func:`Crypto.Cipher.ARC4.new` instead. 
+ """ + + def __init__(self, key, *args, **kwargs): + """Initialize an ARC4 cipher object + + See also `new()` at the module level.""" + + if len(args) > 0: + ndrop = args[0] + args = args[1:] + else: + ndrop = kwargs.pop('drop', 0) + + if len(key) not in key_size: + raise ValueError("Incorrect ARC4 key length (%d bytes)" % + len(key)) + + self._state = VoidPointer() + result = _raw_arc4_lib.ARC4_stream_init(c_uint8_ptr(key), + c_size_t(len(key)), + self._state.address_of()) + if result != 0: + raise ValueError("Error %d while creating the ARC4 cipher" + % result) + self._state = SmartPointer(self._state.get(), + _raw_arc4_lib.ARC4_stream_destroy) + + if ndrop > 0: + # This is OK even if the cipher is used for decryption, + # since encrypt and decrypt are actually the same thing + # with ARC4. + self.encrypt(b'\x00' * ndrop) + + self.block_size = 1 + self.key_size = len(key) + + def encrypt(self, plaintext): + """Encrypt a piece of data. + + :param plaintext: The data to encrypt, of any size. + :type plaintext: bytes, bytearray, memoryview + :returns: the encrypted byte string, of equal length as the + plaintext. + """ + + ciphertext = create_string_buffer(len(plaintext)) + result = _raw_arc4_lib.ARC4_stream_encrypt(self._state.get(), + c_uint8_ptr(plaintext), + ciphertext, + c_size_t(len(plaintext))) + if result: + raise ValueError("Error %d while encrypting with RC4" % result) + return get_raw_buffer(ciphertext) + + def decrypt(self, ciphertext): + """Decrypt a piece of data. + + :param ciphertext: The data to decrypt, of any size. + :type ciphertext: bytes, bytearray, memoryview + :returns: the decrypted byte string, of equal length as the + ciphertext. + """ + + try: + return self.encrypt(ciphertext) + except ValueError as e: + raise ValueError(str(e).replace("enc", "dec")) + + +def new(key, *args, **kwargs): + """Create a new ARC4 cipher. + + :param key: + The secret key to use in the symmetric cipher. + Its length must be in the range ``[1..256]``. + The recommended length is 16 bytes. + :type key: bytes, bytearray, memoryview + + :Keyword Arguments: + * *drop* (``integer``) -- + The amount of bytes to discard from the initial part of the keystream. + In fact, such part has been found to be distinguishable from random + data (while it shouldn't) and also correlated to key. + + The recommended value is 3072_ bytes. The default value is 0. + + :Return: an `ARC4Cipher` object + + .. _3072: http://eprint.iacr.org/2002/067.pdf + """ + return ARC4Cipher(key, *args, **kwargs) + + +# Size of a data block (in bytes) +block_size = 1 +# Size of a key (in bytes) +key_size = range(1, 256+1) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ARC4.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ARC4.pyi new file mode 100644 index 000000000..b08158595 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ARC4.pyi @@ -0,0 +1,16 @@ +from typing import Any, Union, Iterable + +Buffer = bytes|bytearray|memoryview + +class ARC4Cipher: + block_size: int + key_size: int + + def __init__(self, key: Buffer, *args: Any, **kwargs: Any) -> None: ... + def encrypt(self, plaintext: Buffer) -> bytes: ... + def decrypt(self, ciphertext: Buffer) -> bytes: ... + +def new(key: Buffer, drop : int = ...) -> ARC4Cipher: ... 
+ +block_size: int +key_size: Iterable[int] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/Blowfish.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/Blowfish.py new file mode 100644 index 000000000..6005ffe2b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/Blowfish.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# +# Cipher/Blowfish.py : Blowfish +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +""" +Module's constants for the modes of operation supported with Blowfish: + +:var MODE_ECB: :ref:`Electronic Code Book (ECB) ` +:var MODE_CBC: :ref:`Cipher-Block Chaining (CBC) ` +:var MODE_CFB: :ref:`Cipher FeedBack (CFB) ` +:var MODE_OFB: :ref:`Output FeedBack (OFB) ` +:var MODE_CTR: :ref:`CounTer Mode (CTR) ` +:var MODE_OPENPGP: :ref:`OpenPGP Mode ` +:var MODE_EAX: :ref:`EAX Mode ` +""" + +import sys + +from Crypto.Cipher import _create_cipher +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, c_size_t, + c_uint8_ptr) + +_raw_blowfish_lib = load_pycryptodome_raw_lib( + "Crypto.Cipher._raw_blowfish", + """ + int Blowfish_start_operation(const uint8_t key[], + size_t key_len, + void **pResult); + int Blowfish_encrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int Blowfish_decrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int Blowfish_stop_operation(void *state); + """ + ) + + +def _create_base_cipher(dict_parameters): + """This method instantiates and returns a smart pointer to + a low-level base cipher. It will absorb named parameters in + the process.""" + + try: + key = dict_parameters.pop("key") + except KeyError: + raise TypeError("Missing 'key' parameter") + + if len(key) not in key_size: + raise ValueError("Incorrect Blowfish key length (%d bytes)" % len(key)) + + start_operation = _raw_blowfish_lib.Blowfish_start_operation + stop_operation = _raw_blowfish_lib.Blowfish_stop_operation + + void_p = VoidPointer() + result = start_operation(c_uint8_ptr(key), + c_size_t(len(key)), + void_p.address_of()) + if result: + raise ValueError("Error %X while instantiating the Blowfish cipher" + % result) + return SmartPointer(void_p.get(), stop_operation) + + +def new(key, mode, *args, **kwargs): + """Create a new Blowfish cipher + + :param key: + The secret key to use in the symmetric cipher. + Its length can vary from 5 to 56 bytes. + :type key: bytes, bytearray, memoryview + + :param mode: + The chaining mode to use for encryption or decryption. 
+ :type mode: One of the supported ``MODE_*`` constants + + :Keyword Arguments: + * **iv** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, + and ``MODE_OPENPGP`` modes). + + The initialization vector to use for encryption or decryption. + + For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 8 bytes long. + + For ``MODE_OPENPGP`` mode only, + it must be 8 bytes long for encryption + and 10 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + + If not provided, a random byte string is generated (you must then + read its value with the :attr:`iv` attribute). + + * **nonce** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_EAX`` and ``MODE_CTR``). + + A value that must never be reused for any other encryption done + with this key. + + For ``MODE_EAX`` there are no + restrictions on its length (recommended: **16** bytes). + + For ``MODE_CTR``, its length must be in the range **[0..7]**. + + If not provided for ``MODE_EAX``, a random byte string is generated (you + can read it back via the ``nonce`` attribute). + + * **segment_size** (*integer*) -- + (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext + are segmented in. It must be a multiple of 8. + If not specified, it will be assumed to be 8. + + * **mac_len** : (*integer*) -- + (Only ``MODE_EAX``) + Length of the authentication tag, in bytes. + It must be no longer than 8 (default). + + * **initial_value** : (*integer*) -- + (Only ``MODE_CTR``). The initial value for the counter within + the counter block. By default it is **0**. + + :Return: a Blowfish object, of the applicable mode. + """ + + return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) + +MODE_ECB = 1 +MODE_CBC = 2 +MODE_CFB = 3 +MODE_OFB = 5 +MODE_CTR = 6 +MODE_OPENPGP = 7 +MODE_EAX = 9 + +# Size of a data block (in bytes) +block_size = 8 +# Size of a key (in bytes) +key_size = range(4, 56 + 1) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/Blowfish.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/Blowfish.pyi new file mode 100644 index 000000000..76c3808c8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/Blowfish.pyi @@ -0,0 +1,35 @@ +from typing import Union, Dict, Iterable, Optional + +Buffer = bytes|bytearray|memoryview + +from Crypto.Cipher._mode_ecb import EcbMode +from Crypto.Cipher._mode_cbc import CbcMode +from Crypto.Cipher._mode_cfb import CfbMode +from Crypto.Cipher._mode_ofb import OfbMode +from Crypto.Cipher._mode_ctr import CtrMode +from Crypto.Cipher._mode_openpgp import OpenPgpMode +from Crypto.Cipher._mode_eax import EaxMode + +BlowfishMode = int + +MODE_ECB: BlowfishMode +MODE_CBC: BlowfishMode +MODE_CFB: BlowfishMode +MODE_OFB: BlowfishMode +MODE_CTR: BlowfishMode +MODE_OPENPGP: BlowfishMode +MODE_EAX: BlowfishMode + +def new(key: Buffer, + mode: BlowfishMode, + iv : Optional[Buffer] = ..., + IV : Optional[Buffer] = ..., + nonce : Optional[Buffer] = ..., + segment_size : int = ..., + mac_len : int = ..., + initial_value : Union[int, Buffer] = ..., + counter : Dict = ...) -> \ + Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode, OpenPgpMode]: ... 
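Reviewer note: Blowfish is an 8-byte block cipher, so CBC-mode use of the API documented above normally goes through ``Crypto.Util.Padding``. A minimal sketch (illustrative values only, not part of the vendored file):

    from Crypto.Cipher import Blowfish
    from Crypto.Util.Padding import pad, unpad
    from Crypto.Random import get_random_bytes

    key = get_random_bytes(16)
    cipher = Blowfish.new(key, Blowfish.MODE_CBC)             # a random 8-byte IV is generated
    ciphertext = cipher.encrypt(pad(b"arbitrary-length data", Blowfish.block_size))

    decipher = Blowfish.new(key, Blowfish.MODE_CBC, iv=cipher.iv)
    assert unpad(decipher.decrypt(ciphertext), Blowfish.block_size) == b"arbitrary-length data"

The CAST module that follows exposes the same mode constants and keyword arguments, so the identical pattern applies there.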
+ +block_size: int +key_size: Iterable[int] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/CAST.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/CAST.py new file mode 100644 index 000000000..c7e82c1c7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/CAST.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# +# Cipher/CAST.py : CAST +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +""" +Module's constants for the modes of operation supported with CAST: + +:var MODE_ECB: :ref:`Electronic Code Book (ECB) ` +:var MODE_CBC: :ref:`Cipher-Block Chaining (CBC) ` +:var MODE_CFB: :ref:`Cipher FeedBack (CFB) ` +:var MODE_OFB: :ref:`Output FeedBack (OFB) ` +:var MODE_CTR: :ref:`CounTer Mode (CTR) ` +:var MODE_OPENPGP: :ref:`OpenPGP Mode ` +:var MODE_EAX: :ref:`EAX Mode ` +""" + +import sys + +from Crypto.Cipher import _create_cipher +from Crypto.Util.py3compat import byte_string +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + c_size_t, c_uint8_ptr) + +_raw_cast_lib = load_pycryptodome_raw_lib( + "Crypto.Cipher._raw_cast", + """ + int CAST_start_operation(const uint8_t key[], + size_t key_len, + void **pResult); + int CAST_encrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int CAST_decrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int CAST_stop_operation(void *state); + """) + + +def _create_base_cipher(dict_parameters): + """This method instantiates and returns a handle to a low-level + base cipher. It will absorb named parameters in the process.""" + + try: + key = dict_parameters.pop("key") + except KeyError: + raise TypeError("Missing 'key' parameter") + + if len(key) not in key_size: + raise ValueError("Incorrect CAST key length (%d bytes)" % len(key)) + + start_operation = _raw_cast_lib.CAST_start_operation + stop_operation = _raw_cast_lib.CAST_stop_operation + + cipher = VoidPointer() + result = start_operation(c_uint8_ptr(key), + c_size_t(len(key)), + cipher.address_of()) + if result: + raise ValueError("Error %X while instantiating the CAST cipher" + % result) + + return SmartPointer(cipher.get(), stop_operation) + + +def new(key, mode, *args, **kwargs): + """Create a new CAST cipher + + :param key: + The secret key to use in the symmetric cipher. + Its length can vary from 5 to 16 bytes. + :type key: bytes, bytearray, memoryview + + :param mode: + The chaining mode to use for encryption or decryption. 
+ :type mode: One of the supported ``MODE_*`` constants + + :Keyword Arguments: + * **iv** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, + and ``MODE_OPENPGP`` modes). + + The initialization vector to use for encryption or decryption. + + For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 8 bytes long. + + For ``MODE_OPENPGP`` mode only, + it must be 8 bytes long for encryption + and 10 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + + If not provided, a random byte string is generated (you must then + read its value with the :attr:`iv` attribute). + + * **nonce** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_EAX`` and ``MODE_CTR``). + + A value that must never be reused for any other encryption done + with this key. + + For ``MODE_EAX`` there are no + restrictions on its length (recommended: **16** bytes). + + For ``MODE_CTR``, its length must be in the range **[0..7]**. + + If not provided for ``MODE_EAX``, a random byte string is generated (you + can read it back via the ``nonce`` attribute). + + * **segment_size** (*integer*) -- + (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext + are segmented in. It must be a multiple of 8. + If not specified, it will be assumed to be 8. + + * **mac_len** : (*integer*) -- + (Only ``MODE_EAX``) + Length of the authentication tag, in bytes. + It must be no longer than 8 (default). + + * **initial_value** : (*integer*) -- + (Only ``MODE_CTR``). The initial value for the counter within + the counter block. By default it is **0**. + + :Return: a CAST object, of the applicable mode. + """ + + return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) + +MODE_ECB = 1 +MODE_CBC = 2 +MODE_CFB = 3 +MODE_OFB = 5 +MODE_CTR = 6 +MODE_OPENPGP = 7 +MODE_EAX = 9 + +# Size of a data block (in bytes) +block_size = 8 +# Size of a key (in bytes) +key_size = range(5, 16 + 1) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/CAST.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/CAST.pyi new file mode 100644 index 000000000..f107552dc --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/CAST.pyi @@ -0,0 +1,35 @@ +from typing import Union, Dict, Iterable, Optional + +Buffer = bytes|bytearray|memoryview + +from Crypto.Cipher._mode_ecb import EcbMode +from Crypto.Cipher._mode_cbc import CbcMode +from Crypto.Cipher._mode_cfb import CfbMode +from Crypto.Cipher._mode_ofb import OfbMode +from Crypto.Cipher._mode_ctr import CtrMode +from Crypto.Cipher._mode_openpgp import OpenPgpMode +from Crypto.Cipher._mode_eax import EaxMode + +CASTMode = int + +MODE_ECB: CASTMode +MODE_CBC: CASTMode +MODE_CFB: CASTMode +MODE_OFB: CASTMode +MODE_CTR: CASTMode +MODE_OPENPGP: CASTMode +MODE_EAX: CASTMode + +def new(key: Buffer, + mode: CASTMode, + iv : Optional[Buffer] = ..., + IV : Optional[Buffer] = ..., + nonce : Optional[Buffer] = ..., + segment_size : int = ..., + mac_len : int = ..., + initial_value : Union[int, Buffer] = ..., + counter : Dict = ...) -> \ + Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode, OpenPgpMode]: ... 
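A usage sketch for CAST in the authenticated EAX mode listed above; the ``encrypt_and_digest()``/``decrypt_and_verify()`` helpers are assumed from the EAX mode object (its class is only imported, not shown, in this file), and the 16-byte key is illustrative:

from Crypto.Cipher import CAST
from Crypto.Random import get_random_bytes

key = get_random_bytes(16)                   # CAST keys are 5..16 bytes
cipher = CAST.new(key, CAST.MODE_EAX)        # a random nonce is generated automatically
ciphertext, tag = cipher.encrypt_and_digest(b"confidential")

# Decrypt and authenticate with the same key and nonce
decipher = CAST.new(key, CAST.MODE_EAX, nonce=cipher.nonce)
plaintext = decipher.decrypt_and_verify(ciphertext, tag)   # raises ValueError on tampering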
+ +block_size: int +key_size : Iterable[int] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ChaCha20.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ChaCha20.py new file mode 100644 index 000000000..8f3ec24df --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ChaCha20.py @@ -0,0 +1,287 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Random import get_random_bytes + +from Crypto.Util.py3compat import _copy_bytes +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + create_string_buffer, + get_raw_buffer, VoidPointer, + SmartPointer, c_size_t, + c_uint8_ptr, c_ulong, + is_writeable_buffer) + +_raw_chacha20_lib = load_pycryptodome_raw_lib("Crypto.Cipher._chacha20", + """ + int chacha20_init(void **pState, + const uint8_t *key, + size_t keySize, + const uint8_t *nonce, + size_t nonceSize); + + int chacha20_destroy(void *state); + + int chacha20_encrypt(void *state, + const uint8_t in[], + uint8_t out[], + size_t len); + + int chacha20_seek(void *state, + unsigned long block_high, + unsigned long block_low, + unsigned offset); + int hchacha20( const uint8_t key[32], + const uint8_t nonce16[16], + uint8_t subkey[32]); + """) + + +def _HChaCha20(key, nonce): + + assert(len(key) == 32) + assert(len(nonce) == 16) + + subkey = bytearray(32) + result = _raw_chacha20_lib.hchacha20( + c_uint8_ptr(key), + c_uint8_ptr(nonce), + c_uint8_ptr(subkey)) + if result: + raise ValueError("Error %d when deriving subkey with HChaCha20" % result) + + return subkey + + +class ChaCha20Cipher(object): + """ChaCha20 (or XChaCha20) cipher object. + Do not create it directly. Use :py:func:`new` instead. 
+ + :var nonce: The nonce with length 8, 12 or 24 bytes + :vartype nonce: bytes + """ + + block_size = 1 + + def __init__(self, key, nonce): + """Initialize a ChaCha20/XChaCha20 cipher object + + See also `new()` at the module level.""" + + self.nonce = _copy_bytes(None, None, nonce) + + # XChaCha20 requires a key derivation with HChaCha20 + # See 2.3 in https://tools.ietf.org/html/draft-arciszewski-xchacha-03 + if len(nonce) == 24: + key = _HChaCha20(key, nonce[:16]) + nonce = b'\x00' * 4 + nonce[16:] + self._name = "XChaCha20" + else: + self._name = "ChaCha20" + nonce = self.nonce + + self._next = ("encrypt", "decrypt") + + self._state = VoidPointer() + result = _raw_chacha20_lib.chacha20_init( + self._state.address_of(), + c_uint8_ptr(key), + c_size_t(len(key)), + nonce, + c_size_t(len(nonce))) + if result: + raise ValueError("Error %d instantiating a %s cipher" % (result, + self._name)) + self._state = SmartPointer(self._state.get(), + _raw_chacha20_lib.chacha20_destroy) + + def encrypt(self, plaintext, output=None): + """Encrypt a piece of data. + + Args: + plaintext(bytes/bytearray/memoryview): The data to encrypt, of any size. + Keyword Args: + output(bytes/bytearray/memoryview): The location where the ciphertext + is written to. If ``None``, the ciphertext is returned. + Returns: + If ``output`` is ``None``, the ciphertext is returned as ``bytes``. + Otherwise, ``None``. + """ + + if "encrypt" not in self._next: + raise TypeError("Cipher object can only be used for decryption") + self._next = ("encrypt",) + return self._encrypt(plaintext, output) + + def _encrypt(self, plaintext, output): + """Encrypt without FSM checks""" + + if output is None: + ciphertext = create_string_buffer(len(plaintext)) + else: + ciphertext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(plaintext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = _raw_chacha20_lib.chacha20_encrypt( + self._state.get(), + c_uint8_ptr(plaintext), + c_uint8_ptr(ciphertext), + c_size_t(len(plaintext))) + if result: + raise ValueError("Error %d while encrypting with %s" % (result, self._name)) + + if output is None: + return get_raw_buffer(ciphertext) + else: + return None + + def decrypt(self, ciphertext, output=None): + """Decrypt a piece of data. + + Args: + ciphertext(bytes/bytearray/memoryview): The data to decrypt, of any size. + Keyword Args: + output(bytes/bytearray/memoryview): The location where the plaintext + is written to. If ``None``, the plaintext is returned. + Returns: + If ``output`` is ``None``, the plaintext is returned as ``bytes``. + Otherwise, ``None``. + """ + + if "decrypt" not in self._next: + raise TypeError("Cipher object can only be used for encryption") + self._next = ("decrypt",) + + try: + return self._encrypt(ciphertext, output) + except ValueError as e: + raise ValueError(str(e).replace("enc", "dec")) + + def seek(self, position): + """Seek to a certain position in the key stream. + + Args: + position (integer): + The absolute position within the key stream, in bytes. 
+ """ + + position, offset = divmod(position, 64) + block_low = position & 0xFFFFFFFF + block_high = position >> 32 + + result = _raw_chacha20_lib.chacha20_seek( + self._state.get(), + c_ulong(block_high), + c_ulong(block_low), + offset + ) + if result: + raise ValueError("Error %d while seeking with %s" % (result, self._name)) + + +def _derive_Poly1305_key_pair(key, nonce): + """Derive a tuple (r, s, nonce) for a Poly1305 MAC. + + If nonce is ``None``, a new 12-byte nonce is generated. + """ + + if len(key) != 32: + raise ValueError("Poly1305 with ChaCha20 requires a 32-byte key") + + if nonce is None: + padded_nonce = nonce = get_random_bytes(12) + elif len(nonce) == 8: + # See RFC7539, 2.6: [...] ChaCha20 as specified here requires a 96-bit + # nonce. So if the provided nonce is only 64-bit, then the first 32 + # bits of the nonce will be set to a constant number. + # This will usually be zero, but for protocols with multiple senders it may be + # different for each sender, but should be the same for all + # invocations of the function with the same key by a particular + # sender. + padded_nonce = b'\x00\x00\x00\x00' + nonce + elif len(nonce) == 12: + padded_nonce = nonce + else: + raise ValueError("Poly1305 with ChaCha20 requires an 8- or 12-byte nonce") + + rs = new(key=key, nonce=padded_nonce).encrypt(b'\x00' * 32) + return rs[:16], rs[16:], nonce + + +def new(**kwargs): + """Create a new ChaCha20 or XChaCha20 cipher + + Keyword Args: + key (bytes/bytearray/memoryview): The secret key to use. + It must be 32 bytes long. + nonce (bytes/bytearray/memoryview): A mandatory value that + must never be reused for any other encryption + done with this key. + + For ChaCha20, it must be 8 or 12 bytes long. + + For XChaCha20, it must be 24 bytes long. + + If not provided, 8 bytes will be randomly generated + (you can find them back in the ``nonce`` attribute). + + :Return: a :class:`Crypto.Cipher.ChaCha20.ChaCha20Cipher` object + """ + + try: + key = kwargs.pop("key") + except KeyError as e: + raise TypeError("Missing parameter %s" % e) + + nonce = kwargs.pop("nonce", None) + if nonce is None: + nonce = get_random_bytes(8) + + if len(key) != 32: + raise ValueError("ChaCha20/XChaCha20 key must be 32 bytes long") + + if len(nonce) not in (8, 12, 24): + raise ValueError("Nonce must be 8/12 bytes(ChaCha20) or 24 bytes (XChaCha20)") + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return ChaCha20Cipher(key, nonce) + +# Size of a data block (in bytes) +block_size = 1 + +# Size of a key (in bytes) +key_size = 32 diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ChaCha20.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ChaCha20.pyi new file mode 100644 index 000000000..f5001cdbf --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ChaCha20.pyi @@ -0,0 +1,25 @@ +from typing import Union, overload, Optional + +Buffer = bytes|bytearray|memoryview + +def _HChaCha20(key: Buffer, nonce: Buffer) -> bytearray: ... + +class ChaCha20Cipher: + block_size: int + nonce: bytes + + def __init__(self, key: Buffer, nonce: Buffer) -> None: ... + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + def seek(self, position: int) -> None: ...
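A usage sketch for the ChaCha20 stream cipher documented above, including the ``seek()`` method; the key and message are illustrative:

from Crypto.Cipher import ChaCha20
from Crypto.Random import get_random_bytes

key = get_random_bytes(32)
cipher = ChaCha20.new(key=key)               # an 8-byte nonce is generated automatically
ciphertext = cipher.encrypt(b"a rather long secret message")

# Decrypt only the tail: seek directly to byte offset 8 of the key stream
decipher = ChaCha20.new(key=key, nonce=cipher.nonce)
decipher.seek(8)
tail = decipher.decrypt(ciphertext[8:])      # recovers the plaintext from offset 8 onward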
+ +def new(key: Buffer, nonce: Optional[Buffer] = ...) -> ChaCha20Cipher: ... + +block_size: int +key_size: int diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ChaCha20_Poly1305.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ChaCha20_Poly1305.py new file mode 100644 index 000000000..55840a567 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ChaCha20_Poly1305.py @@ -0,0 +1,336 @@ +# =================================================================== +# +# Copyright (c) 2018, Helder Eijs +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from binascii import unhexlify + +from Crypto.Cipher import ChaCha20 +from Crypto.Cipher.ChaCha20 import _HChaCha20 +from Crypto.Hash import Poly1305, BLAKE2s + +from Crypto.Random import get_random_bytes + +from Crypto.Util.number import long_to_bytes +from Crypto.Util.py3compat import _copy_bytes, bord +from Crypto.Util._raw_api import is_buffer + + +def _enum(**enums): + return type('Enum', (), enums) + + +_CipherStatus = _enum(PROCESSING_AUTH_DATA=1, + PROCESSING_CIPHERTEXT=2, + PROCESSING_DONE=3) + + +class ChaCha20Poly1305Cipher(object): + """ChaCha20-Poly1305 and XChaCha20-Poly1305 cipher object. + Do not create it directly. Use :py:func:`new` instead. + + :var nonce: The nonce with length 8, 12 or 24 bytes + :vartype nonce: byte string + """ + + def __init__(self, key, nonce): + """Initialize a ChaCha20-Poly1305 AEAD cipher object + + See also `new()` at the module level.""" + + self._next = ("update", "encrypt", "decrypt", "digest", + "verify") + + self._authenticator = Poly1305.new(key=key, nonce=nonce, cipher=ChaCha20) + + self._cipher = ChaCha20.new(key=key, nonce=nonce) + self._cipher.seek(64) # Block counter starts at 1 + + self._len_aad = 0 + self._len_ct = 0 + self._mac_tag = None + self._status = _CipherStatus.PROCESSING_AUTH_DATA + + def update(self, data): + """Protect the associated data. + + Associated data (also known as *additional authenticated data* - AAD) + is the piece of the message that must stay in the clear, while + still allowing the receiver to verify its integrity. + An example is packet headers. 
+ + The associated data (possibly split into multiple segments) is + fed into :meth:`update` before any call to :meth:`decrypt` or :meth:`encrypt`. + If there is no associated data, :meth:`update` is not called. + + :param bytes/bytearray/memoryview assoc_data: + A piece of associated data. There are no restrictions on its size. + """ + + if "update" not in self._next: + raise TypeError("update() method cannot be called") + + self._len_aad += len(data) + self._authenticator.update(data) + + def _pad_aad(self): + + assert(self._status == _CipherStatus.PROCESSING_AUTH_DATA) + if self._len_aad & 0x0F: + self._authenticator.update(b'\x00' * (16 - (self._len_aad & 0x0F))) + self._status = _CipherStatus.PROCESSING_CIPHERTEXT + + def encrypt(self, plaintext, output=None): + """Encrypt a piece of data. + + Args: + plaintext(bytes/bytearray/memoryview): The data to encrypt, of any size. + Keyword Args: + output(bytes/bytearray/memoryview): The location where the ciphertext + is written to. If ``None``, the ciphertext is returned. + Returns: + If ``output`` is ``None``, the ciphertext is returned as ``bytes``. + Otherwise, ``None``. + """ + + if "encrypt" not in self._next: + raise TypeError("encrypt() method cannot be called") + + if self._status == _CipherStatus.PROCESSING_AUTH_DATA: + self._pad_aad() + + self._next = ("encrypt", "digest") + + result = self._cipher.encrypt(plaintext, output=output) + self._len_ct += len(plaintext) + if output is None: + self._authenticator.update(result) + else: + self._authenticator.update(output) + return result + + def decrypt(self, ciphertext, output=None): + """Decrypt a piece of data. + + Args: + ciphertext(bytes/bytearray/memoryview): The data to decrypt, of any size. + Keyword Args: + output(bytes/bytearray/memoryview): The location where the plaintext + is written to. If ``None``, the plaintext is returned. + Returns: + If ``output`` is ``None``, the plaintext is returned as ``bytes``. + Otherwise, ``None``. + """ + + if "decrypt" not in self._next: + raise TypeError("decrypt() method cannot be called") + + if self._status == _CipherStatus.PROCESSING_AUTH_DATA: + self._pad_aad() + + self._next = ("decrypt", "verify") + + self._len_ct += len(ciphertext) + self._authenticator.update(ciphertext) + return self._cipher.decrypt(ciphertext, output=output) + + def _compute_mac(self): + """Finalize the cipher (if not done already) and return the MAC.""" + + if self._mac_tag: + assert(self._status == _CipherStatus.PROCESSING_DONE) + return self._mac_tag + + assert(self._status != _CipherStatus.PROCESSING_DONE) + + if self._status == _CipherStatus.PROCESSING_AUTH_DATA: + self._pad_aad() + + if self._len_ct & 0x0F: + self._authenticator.update(b'\x00' * (16 - (self._len_ct & 0x0F))) + + self._status = _CipherStatus.PROCESSING_DONE + + self._authenticator.update(long_to_bytes(self._len_aad, 8)[::-1]) + self._authenticator.update(long_to_bytes(self._len_ct, 8)[::-1]) + self._mac_tag = self._authenticator.digest() + return self._mac_tag + + def digest(self): + """Compute the *binary* authentication tag (MAC). + + :Return: the MAC tag, as 16 ``bytes``. + """ + + if "digest" not in self._next: + raise TypeError("digest() method cannot be called") + self._next = ("digest",) + + return self._compute_mac() + + def hexdigest(self): + """Compute the *printable* authentication tag (MAC). + + This method is like :meth:`digest`. + + :Return: the MAC tag, as a hexadecimal string. 
+ """ + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def verify(self, received_mac_tag): + """Validate the *binary* authentication tag (MAC). + + The receiver invokes this method at the very end, to + check if the associated data (if any) and the decrypted + messages are valid. + + :param bytes/bytearray/memoryview received_mac_tag: + This is the 16-byte *binary* MAC, as received from the sender. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + if "verify" not in self._next: + raise TypeError("verify() cannot be called" + " when encrypting a message") + self._next = ("verify",) + + secret = get_random_bytes(16) + + self._compute_mac() + + mac1 = BLAKE2s.new(digest_bits=160, key=secret, + data=self._mac_tag) + mac2 = BLAKE2s.new(digest_bits=160, key=secret, + data=received_mac_tag) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + def hexverify(self, hex_mac_tag): + """Validate the *printable* authentication tag (MAC). + + This method is like :meth:`verify`. + + :param string hex_mac_tag: + This is the *printable* MAC. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + self.verify(unhexlify(hex_mac_tag)) + + def encrypt_and_digest(self, plaintext): + """Perform :meth:`encrypt` and :meth:`digest` in one step. + + :param plaintext: The data to encrypt, of any size. + :type plaintext: bytes/bytearray/memoryview + :return: a tuple with two ``bytes`` objects: + + - the ciphertext, of equal length as the plaintext + - the 16-byte MAC tag + """ + + return self.encrypt(plaintext), self.digest() + + def decrypt_and_verify(self, ciphertext, received_mac_tag): + """Perform :meth:`decrypt` and :meth:`verify` in one step. + + :param ciphertext: The piece of data to decrypt. + :type ciphertext: bytes/bytearray/memoryview + :param bytes received_mac_tag: + This is the 16-byte *binary* MAC, as received from the sender. + :return: the decrypted data (as ``bytes``) + :raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + plaintext = self.decrypt(ciphertext) + self.verify(received_mac_tag) + return plaintext + + +def new(**kwargs): + """Create a new ChaCha20-Poly1305 or XChaCha20-Poly1305 AEAD cipher. + + :keyword key: The secret key to use. It must be 32 bytes long. + :type key: byte string + + :keyword nonce: + A value that must never be reused for any other encryption + done with this key. + + For ChaCha20-Poly1305, it must be 8 or 12 bytes long. + + For XChaCha20-Poly1305, it must be 24 bytes long. + + If not provided, 12 ``bytes`` will be generated randomly + (you can find them back in the ``nonce`` attribute). 
+ :type nonce: bytes, bytearray, memoryview + + :Return: a :class:`Crypto.Cipher.ChaCha20.ChaCha20Poly1305Cipher` object + """ + + try: + key = kwargs.pop("key") + except KeyError as e: + raise TypeError("Missing parameter %s" % e) + + if len(key) != 32: + raise ValueError("Key must be 32 bytes long") + + nonce = kwargs.pop("nonce", None) + if nonce is None: + nonce = get_random_bytes(12) + + if len(nonce) in (8, 12): + chacha20_poly1305_nonce = nonce + elif len(nonce) == 24: + key = _HChaCha20(key, nonce[:16]) + chacha20_poly1305_nonce = b'\x00\x00\x00\x00' + nonce[16:] + else: + raise ValueError("Nonce must be 8, 12 or 24 bytes long") + + if not is_buffer(nonce): + raise TypeError("nonce must be bytes, bytearray or memoryview") + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + cipher = ChaCha20Poly1305Cipher(key, chacha20_poly1305_nonce) + cipher.nonce = _copy_bytes(None, None, nonce) + return cipher + + +# Size of a key (in bytes) +key_size = 32 diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ChaCha20_Poly1305.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ChaCha20_Poly1305.pyi new file mode 100644 index 000000000..109e8051c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/ChaCha20_Poly1305.pyi @@ -0,0 +1,28 @@ +from typing import Union, Tuple, overload, Optional + +Buffer = bytes|bytearray|memoryview + +class ChaCha20Poly1305Cipher: + nonce: bytes + + def __init__(self, key: Buffer, nonce: Buffer) -> None: ... + def update(self, data: Buffer) -> None: ... + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, received_mac_tag: Buffer) -> None: ... + def hexverify(self, received_mac_tag: str) -> None: ... + def encrypt_and_digest(self, plaintext: Buffer) -> Tuple[bytes, bytes]: ... + def decrypt_and_verify(self, ciphertext: Buffer, received_mac_tag: Buffer) -> bytes: ... + +def new(key: Buffer, nonce: Optional[Buffer] = ...) -> ChaCha20Poly1305Cipher: ... + +block_size: int +key_size: int diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/DES.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/DES.py new file mode 100644 index 000000000..5cc286aee --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/DES.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- +# +# Cipher/DES.py : DES +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT.
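A usage sketch of the AEAD interface documented above (associated data via ``update()``, then the one-step encrypt/verify helpers); the header, key, and message are illustrative:

from Crypto.Cipher import ChaCha20_Poly1305
from Crypto.Random import get_random_bytes

key = get_random_bytes(32)
header = b"packet header"                    # authenticated but not encrypted

cipher = ChaCha20_Poly1305.new(key=key)      # a 12-byte nonce is generated automatically
cipher.update(header)
ciphertext, tag = cipher.encrypt_and_digest(b"payload")

decipher = ChaCha20_Poly1305.new(key=key, nonce=cipher.nonce)
decipher.update(header)
payload = decipher.decrypt_and_verify(ciphertext, tag)   # ValueError if anything was altered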
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +""" +Module's constants for the modes of operation supported with Single DES: + +:var MODE_ECB: :ref:`Electronic Code Book (ECB) ` +:var MODE_CBC: :ref:`Cipher-Block Chaining (CBC) ` +:var MODE_CFB: :ref:`Cipher FeedBack (CFB) ` +:var MODE_OFB: :ref:`Output FeedBack (OFB) ` +:var MODE_CTR: :ref:`CounTer Mode (CTR) ` +:var MODE_OPENPGP: :ref:`OpenPGP Mode ` +:var MODE_EAX: :ref:`EAX Mode ` +""" + +import sys + +from Crypto.Cipher import _create_cipher +from Crypto.Util.py3compat import byte_string +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + c_size_t, c_uint8_ptr) + +_raw_des_lib = load_pycryptodome_raw_lib( + "Crypto.Cipher._raw_des", + """ + int DES_start_operation(const uint8_t key[], + size_t key_len, + void **pResult); + int DES_encrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int DES_decrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int DES_stop_operation(void *state); + """) + + +def _create_base_cipher(dict_parameters): + """This method instantiates and returns a handle to a low-level + base cipher. It will absorb named parameters in the process.""" + + try: + key = dict_parameters.pop("key") + except KeyError: + raise TypeError("Missing 'key' parameter") + + if len(key) != key_size: + raise ValueError("Incorrect DES key length (%d bytes)" % len(key)) + + start_operation = _raw_des_lib.DES_start_operation + stop_operation = _raw_des_lib.DES_stop_operation + + cipher = VoidPointer() + result = start_operation(c_uint8_ptr(key), + c_size_t(len(key)), + cipher.address_of()) + if result: + raise ValueError("Error %X while instantiating the DES cipher" + % result) + return SmartPointer(cipher.get(), stop_operation) + + +def new(key, mode, *args, **kwargs): + """Create a new DES cipher. + + :param key: + The secret key to use in the symmetric cipher. + It must be 8 byte long. The parity bits will be ignored. + :type key: bytes/bytearray/memoryview + + :param mode: + The chaining mode to use for encryption or decryption. + :type mode: One of the supported ``MODE_*`` constants + + :Keyword Arguments: + * **iv** (*byte string*) -- + (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, + and ``MODE_OPENPGP`` modes). + + The initialization vector to use for encryption or decryption. + + For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 8 bytes long. + + For ``MODE_OPENPGP`` mode only, + it must be 8 bytes long for encryption + and 10 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + + If not provided, a random byte string is generated (you must then + read its value with the :attr:`iv` attribute). + + * **nonce** (*byte string*) -- + (Only applicable for ``MODE_EAX`` and ``MODE_CTR``). + + A value that must never be reused for any other encryption done + with this key. + + For ``MODE_EAX`` there are no + restrictions on its length (recommended: **16** bytes). + + For ``MODE_CTR``, its length must be in the range **[0..7]**. + + If not provided for ``MODE_EAX``, a random byte string is generated (you + can read it back via the ``nonce`` attribute). 
+ + * **segment_size** (*integer*) -- + (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext + are segmented in. It must be a multiple of 8. + If not specified, it will be assumed to be 8. + + * **mac_len** : (*integer*) -- + (Only ``MODE_EAX``) + Length of the authentication tag, in bytes. + It must be no longer than 8 (default). + + * **initial_value** : (*integer*) -- + (Only ``MODE_CTR``). The initial value for the counter within + the counter block. By default it is **0**. + + :Return: a DES object, of the applicable mode. + """ + + return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) + +MODE_ECB = 1 +MODE_CBC = 2 +MODE_CFB = 3 +MODE_OFB = 5 +MODE_CTR = 6 +MODE_OPENPGP = 7 +MODE_EAX = 9 + +# Size of a data block (in bytes) +block_size = 8 +# Size of a key (in bytes) +key_size = 8 diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/DES.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/DES.pyi new file mode 100644 index 000000000..dbcb5d259 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/DES.pyi @@ -0,0 +1,35 @@ +from typing import Union, Dict, Iterable, Optional + +Buffer = bytes|bytearray|memoryview + +from Crypto.Cipher._mode_ecb import EcbMode +from Crypto.Cipher._mode_cbc import CbcMode +from Crypto.Cipher._mode_cfb import CfbMode +from Crypto.Cipher._mode_ofb import OfbMode +from Crypto.Cipher._mode_ctr import CtrMode +from Crypto.Cipher._mode_openpgp import OpenPgpMode +from Crypto.Cipher._mode_eax import EaxMode + +DESMode = int + +MODE_ECB: DESMode +MODE_CBC: DESMode +MODE_CFB: DESMode +MODE_OFB: DESMode +MODE_CTR: DESMode +MODE_OPENPGP: DESMode +MODE_EAX: DESMode + +def new(key: Buffer, + mode: DESMode, + iv : Optional[Buffer] = ..., + IV : Optional[Buffer] = ..., + nonce : Optional[Buffer] = ..., + segment_size : int = ..., + mac_len : int = ..., + initial_value : Union[int, Buffer] = ..., + counter : Dict = ...) -> \ + Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode, OpenPgpMode]: ... + +block_size: int +key_size: int diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/DES3.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/DES3.py new file mode 100644 index 000000000..c0d936713 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/DES3.py @@ -0,0 +1,187 @@ +# -*- coding: utf-8 -*- +# +# Cipher/DES3.py : DES3 +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
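A usage sketch for single DES in CTR mode as documented above (a 4-byte nonce, counter starting at the default 0); the values are illustrative, and DES itself is only appropriate for legacy interoperability:

from Crypto.Cipher import DES
from Crypto.Random import get_random_bytes

key = get_random_bytes(8)                    # 8-byte key, parity bits ignored
nonce = get_random_bytes(4)                  # for DES, a CTR nonce may be 0..7 bytes
cipher = DES.new(key, DES.MODE_CTR, nonce=nonce)
ciphertext = cipher.encrypt(b"legacy data")

decipher = DES.new(key, DES.MODE_CTR, nonce=nonce)   # same nonce, counter restarts at 0
plaintext = decipher.decrypt(ciphertext)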
+# =================================================================== +""" +Module's constants for the modes of operation supported with Triple DES: + +:var MODE_ECB: :ref:`Electronic Code Book (ECB) ` +:var MODE_CBC: :ref:`Cipher-Block Chaining (CBC) ` +:var MODE_CFB: :ref:`Cipher FeedBack (CFB) ` +:var MODE_OFB: :ref:`Output FeedBack (OFB) ` +:var MODE_CTR: :ref:`CounTer Mode (CTR) ` +:var MODE_OPENPGP: :ref:`OpenPGP Mode ` +:var MODE_EAX: :ref:`EAX Mode ` +""" + +import sys + +from Crypto.Cipher import _create_cipher +from Crypto.Util.py3compat import byte_string, bchr, bord, bstr +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + c_size_t) + +_raw_des3_lib = load_pycryptodome_raw_lib( + "Crypto.Cipher._raw_des3", + """ + int DES3_start_operation(const uint8_t key[], + size_t key_len, + void **pResult); + int DES3_encrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int DES3_decrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int DES3_stop_operation(void *state); + """) + + +def adjust_key_parity(key_in): + """Set the parity bits in a TDES key. + + :param key_in: the TDES key whose bits need to be adjusted + :type key_in: byte string + + :returns: a copy of ``key_in``, with the parity bits correctly set + :rtype: byte string + + :raises ValueError: if the TDES key is not 16 or 24 bytes long + :raises ValueError: if the TDES key degenerates into Single DES + """ + + def parity_byte(key_byte): + parity = 1 + for i in range(1, 8): + parity ^= (key_byte >> i) & 1 + return (key_byte & 0xFE) | parity + + if len(key_in) not in key_size: + raise ValueError("Not a valid TDES key") + + key_out = b"".join([ bchr(parity_byte(bord(x))) for x in key_in ]) + + if key_out[:8] == key_out[8:16] or key_out[-16:-8] == key_out[-8:]: + raise ValueError("Triple DES key degenerates to single DES") + + return key_out + + +def _create_base_cipher(dict_parameters): + """This method instantiates and returns a handle to a low-level base cipher. + It will absorb named parameters in the process.""" + + try: + key_in = dict_parameters.pop("key") + except KeyError: + raise TypeError("Missing 'key' parameter") + + key = adjust_key_parity(bstr(key_in)) + + start_operation = _raw_des3_lib.DES3_start_operation + stop_operation = _raw_des3_lib.DES3_stop_operation + + cipher = VoidPointer() + result = start_operation(key, + c_size_t(len(key)), + cipher.address_of()) + if result: + raise ValueError("Error %X while instantiating the TDES cipher" + % result) + return SmartPointer(cipher.get(), stop_operation) + + +def new(key, mode, *args, **kwargs): + """Create a new Triple DES cipher. + + :param key: + The secret key to use in the symmetric cipher. + It must be 16 or 24 byte long. The parity bits will be ignored. + :type key: bytes/bytearray/memoryview + + :param mode: + The chaining mode to use for encryption or decryption. + :type mode: One of the supported ``MODE_*`` constants + + :Keyword Arguments: + * **iv** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_CBC``, ``MODE_CFB``, ``MODE_OFB``, + and ``MODE_OPENPGP`` modes). + + The initialization vector to use for encryption or decryption. + + For ``MODE_CBC``, ``MODE_CFB``, and ``MODE_OFB`` it must be 8 bytes long. + + For ``MODE_OPENPGP`` mode only, + it must be 8 bytes long for encryption + and 10 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). 
+ + If not provided, a random byte string is generated (you must then + read its value with the :attr:`iv` attribute). + + * **nonce** (*bytes*, *bytearray*, *memoryview*) -- + (Only applicable for ``MODE_EAX`` and ``MODE_CTR``). + + A value that must never be reused for any other encryption done + with this key. + + For ``MODE_EAX`` there are no + restrictions on its length (recommended: **16** bytes). + + For ``MODE_CTR``, its length must be in the range **[0..7]**. + + If not provided for ``MODE_EAX``, a random byte string is generated (you + can read it back via the ``nonce`` attribute). + + * **segment_size** (*integer*) -- + (Only ``MODE_CFB``).The number of **bits** the plaintext and ciphertext + are segmented in. It must be a multiple of 8. + If not specified, it will be assumed to be 8. + + * **mac_len** : (*integer*) -- + (Only ``MODE_EAX``) + Length of the authentication tag, in bytes. + It must be no longer than 8 (default). + + * **initial_value** : (*integer*) -- + (Only ``MODE_CTR``). The initial value for the counter within + the counter block. By default it is **0**. + + :Return: a Triple DES object, of the applicable mode. + """ + + return _create_cipher(sys.modules[__name__], key, mode, *args, **kwargs) + +MODE_ECB = 1 +MODE_CBC = 2 +MODE_CFB = 3 +MODE_OFB = 5 +MODE_CTR = 6 +MODE_OPENPGP = 7 +MODE_EAX = 9 + +# Size of a data block (in bytes) +block_size = 8 +# Size of a key (in bytes) +key_size = (16, 24) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/DES3.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/DES3.pyi new file mode 100644 index 000000000..d4dd0bb2c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/DES3.pyi @@ -0,0 +1,37 @@ +from typing import Union, Dict, Tuple, Optional + +Buffer = bytes|bytearray|memoryview + +from Crypto.Cipher._mode_ecb import EcbMode +from Crypto.Cipher._mode_cbc import CbcMode +from Crypto.Cipher._mode_cfb import CfbMode +from Crypto.Cipher._mode_ofb import OfbMode +from Crypto.Cipher._mode_ctr import CtrMode +from Crypto.Cipher._mode_openpgp import OpenPgpMode +from Crypto.Cipher._mode_eax import EaxMode + +def adjust_key_parity(key_in: bytes) -> bytes: ... + +DES3Mode = int + +MODE_ECB: DES3Mode +MODE_CBC: DES3Mode +MODE_CFB: DES3Mode +MODE_OFB: DES3Mode +MODE_CTR: DES3Mode +MODE_OPENPGP: DES3Mode +MODE_EAX: DES3Mode + +def new(key: Buffer, + mode: DES3Mode, + iv : Optional[Buffer] = ..., + IV : Optional[Buffer] = ..., + nonce : Optional[Buffer] = ..., + segment_size : int = ..., + mac_len : int = ..., + initial_value : Union[int, Buffer] = ..., + counter : Dict = ...) -> \ + Union[EcbMode, CbcMode, CfbMode, OfbMode, CtrMode, OpenPgpMode]: ... + +block_size: int +key_size: Tuple[int, int] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/PKCS1_OAEP.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/PKCS1_OAEP.py new file mode 100644 index 000000000..e87487e04 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/PKCS1_OAEP.py @@ -0,0 +1,231 @@ +# -*- coding: utf-8 -*- +# +# Cipher/PKCS1_OAEP.py : PKCS#1 OAEP +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
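A usage sketch for Triple DES combining ``adjust_key_parity()`` (defined above) with CBC mode; the retry loop guards against the rare degenerate key, ``Crypto.Util.Padding`` is assumed as for the other block ciphers, and all values are illustrative:

from Crypto.Cipher import DES3
from Crypto.Random import get_random_bytes
from Crypto.Util.Padding import pad, unpad   # assumed padding helper

while True:
    try:
        key = DES3.adjust_key_parity(get_random_bytes(24))
        break
    except ValueError:                       # key degenerated into single DES; try again
        pass

cipher = DES3.new(key, DES3.MODE_CBC)        # 8-byte IV generated, available as cipher.iv
ciphertext = cipher.encrypt(pad(b"triple des demo", DES3.block_size))
decipher = DES3.new(key, DES3.MODE_CBC, iv=cipher.iv)
plaintext = unpad(decipher.decrypt(ciphertext), DES3.block_size)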
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Signature.pss import MGF1 +import Crypto.Hash.SHA1 + +from Crypto.Util.py3compat import _copy_bytes +import Crypto.Util.number +from Crypto.Util.number import ceil_div, bytes_to_long, long_to_bytes +from Crypto.Util.strxor import strxor +from Crypto import Random +from ._pkcs1_oaep_decode import oaep_decode + + +class PKCS1OAEP_Cipher: + """Cipher object for PKCS#1 v1.5 OAEP. + Do not create directly: use :func:`new` instead.""" + + def __init__(self, key, hashAlgo, mgfunc, label, randfunc): + """Initialize this PKCS#1 OAEP cipher object. + + :Parameters: + key : an RSA key object + If a private half is given, both encryption and decryption are possible. + If a public half is given, only encryption is possible. + hashAlgo : hash object + The hash function to use. This can be a module under `Crypto.Hash` + or an existing hash object created from any of such modules. If not specified, + `Crypto.Hash.SHA1` is used. + mgfunc : callable + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + If not specified, the standard MGF1 consistent with ``hashAlgo`` is used (a safe choice). + label : bytes/bytearray/memoryview + A label to apply to this particular encryption. If not specified, + an empty string is used. Specifying a label does not improve + security. + randfunc : callable + A function that returns random bytes. + + :attention: Modify the mask generation function only if you know what you are doing. + Sender and receiver must use the same one. + """ + self._key = key + + if hashAlgo: + self._hashObj = hashAlgo + else: + self._hashObj = Crypto.Hash.SHA1 + + if mgfunc: + self._mgf = mgfunc + else: + self._mgf = lambda x, y: MGF1(x, y, self._hashObj) + + self._label = _copy_bytes(None, None, label) + self._randfunc = randfunc + + def can_encrypt(self): + """Legacy function to check if you can call :meth:`encrypt`. + + .. deprecated:: 3.0""" + return self._key.can_encrypt() + + def can_decrypt(self): + """Legacy function to check if you can call :meth:`decrypt`. + + .. deprecated:: 3.0""" + return self._key.can_decrypt() + + def encrypt(self, message): + """Encrypt a message with PKCS#1 OAEP. + + :param message: + The message to encrypt, also known as plaintext. It can be of + variable length, but not longer than the RSA modulus (in bytes) + minus 2, minus twice the hash output size. + For instance, if you use RSA 2048 and SHA-256, the longest message + you can encrypt is 190 byte long. + :type message: bytes/bytearray/memoryview + + :returns: The ciphertext, as large as the RSA modulus. + :rtype: bytes + + :raises ValueError: + if the message is too long. 
+ """ + + # See 7.1.1 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits, 8) # Convert from bits to bytes + hLen = self._hashObj.digest_size + mLen = len(message) + + # Step 1b + ps_len = k - mLen - 2 * hLen - 2 + if ps_len < 0: + raise ValueError("Plaintext is too long.") + # Step 2a + lHash = self._hashObj.new(self._label).digest() + # Step 2b + ps = b'\x00' * ps_len + # Step 2c + db = lHash + ps + b'\x01' + _copy_bytes(None, None, message) + # Step 2d + ros = self._randfunc(hLen) + # Step 2e + dbMask = self._mgf(ros, k-hLen-1) + # Step 2f + maskedDB = strxor(db, dbMask) + # Step 2g + seedMask = self._mgf(maskedDB, hLen) + # Step 2h + maskedSeed = strxor(ros, seedMask) + # Step 2i + em = b'\x00' + maskedSeed + maskedDB + # Step 3a (OS2IP) + em_int = bytes_to_long(em) + # Step 3b (RSAEP) + m_int = self._key._encrypt(em_int) + # Step 3c (I2OSP) + c = long_to_bytes(m_int, k) + return c + + def decrypt(self, ciphertext): + """Decrypt a message with PKCS#1 OAEP. + + :param ciphertext: The encrypted message. + :type ciphertext: bytes/bytearray/memoryview + + :returns: The original message (plaintext). + :rtype: bytes + + :raises ValueError: + if the ciphertext has the wrong length, or if decryption + fails the integrity check (in which case, the decryption + key is probably wrong). + :raises TypeError: + if the RSA key has no private half (i.e. you are trying + to decrypt using a public key). + """ + + # See 7.1.2 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits, 8) # Convert from bits to bytes + hLen = self._hashObj.digest_size + + # Step 1b and 1c + if len(ciphertext) != k or k < hLen+2: + raise ValueError("Ciphertext with incorrect length.") + # Step 2a (O2SIP) + ct_int = bytes_to_long(ciphertext) + # Step 2b (RSADP) and step 2c (I2OSP) + em = self._key._decrypt_to_bytes(ct_int) + # Step 3a + lHash = self._hashObj.new(self._label).digest() + # y must be 0, but we MUST NOT check it here in order not to + # allow attacks like Manger's (http://dl.acm.org/citation.cfm?id=704143) + maskedSeed = em[1:hLen+1] + maskedDB = em[hLen+1:] + # Step 3c + seedMask = self._mgf(maskedDB, hLen) + # Step 3d + seed = strxor(maskedSeed, seedMask) + # Step 3e + dbMask = self._mgf(seed, k-hLen-1) + # Step 3f + db = strxor(maskedDB, dbMask) + # Step 3b + 3g + res = oaep_decode(em, lHash, db) + if res <= 0: + raise ValueError("Incorrect decryption.") + # Step 4 + return db[res:] + + +def new(key, hashAlgo=None, mgfunc=None, label=b'', randfunc=None): + """Return a cipher object :class:`PKCS1OAEP_Cipher` + that can be used to perform PKCS#1 OAEP encryption or decryption. + + :param key: + The key object to use to encrypt or decrypt the message. + Decryption is only possible with a private RSA key. + :type key: RSA key object + + :param hashAlgo: + The hash function to use. This can be a module under `Crypto.Hash` + or an existing hash object created from any of such modules. + If not specified, `Crypto.Hash.SHA1` is used. + :type hashAlgo: hash object + + :param mgfunc: + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + If not specified, the standard MGF1 consistent with ``hashAlgo`` is used (a safe choice). + :type mgfunc: callable + + :param label: + A label to apply to this particular encryption. If not specified, + an empty string is used. Specifying a label does not improve + security. 
+ :type label: bytes/bytearray/memoryview + + :param randfunc: + A function that returns random bytes. + The default is `Random.get_random_bytes`. + :type randfunc: callable + """ + + if randfunc is None: + randfunc = Random.get_random_bytes + return PKCS1OAEP_Cipher(key, hashAlgo, mgfunc, label, randfunc) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/PKCS1_OAEP.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/PKCS1_OAEP.pyi new file mode 100644 index 000000000..6cb80da69 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/PKCS1_OAEP.pyi @@ -0,0 +1,35 @@ +from typing import Optional, Union, Callable, Any, overload +from typing_extensions import Protocol + +from Crypto.PublicKey.RSA import RsaKey + +class HashLikeClass(Protocol): + digest_size : int + def new(self, data: Optional[bytes] = ...) -> Any: ... + +class HashLikeModule(Protocol): + digest_size : int + @staticmethod + def new(data: Optional[bytes] = ...) -> Any: ... + +HashLike = Union[HashLikeClass, HashLikeModule] + +Buffer = Union[bytes, bytearray, memoryview] + +class PKCS1OAEP_Cipher: + def __init__(self, + key: RsaKey, + hashAlgo: HashLike, + mgfunc: Callable[[bytes, int], bytes], + label: Buffer, + randfunc: Callable[[int], bytes]) -> None: ... + def can_encrypt(self) -> bool: ... + def can_decrypt(self) -> bool: ... + def encrypt(self, message: Buffer) -> bytes: ... + def decrypt(self, ciphertext: Buffer) -> bytes: ... + +def new(key: RsaKey, + hashAlgo: Optional[HashLike] = ..., + mgfunc: Optional[Callable[[bytes, int], bytes]] = ..., + label: Optional[Buffer] = ..., + randfunc: Optional[Callable[[int], bytes]] = ...) -> PKCS1OAEP_Cipher: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/PKCS1_v1_5.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/PKCS1_v1_5.py new file mode 100644 index 000000000..ce30be61e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/PKCS1_v1_5.py @@ -0,0 +1,189 @@ +# -*- coding: utf-8 -*- +# +# Cipher/PKCS1-v1_5.py : PKCS#1 v1.5 +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__all__ = ['new', 'PKCS115_Cipher'] + +from Crypto import Random +from Crypto.Util.number import bytes_to_long, long_to_bytes +from Crypto.Util.py3compat import bord, is_bytes, _copy_bytes +from ._pkcs1_oaep_decode import pkcs1_decode + + +class PKCS115_Cipher: + """This cipher can perform PKCS#1 v1.5 RSA encryption or decryption. + Do not instantiate directly. 
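A usage sketch for the OAEP cipher above; ``RSA.generate`` and ``SHA256`` come from sibling packages of this library, and the 2048-bit key size is illustrative:

from Crypto.PublicKey import RSA
from Crypto.Hash import SHA256
from Crypto.Cipher import PKCS1_OAEP

key = RSA.generate(2048)
encryptor = PKCS1_OAEP.new(key.publickey(), hashAlgo=SHA256)
ciphertext = encryptor.encrypt(b"session key material")    # at most 190 bytes with SHA-256

decryptor = PKCS1_OAEP.new(key, hashAlgo=SHA256)           # private key required
plaintext = decryptor.decrypt(ciphertext)                  # ValueError if the integrity check fails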
Use :func:`Crypto.Cipher.PKCS1_v1_5.new` instead.""" + + def __init__(self, key, randfunc): + """Initialize this PKCS#1 v1.5 cipher object. + + :Parameters: + key : an RSA key object + If a private half is given, both encryption and decryption are possible. + If a public half is given, only encryption is possible. + randfunc : callable + Function that returns random bytes. + """ + + self._key = key + self._randfunc = randfunc + + def can_encrypt(self): + """Return True if this cipher object can be used for encryption.""" + return self._key.can_encrypt() + + def can_decrypt(self): + """Return True if this cipher object can be used for decryption.""" + return self._key.can_decrypt() + + def encrypt(self, message): + """Produce the PKCS#1 v1.5 encryption of a message. + + This function is named ``RSAES-PKCS1-V1_5-ENCRYPT``, and it is specified in + `section 7.2.1 of RFC8017 + `_. + + :param message: + The message to encrypt, also known as plaintext. It can be of + variable length, but not longer than the RSA modulus (in bytes) minus 11. + :type message: bytes/bytearray/memoryview + + :Returns: A byte string, the ciphertext in which the message is encrypted. + It is as long as the RSA modulus (in bytes). + + :Raises ValueError: + If the RSA key length is not sufficiently long to deal with the given + message. + """ + + # See 7.2.1 in RFC8017 + k = self._key.size_in_bytes() + mLen = len(message) + + # Step 1 + if mLen > k - 11: + raise ValueError("Plaintext is too long.") + # Step 2a + ps = [] + while len(ps) != k - mLen - 3: + new_byte = self._randfunc(1) + if bord(new_byte[0]) == 0x00: + continue + ps.append(new_byte) + ps = b"".join(ps) + # Step 2b + em = b'\x00\x02' + ps + b'\x00' + _copy_bytes(None, None, message) + # Step 3a (OS2IP) + em_int = bytes_to_long(em) + # Step 3b (RSAEP) + m_int = self._key._encrypt(em_int) + # Step 3c (I2OSP) + c = long_to_bytes(m_int, k) + return c + + def decrypt(self, ciphertext, sentinel, expected_pt_len=0): + r"""Decrypt a PKCS#1 v1.5 ciphertext. + + This is the function ``RSAES-PKCS1-V1_5-DECRYPT`` specified in + `section 7.2.2 of RFC8017 + `_. + + Args: + ciphertext (bytes/bytearray/memoryview): + The ciphertext that contains the message to recover. + sentinel (any type): + The object to return whenever an error is detected. + expected_pt_len (integer): + The length the plaintext is known to have, or 0 if unknown. + + Returns (byte string): + It is either the original message or the ``sentinel`` (in case of an error). + + .. warning:: + PKCS#1 v1.5 decryption is intrinsically vulnerable to timing + attacks (see `Bleichenbacher's`__ attack). + **Use PKCS#1 OAEP instead**. + + This implementation attempts to mitigate the risk + with some constant-time constructs. + However, they are not sufficient by themselves: the type of protocol you + implement and the way you handle errors make a big difference. + + Specifically, you should make it very hard for the (malicious) + party that submitted the ciphertext to quickly understand if decryption + succeeded or not. + + To this end, it is recommended that your protocol only encrypts + plaintexts of fixed length (``expected_pt_len``), + that ``sentinel`` is a random byte string of the same length, + and that processing continues for as long + as possible even if ``sentinel`` is returned (i.e. in case of + incorrect decryption). + + .. 
__: https://dx.doi.org/10.1007/BFb0055716 + """ + + # See 7.2.2 in RFC8017 + k = self._key.size_in_bytes() + + # Step 1 + if len(ciphertext) != k: + raise ValueError("Ciphertext with incorrect length (not %d bytes)" % k) + + # Step 2a (O2SIP) + ct_int = bytes_to_long(ciphertext) + + # Step 2b (RSADP) and Step 2c (I2OSP) + em = self._key._decrypt_to_bytes(ct_int) + + # Step 3 (not constant time when the sentinel is not a byte string) + output = bytes(bytearray(k)) + if not is_bytes(sentinel) or len(sentinel) > k: + size = pkcs1_decode(em, b'', expected_pt_len, output) + if size < 0: + return sentinel + else: + return output[size:] + + # Step 3 (somewhat constant time) + size = pkcs1_decode(em, sentinel, expected_pt_len, output) + return output[size:] + + +def new(key, randfunc=None): + """Create a cipher for performing PKCS#1 v1.5 encryption or decryption. + + :param key: + The key to use to encrypt or decrypt the message. This is a `Crypto.PublicKey.RSA` object. + Decryption is only possible if *key* is a private RSA key. + :type key: RSA key object + + :param randfunc: + Function that return random bytes. + The default is :func:`Crypto.Random.get_random_bytes`. + :type randfunc: callable + + :returns: A cipher object `PKCS115_Cipher`. + """ + + if randfunc is None: + randfunc = Random.get_random_bytes + return PKCS115_Cipher(key, randfunc) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/PKCS1_v1_5.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/PKCS1_v1_5.pyi new file mode 100644 index 000000000..1719f01a6 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/PKCS1_v1_5.pyi @@ -0,0 +1,20 @@ +from typing import Callable, Union, Any, Optional, TypeVar + +from Crypto.PublicKey.RSA import RsaKey + +Buffer = Union[bytes, bytearray, memoryview] +T = TypeVar('T') + +class PKCS115_Cipher: + def __init__(self, + key: RsaKey, + randfunc: Callable[[int], bytes]) -> None: ... + def can_encrypt(self) -> bool: ... + def can_decrypt(self) -> bool: ... + def encrypt(self, message: Buffer) -> bytes: ... + def decrypt(self, ciphertext: Buffer, + sentinel: T, + expected_pt_len: Optional[int] = ...) -> Union[bytes, T]: ... + +def new(key: RsaKey, + randfunc: Optional[Callable[[int], bytes]] = ...) -> PKCS115_Cipher: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/Salsa20.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/Salsa20.py new file mode 100644 index 000000000..62d0b292b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/Salsa20.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +# +# Cipher/Salsa20.py : Salsa20 stream cipher (http://cr.yp.to/snuffle.html) +# +# Contributed by Fabrizio Tarizzo . +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
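A usage sketch that follows the hardening advice in the docstring above: a fixed-length plaintext, a random sentinel of the same length, and no early branching on failure; the 16-byte length is illustrative:

from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_v1_5
from Crypto.Random import get_random_bytes

key = RSA.generate(2048)
session_key = get_random_bytes(16)                     # fixed-length plaintext
ciphertext = PKCS1_v1_5.new(key.publickey()).encrypt(session_key)

sentinel = get_random_bytes(16)                        # same length as the expected plaintext
recovered = PKCS1_v1_5.new(key).decrypt(ciphertext, sentinel, expected_pt_len=16)
# 'recovered' is either the session key or the sentinel; keep processing either way,
# so that a malicious sender cannot tell the two outcomes apart from timing or behaviour.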
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import _copy_bytes +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + create_string_buffer, + get_raw_buffer, VoidPointer, + SmartPointer, c_size_t, + c_uint8_ptr, is_writeable_buffer) + +from Crypto.Random import get_random_bytes + +_raw_salsa20_lib = load_pycryptodome_raw_lib("Crypto.Cipher._Salsa20", + """ + int Salsa20_stream_init(uint8_t *key, size_t keylen, + uint8_t *nonce, size_t nonce_len, + void **pSalsaState); + int Salsa20_stream_destroy(void *salsaState); + int Salsa20_stream_encrypt(void *salsaState, + const uint8_t in[], + uint8_t out[], size_t len); + """) + + +class Salsa20Cipher: + """Salsa20 cipher object. Do not create it directly. Use :py:func:`new` + instead. + + :var nonce: The nonce with length 8 + :vartype nonce: byte string + """ + + def __init__(self, key, nonce): + """Initialize a Salsa20 cipher object + + See also `new()` at the module level.""" + + if len(key) not in key_size: + raise ValueError("Incorrect key length for Salsa20 (%d bytes)" % len(key)) + + if len(nonce) != 8: + raise ValueError("Incorrect nonce length for Salsa20 (%d bytes)" % + len(nonce)) + + self.nonce = _copy_bytes(None, None, nonce) + + self._state = VoidPointer() + result = _raw_salsa20_lib.Salsa20_stream_init( + c_uint8_ptr(key), + c_size_t(len(key)), + c_uint8_ptr(nonce), + c_size_t(len(nonce)), + self._state.address_of()) + if result: + raise ValueError("Error %d instantiating a Salsa20 cipher") + self._state = SmartPointer(self._state.get(), + _raw_salsa20_lib.Salsa20_stream_destroy) + + self.block_size = 1 + self.key_size = len(key) + + def encrypt(self, plaintext, output=None): + """Encrypt a piece of data. + + Args: + plaintext(bytes/bytearray/memoryview): The data to encrypt, of any size. + Keyword Args: + output(bytes/bytearray/memoryview): The location where the ciphertext + is written to. If ``None``, the ciphertext is returned. + Returns: + If ``output`` is ``None``, the ciphertext is returned as ``bytes``. + Otherwise, ``None``. + """ + + if output is None: + ciphertext = create_string_buffer(len(plaintext)) + else: + ciphertext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(plaintext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = _raw_salsa20_lib.Salsa20_stream_encrypt( + self._state.get(), + c_uint8_ptr(plaintext), + c_uint8_ptr(ciphertext), + c_size_t(len(plaintext))) + if result: + raise ValueError("Error %d while encrypting with Salsa20" % result) + + if output is None: + return get_raw_buffer(ciphertext) + else: + return None + + def decrypt(self, ciphertext, output=None): + """Decrypt a piece of data. + + Args: + ciphertext(bytes/bytearray/memoryview): The data to decrypt, of any size. + Keyword Args: + output(bytes/bytearray/memoryview): The location where the plaintext + is written to. If ``None``, the plaintext is returned. + Returns: + If ``output`` is ``None``, the plaintext is returned as ``bytes``. + Otherwise, ``None``. 
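        As a usage sketch at module level (the 32-byte key and the message
        are illustrative; an 8-byte nonce is generated when none is given)::

            from Crypto.Cipher import Salsa20
            from Crypto.Random import get_random_bytes

            key = get_random_bytes(32)            # 16 or 32 bytes
            cipher = Salsa20.new(key=key)         # random 8-byte nonce
            ciphertext = cipher.encrypt(b"Attack at dawn")
            nonce = cipher.nonce                  # send along with the ciphertext

            # Decryption re-creates the same keystream from key and nonce
            plaintext = Salsa20.new(key=key, nonce=nonce).decrypt(ciphertext)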
+ """ + + try: + return self.encrypt(ciphertext, output=output) + except ValueError as e: + raise ValueError(str(e).replace("enc", "dec")) + + +def new(key, nonce=None): + """Create a new Salsa20 cipher + + :keyword key: The secret key to use. It must be 16 or 32 bytes long. + :type key: bytes/bytearray/memoryview + + :keyword nonce: + A value that must never be reused for any other encryption + done with this key. It must be 8 bytes long. + + If not provided, a random byte string will be generated (you can read + it back via the ``nonce`` attribute of the returned object). + :type nonce: bytes/bytearray/memoryview + + :Return: a :class:`Crypto.Cipher.Salsa20.Salsa20Cipher` object + """ + + if nonce is None: + nonce = get_random_bytes(8) + + return Salsa20Cipher(key, nonce) + +# Size of a data block (in bytes) +block_size = 1 + +# Size of a key (in bytes) +key_size = (16, 32) + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/Salsa20.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/Salsa20.pyi new file mode 100644 index 000000000..cf8690e26 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/Salsa20.pyi @@ -0,0 +1,26 @@ +from typing import Union, Tuple, Optional, overload, Optional + +Buffer = bytes|bytearray|memoryview + +class Salsa20Cipher: + nonce: bytes + block_size: int + key_size: int + + def __init__(self, + key: Buffer, + nonce: Buffer) -> None: ... + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + +def new(key: Buffer, nonce: Optional[Buffer] = ...) -> Salsa20Cipher: ... + +block_size: int +key_size: Tuple[int, int] + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_ARC4.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_ARC4.abi3.so new file mode 100755 index 000000000..451d35970 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_ARC4.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_EKSBlowfish.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_EKSBlowfish.py new file mode 100644 index 000000000..a844fae43 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_EKSBlowfish.py @@ -0,0 +1,131 @@ +# =================================================================== +# +# Copyright (c) 2019, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import sys + +from Crypto.Cipher import _create_cipher +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, c_size_t, + c_uint8_ptr, c_uint) + +_raw_blowfish_lib = load_pycryptodome_raw_lib( + "Crypto.Cipher._raw_eksblowfish", + """ + int EKSBlowfish_start_operation(const uint8_t key[], + size_t key_len, + const uint8_t salt[16], + size_t salt_len, + unsigned cost, + unsigned invert, + void **pResult); + int EKSBlowfish_encrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int EKSBlowfish_decrypt(const void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int EKSBlowfish_stop_operation(void *state); + """ + ) + + +def _create_base_cipher(dict_parameters): + """This method instantiates and returns a smart pointer to + a low-level base cipher. It will absorb named parameters in + the process.""" + + try: + key = dict_parameters.pop("key") + salt = dict_parameters.pop("salt") + cost = dict_parameters.pop("cost") + except KeyError as e: + raise TypeError("Missing EKSBlowfish parameter: " + str(e)) + invert = dict_parameters.pop("invert", True) + + if len(key) not in key_size: + raise ValueError("Incorrect EKSBlowfish key length (%d bytes)" % len(key)) + + start_operation = _raw_blowfish_lib.EKSBlowfish_start_operation + stop_operation = _raw_blowfish_lib.EKSBlowfish_stop_operation + + void_p = VoidPointer() + result = start_operation(c_uint8_ptr(key), + c_size_t(len(key)), + c_uint8_ptr(salt), + c_size_t(len(salt)), + c_uint(cost), + c_uint(int(invert)), + void_p.address_of()) + if result: + raise ValueError("Error %X while instantiating the EKSBlowfish cipher" + % result) + return SmartPointer(void_p.get(), stop_operation) + + +def new(key, mode, salt, cost, invert): + """Create a new EKSBlowfish cipher + + Args: + + key (bytes, bytearray, memoryview): + The secret key to use in the symmetric cipher. + Its length can vary from 0 to 72 bytes. + + mode (one of the supported ``MODE_*`` constants): + The chaining mode to use for encryption or decryption. + + salt (bytes, bytearray, memoryview): + The salt that bcrypt uses to thwart rainbow table attacks + + cost (integer): + The complexity factor in bcrypt + + invert (bool): + If ``False``, in the inner loop use ``ExpandKey`` first over the salt + and then over the key, as defined in + the `original bcrypt specification `_. + If ``True``, reverse the order, as in the first implementation of + `bcrypt` in OpenBSD. 
+ + :Return: an EKSBlowfish object + """ + + kwargs = { 'salt':salt, 'cost':cost, 'invert':invert } + return _create_cipher(sys.modules[__name__], key, mode, **kwargs) + + +MODE_ECB = 1 + +# Size of a data block (in bytes) +block_size = 8 +# Size of a key (in bytes) +key_size = range(0, 72 + 1) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_EKSBlowfish.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_EKSBlowfish.pyi new file mode 100644 index 000000000..95db37940 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_EKSBlowfish.pyi @@ -0,0 +1,15 @@ +from typing import Union, Iterable + +from Crypto.Cipher._mode_ecb import EcbMode + +MODE_ECB: int + +Buffer = Union[bytes, bytearray, memoryview] + +def new(key: Buffer, + mode: int, + salt: Buffer, + cost: int) -> EcbMode: ... + +block_size: int +key_size: Iterable[int] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_Salsa20.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_Salsa20.abi3.so new file mode 100755 index 000000000..a303d911a Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_Salsa20.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/__init__.py new file mode 100644 index 000000000..ba2d48559 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/__init__.py @@ -0,0 +1,79 @@ +# +# A block cipher is instantiated as a combination of: +# 1. A base cipher (such as AES) +# 2. A mode of operation (such as CBC) +# +# Both items are implemented as C modules. +# +# The API of #1 is (replace "AES" with the name of the actual cipher): +# - AES_start_operaion(key) --> base_cipher_state +# - AES_encrypt(base_cipher_state, in, out, length) +# - AES_decrypt(base_cipher_state, in, out, length) +# - AES_stop_operation(base_cipher_state) +# +# Where base_cipher_state is AES_State, a struct with BlockBase (set of +# pointers to encrypt/decrypt/stop) followed by cipher-specific data. +# +# The API of #2 is (replace "CBC" with the name of the actual mode): +# - CBC_start_operation(base_cipher_state) --> mode_state +# - CBC_encrypt(mode_state, in, out, length) +# - CBC_decrypt(mode_state, in, out, length) +# - CBC_stop_operation(mode_state) +# +# where mode_state is a a pointer to base_cipher_state plus mode-specific data. 
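# For illustration only (a sketch, not executed as part of this module): a
# public factory such as Crypto.Cipher.AES composes exactly these two layers.
# The call below makes AES create its base cipher state and then wrap it in
# the CBC mode object; the key, IV and message are illustrative values.
#
#     from Crypto.Cipher import AES
#     from Crypto.Random import get_random_bytes
#
#     key = get_random_bytes(16)
#     iv = get_random_bytes(16)
#     cipher = AES.new(key, AES.MODE_CBC, iv=iv)   # base cipher + CBC wrapper
#     ct = cipher.encrypt(b"sixteen byte msg")     # multiple of the block size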
+ +import os + +from Crypto.Cipher._mode_ecb import _create_ecb_cipher +from Crypto.Cipher._mode_cbc import _create_cbc_cipher +from Crypto.Cipher._mode_cfb import _create_cfb_cipher +from Crypto.Cipher._mode_ofb import _create_ofb_cipher +from Crypto.Cipher._mode_ctr import _create_ctr_cipher +from Crypto.Cipher._mode_openpgp import _create_openpgp_cipher +from Crypto.Cipher._mode_ccm import _create_ccm_cipher +from Crypto.Cipher._mode_eax import _create_eax_cipher +from Crypto.Cipher._mode_siv import _create_siv_cipher +from Crypto.Cipher._mode_gcm import _create_gcm_cipher +from Crypto.Cipher._mode_ocb import _create_ocb_cipher + +_modes = { 1:_create_ecb_cipher, + 2:_create_cbc_cipher, + 3:_create_cfb_cipher, + 5:_create_ofb_cipher, + 6:_create_ctr_cipher, + 7:_create_openpgp_cipher, + 9:_create_eax_cipher + } + +_extra_modes = { 8:_create_ccm_cipher, + 10:_create_siv_cipher, + 11:_create_gcm_cipher, + 12:_create_ocb_cipher + } + +def _create_cipher(factory, key, mode, *args, **kwargs): + + kwargs["key"] = key + + modes = dict(_modes) + if kwargs.pop("add_aes_modes", False): + modes.update(_extra_modes) + if not mode in modes: + raise ValueError("Mode not supported") + + if args: + if mode in (8, 9, 10, 11, 12): + if len(args) > 1: + raise TypeError("Too many arguments for this mode") + kwargs["nonce"] = args[0] + elif mode in (2, 3, 5, 7): + if len(args) > 1: + raise TypeError("Too many arguments for this mode") + kwargs["IV"] = args[0] + elif mode == 6: + if len(args) > 0: + raise TypeError("Too many arguments for this mode") + elif mode == 1: + raise TypeError("IV is not meaningful for the ECB mode") + + return modes[mode](factory, **kwargs) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/__init__.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/__init__.pyi new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_chacha20.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_chacha20.abi3.so new file mode 100755 index 000000000..f1f1fa1e4 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_chacha20.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_cbc.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_cbc.py new file mode 100644 index 000000000..36c78efe1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_cbc.py @@ -0,0 +1,293 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +Ciphertext Block Chaining (CBC) mode. +""" + +__all__ = ['CbcMode'] + +from Crypto.Util.py3compat import _copy_bytes +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, + create_string_buffer, get_raw_buffer, + SmartPointer, c_size_t, c_uint8_ptr, + is_writeable_buffer) + +from Crypto.Random import get_random_bytes + +raw_cbc_lib = load_pycryptodome_raw_lib("Crypto.Cipher._raw_cbc", """ + int CBC_start_operation(void *cipher, + const uint8_t iv[], + size_t iv_len, + void **pResult); + int CBC_encrypt(void *cbcState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int CBC_decrypt(void *cbcState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int CBC_stop_operation(void *state); + """ + ) + + +class CbcMode(object): + """*Cipher-Block Chaining (CBC)*. + + Each of the ciphertext blocks depends on the current + and all previous plaintext blocks. + + An Initialization Vector (*IV*) is required. + + See `NIST SP800-38A`_ , Section 6.2 . + + .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf + + :undocumented: __init__ + """ + + def __init__(self, block_cipher, iv): + """Create a new block cipher, configured in CBC mode. + + :Parameters: + block_cipher : C pointer + A smart pointer to the low-level block cipher instance. + + iv : bytes/bytearray/memoryview + The initialization vector to use for encryption or decryption. + It is as long as the cipher block. + + **The IV must be unpredictable**. Ideally it is picked randomly. + + Reusing the *IV* for encryptions performed with the same key + compromises confidentiality. + """ + + self._state = VoidPointer() + result = raw_cbc_lib.CBC_start_operation(block_cipher.get(), + c_uint8_ptr(iv), + c_size_t(len(iv)), + self._state.address_of()) + if result: + raise ValueError("Error %d while instantiating the CBC mode" + % result) + + # Ensure that object disposal of this Python object will (eventually) + # free the memory allocated by the raw library for the cipher mode + self._state = SmartPointer(self._state.get(), + raw_cbc_lib.CBC_stop_operation) + + # Memory allocated for the underlying block cipher is now owed + # by the cipher mode + block_cipher.release() + + self.block_size = len(iv) + """The block size of the underlying cipher, in bytes.""" + + self.iv = _copy_bytes(None, None, iv) + """The Initialization Vector originally used to create the object. + The value does not change.""" + + self.IV = self.iv + """Alias for `iv`""" + + self._next = ["encrypt", "decrypt"] + + def encrypt(self, plaintext, output=None): + """Encrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have encrypted a message + you cannot encrypt (or decrypt) another message using the same + object. + + The data to encrypt can be broken up in two or + more pieces and `encrypt` can be called multiple times. 
+ + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is equivalent to: + + >>> c.encrypt(a+b) + + That also means that you cannot reuse an object for encrypting + or decrypting other data with the same key. + + This function does not add any padding to the plaintext. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + Its lenght must be multiple of the cipher block size. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + If ``output`` is ``None``, the ciphertext is returned as ``bytes``. + Otherwise, ``None``. + """ + + if "encrypt" not in self._next: + raise TypeError("encrypt() cannot be called after decrypt()") + self._next = ["encrypt"] + + if output is None: + ciphertext = create_string_buffer(len(plaintext)) + else: + ciphertext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(plaintext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_cbc_lib.CBC_encrypt(self._state.get(), + c_uint8_ptr(plaintext), + c_uint8_ptr(ciphertext), + c_size_t(len(plaintext))) + if result: + if result == 3: + raise ValueError("Data must be padded to %d byte boundary in CBC mode" % self.block_size) + raise ValueError("Error %d while encrypting in CBC mode" % result) + + if output is None: + return get_raw_buffer(ciphertext) + else: + return None + + def decrypt(self, ciphertext, output=None): + """Decrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. + + The data to decrypt can be broken up in two or + more pieces and `decrypt` can be called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + Its length must be multiple of the cipher block size. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: + If ``output`` is ``None``, the plaintext is returned as ``bytes``. + Otherwise, ``None``. + """ + + if "decrypt" not in self._next: + raise TypeError("decrypt() cannot be called after encrypt()") + self._next = ["decrypt"] + + if output is None: + plaintext = create_string_buffer(len(ciphertext)) + else: + plaintext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(ciphertext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_cbc_lib.CBC_decrypt(self._state.get(), + c_uint8_ptr(ciphertext), + c_uint8_ptr(plaintext), + c_size_t(len(ciphertext))) + if result: + if result == 3: + raise ValueError("Data must be padded to %d byte boundary in CBC mode" % self.block_size) + raise ValueError("Error %d while decrypting in CBC mode" % result) + + if output is None: + return get_raw_buffer(plaintext) + else: + return None + + +def _create_cbc_cipher(factory, **kwargs): + """Instantiate a cipher object that performs CBC encryption/decryption. 
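    For example, with AES as the underlying factory (a sketch only; padding
    is handled here with the separate ``Crypto.Util.Padding`` helpers, and
    the key and message are illustrative)::

        from Crypto.Cipher import AES
        from Crypto.Random import get_random_bytes
        from Crypto.Util.Padding import pad, unpad

        key = get_random_bytes(16)
        data = b"CBC needs padding up to a block boundary"

        cipher = AES.new(key, AES.MODE_CBC)        # a random IV is generated
        ct = cipher.encrypt(pad(data, AES.block_size))
        iv = cipher.iv                             # transmit together with ct

        pt = unpad(AES.new(key, AES.MODE_CBC, iv=iv).decrypt(ct),
                   AES.block_size)
        assert pt == data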
+ + :Parameters: + factory : module + The underlying block cipher, a module from ``Crypto.Cipher``. + + :Keywords: + iv : bytes/bytearray/memoryview + The IV to use for CBC. + + IV : bytes/bytearray/memoryview + Alias for ``iv``. + + Any other keyword will be passed to the underlying block cipher. + See the relevant documentation for details (at least ``key`` will need + to be present). + """ + + cipher_state = factory._create_base_cipher(kwargs) + iv = kwargs.pop("IV", None) + IV = kwargs.pop("iv", None) + + if (None, None) == (iv, IV): + iv = get_random_bytes(factory.block_size) + if iv is not None: + if IV is not None: + raise TypeError("You must either use 'iv' or 'IV', not both") + else: + iv = IV + + if len(iv) != factory.block_size: + raise ValueError("Incorrect IV length (it must be %d bytes long)" % + factory.block_size) + + if kwargs: + raise TypeError("Unknown parameters for CBC: %s" % str(kwargs)) + + return CbcMode(cipher_state, iv) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_cbc.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_cbc.pyi new file mode 100644 index 000000000..8b9fb16d4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_cbc.pyi @@ -0,0 +1,25 @@ +from typing import Union, overload + +from Crypto.Util._raw_api import SmartPointer + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = ['CbcMode'] + +class CbcMode(object): + block_size: int + iv: Buffer + IV: Buffer + + def __init__(self, + block_cipher: SmartPointer, + iv: Buffer) -> None: ... + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ccm.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ccm.py new file mode 100644 index 000000000..1c1105bcb --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ccm.py @@ -0,0 +1,650 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +Counter with CBC-MAC (CCM) mode. +""" + +__all__ = ['CcmMode'] + +import struct +from binascii import unhexlify + +from Crypto.Util.py3compat import (byte_string, bord, + _copy_bytes) +from Crypto.Util._raw_api import is_writeable_buffer + +from Crypto.Util.strxor import strxor +from Crypto.Util.number import long_to_bytes + +from Crypto.Hash import BLAKE2s +from Crypto.Random import get_random_bytes + + +def enum(**enums): + return type('Enum', (), enums) + +MacStatus = enum(NOT_STARTED=0, PROCESSING_AUTH_DATA=1, PROCESSING_PLAINTEXT=2) + + +class CcmMode(object): + """Counter with CBC-MAC (CCM). + + This is an Authenticated Encryption with Associated Data (`AEAD`_) mode. + It provides both confidentiality and authenticity. + + The header of the message may be left in the clear, if needed, and it will + still be subject to authentication. The decryption step tells the receiver + if the message comes from a source that really knowns the secret key. + Additionally, decryption detects if any part of the message - including the + header - has been modified or corrupted. + + This mode requires a nonce. The nonce shall never repeat for two + different messages encrypted with the same key, but it does not need + to be random. + Note that there is a trade-off between the size of the nonce and the + maximum size of a single message you can encrypt. + + It is important to use a large nonce if the key is reused across several + messages and the nonce is chosen randomly. + + It is acceptable to us a short nonce if the key is only used a few times or + if the nonce is taken from a counter. + + The following table shows the trade-off when the nonce is chosen at + random. The column on the left shows how many messages it takes + for the keystream to repeat **on average**. In practice, you will want to + stop using the key way before that. + + +--------------------+---------------+-------------------+ + | Avg. # of messages | nonce | Max. message | + | before keystream | size | size | + | repeats | (bytes) | (bytes) | + +====================+===============+===================+ + | 2^52 | 13 | 64K | + +--------------------+---------------+-------------------+ + | 2^48 | 12 | 16M | + +--------------------+---------------+-------------------+ + | 2^44 | 11 | 4G | + +--------------------+---------------+-------------------+ + | 2^40 | 10 | 1T | + +--------------------+---------------+-------------------+ + | 2^36 | 9 | 64P | + +--------------------+---------------+-------------------+ + | 2^32 | 8 | 16E | + +--------------------+---------------+-------------------+ + + This mode is only available for ciphers that operate on 128 bits blocks + (e.g. AES but not TDES). + + See `NIST SP800-38C`_ or RFC3610_. + + .. _`NIST SP800-38C`: http://csrc.nist.gov/publications/nistpubs/800-38C/SP800-38C.pdf + .. _RFC3610: https://tools.ietf.org/html/rfc3610 + .. 
_AEAD: http://blog.cryptographyengineering.com/2012/05/how-to-choose-authenticated-encryption.html + + :undocumented: __init__ + """ + + def __init__(self, factory, key, nonce, mac_len, msg_len, assoc_len, + cipher_params): + + self.block_size = factory.block_size + """The block size of the underlying cipher, in bytes.""" + + self.nonce = _copy_bytes(None, None, nonce) + """The nonce used for this cipher instance""" + + self._factory = factory + self._key = _copy_bytes(None, None, key) + self._mac_len = mac_len + self._msg_len = msg_len + self._assoc_len = assoc_len + self._cipher_params = cipher_params + + self._mac_tag = None # Cache for MAC tag + + if self.block_size != 16: + raise ValueError("CCM mode is only available for ciphers" + " that operate on 128 bits blocks") + + # MAC tag length (Tlen) + if mac_len not in (4, 6, 8, 10, 12, 14, 16): + raise ValueError("Parameter 'mac_len' must be even" + " and in the range 4..16 (not %d)" % mac_len) + + # Nonce value + if not (nonce and 7 <= len(nonce) <= 13): + raise ValueError("Length of parameter 'nonce' must be" + " in the range 7..13 bytes") + + # Create MAC object (the tag will be the last block + # bytes worth of ciphertext) + self._mac = self._factory.new(key, + factory.MODE_CBC, + iv=b'\x00' * 16, + **cipher_params) + self._mac_status = MacStatus.NOT_STARTED + self._t = None + + # Allowed transitions after initialization + self._next = ["update", "encrypt", "decrypt", + "digest", "verify"] + + # Cumulative lengths + self._cumul_assoc_len = 0 + self._cumul_msg_len = 0 + + # Cache for unaligned associated data/plaintext. + # This is a list with byte strings, but when the MAC starts, + # it will become a binary string no longer than the block size. + self._cache = [] + + # Start CTR cipher, by formatting the counter (A.3) + q = 15 - len(nonce) # length of Q, the encoded message length + self._cipher = self._factory.new(key, + self._factory.MODE_CTR, + nonce=struct.pack("B", q - 1) + self.nonce, + **cipher_params) + + # S_0, step 6 in 6.1 for j=0 + self._s_0 = self._cipher.encrypt(b'\x00' * 16) + + # Try to start the MAC + if None not in (assoc_len, msg_len): + self._start_mac() + + def _start_mac(self): + + assert(self._mac_status == MacStatus.NOT_STARTED) + assert(None not in (self._assoc_len, self._msg_len)) + assert(isinstance(self._cache, list)) + + # Formatting control information and nonce (A.2.1) + q = 15 - len(self.nonce) # length of Q, the encoded message length + flags = (64 * (self._assoc_len > 0) + 8 * ((self._mac_len - 2) // 2) + + (q - 1)) + b_0 = struct.pack("B", flags) + self.nonce + long_to_bytes(self._msg_len, q) + + # Formatting associated data (A.2.2) + # Encoded 'a' is concatenated with the associated data 'A' + assoc_len_encoded = b'' + if self._assoc_len > 0: + if self._assoc_len < (2 ** 16 - 2 ** 8): + enc_size = 2 + elif self._assoc_len < (2 ** 32): + assoc_len_encoded = b'\xFF\xFE' + enc_size = 4 + else: + assoc_len_encoded = b'\xFF\xFF' + enc_size = 8 + assoc_len_encoded += long_to_bytes(self._assoc_len, enc_size) + + # b_0 and assoc_len_encoded must be processed first + self._cache.insert(0, b_0) + self._cache.insert(1, assoc_len_encoded) + + # Process all the data cached so far + first_data_to_mac = b"".join(self._cache) + self._cache = b"" + self._mac_status = MacStatus.PROCESSING_AUTH_DATA + self._update(first_data_to_mac) + + def _pad_cache_and_update(self): + + assert(self._mac_status != MacStatus.NOT_STARTED) + assert(len(self._cache) < self.block_size) + + # Associated data is concatenated with the 
least number + # of zero bytes (possibly none) to reach alignment to + # the 16 byte boundary (A.2.3) + len_cache = len(self._cache) + if len_cache > 0: + self._update(b'\x00' * (self.block_size - len_cache)) + + def update(self, assoc_data): + """Protect associated data + + If there is any associated data, the caller has to invoke + this function one or more times, before using + ``decrypt`` or ``encrypt``. + + By *associated data* it is meant any data (e.g. packet headers) that + will not be encrypted and will be transmitted in the clear. + However, the receiver is still able to detect any modification to it. + In CCM, the *associated data* is also called + *additional authenticated data* (AAD). + + If there is no associated data, this method must not be called. + + The caller may split associated data in segments of any size, and + invoke this method multiple times, each time with the next segment. + + :Parameters: + assoc_data : bytes/bytearray/memoryview + A piece of associated data. There are no restrictions on its size. + """ + + if "update" not in self._next: + raise TypeError("update() can only be called" + " immediately after initialization") + + self._next = ["update", "encrypt", "decrypt", + "digest", "verify"] + + self._cumul_assoc_len += len(assoc_data) + if self._assoc_len is not None and \ + self._cumul_assoc_len > self._assoc_len: + raise ValueError("Associated data is too long") + + self._update(assoc_data) + return self + + def _update(self, assoc_data_pt=b""): + """Update the MAC with associated data or plaintext + (without FSM checks)""" + + # If MAC has not started yet, we just park the data into a list. + # If the data is mutable, we create a copy and store that instead. + if self._mac_status == MacStatus.NOT_STARTED: + if is_writeable_buffer(assoc_data_pt): + assoc_data_pt = _copy_bytes(None, None, assoc_data_pt) + self._cache.append(assoc_data_pt) + return + + assert(len(self._cache) < self.block_size) + + if len(self._cache) > 0: + filler = min(self.block_size - len(self._cache), + len(assoc_data_pt)) + self._cache += _copy_bytes(None, filler, assoc_data_pt) + assoc_data_pt = _copy_bytes(filler, None, assoc_data_pt) + + if len(self._cache) < self.block_size: + return + + # The cache is exactly one block + self._t = self._mac.encrypt(self._cache) + self._cache = b"" + + update_len = len(assoc_data_pt) // self.block_size * self.block_size + self._cache = _copy_bytes(update_len, None, assoc_data_pt) + if update_len > 0: + self._t = self._mac.encrypt(assoc_data_pt[:update_len])[-16:] + + def encrypt(self, plaintext, output=None): + """Encrypt data with the key set at initialization. + + A cipher object is stateful: once you have encrypted a message + you cannot encrypt (or decrypt) another message using the same + object. + + This method can be called only **once** if ``msg_len`` was + not passed at initialization. + + If ``msg_len`` was given, the data to encrypt can be broken + up in two or more pieces and `encrypt` can be called + multiple times. + + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is equivalent to: + + >>> c.encrypt(a+b) + + This function does not add any padding to the plaintext. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + If ``output`` is ``None``, the ciphertext as ``bytes``. + Otherwise, ``None``. 
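        For example, a sketch of chunked encryption with AES when both totals
        are declared up front (nonce, header and chunk values are
        illustrative)::

            from Crypto.Cipher import AES
            from Crypto.Random import get_random_bytes

            key = get_random_bytes(16)
            nonce = get_random_bytes(11)
            header = b"packet header"
            chunks = [b"first part ", b"second part"]

            cipher = AES.new(key, AES.MODE_CCM, nonce=nonce,
                             msg_len=sum(len(c) for c in chunks),
                             assoc_len=len(header))
            cipher.update(header)
            ciphertext = b"".join(cipher.encrypt(c) for c in chunks)
            tag = cipher.digest()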
+ """ + + if "encrypt" not in self._next: + raise TypeError("encrypt() can only be called after" + " initialization or an update()") + self._next = ["encrypt", "digest"] + + # No more associated data allowed from now + if self._assoc_len is None: + assert(isinstance(self._cache, list)) + self._assoc_len = sum([len(x) for x in self._cache]) + if self._msg_len is not None: + self._start_mac() + else: + if self._cumul_assoc_len < self._assoc_len: + raise ValueError("Associated data is too short") + + # Only once piece of plaintext accepted if message length was + # not declared in advance + if self._msg_len is None: + self._msg_len = len(plaintext) + self._start_mac() + self._next = ["digest"] + + self._cumul_msg_len += len(plaintext) + if self._cumul_msg_len > self._msg_len: + raise ValueError("Message is too long") + + if self._mac_status == MacStatus.PROCESSING_AUTH_DATA: + # Associated data is concatenated with the least number + # of zero bytes (possibly none) to reach alignment to + # the 16 byte boundary (A.2.3) + self._pad_cache_and_update() + self._mac_status = MacStatus.PROCESSING_PLAINTEXT + + self._update(plaintext) + return self._cipher.encrypt(plaintext, output=output) + + def decrypt(self, ciphertext, output=None): + """Decrypt data with the key set at initialization. + + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. + + This method can be called only **once** if ``msg_len`` was + not passed at initialization. + + If ``msg_len`` was given, the data to decrypt can be + broken up in two or more pieces and `decrypt` can be + called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: + If ``output`` is ``None``, the plaintext as ``bytes``. + Otherwise, ``None``. 
+ """ + + if "decrypt" not in self._next: + raise TypeError("decrypt() can only be called" + " after initialization or an update()") + self._next = ["decrypt", "verify"] + + # No more associated data allowed from now + if self._assoc_len is None: + assert(isinstance(self._cache, list)) + self._assoc_len = sum([len(x) for x in self._cache]) + if self._msg_len is not None: + self._start_mac() + else: + if self._cumul_assoc_len < self._assoc_len: + raise ValueError("Associated data is too short") + + # Only once piece of ciphertext accepted if message length was + # not declared in advance + if self._msg_len is None: + self._msg_len = len(ciphertext) + self._start_mac() + self._next = ["verify"] + + self._cumul_msg_len += len(ciphertext) + if self._cumul_msg_len > self._msg_len: + raise ValueError("Message is too long") + + if self._mac_status == MacStatus.PROCESSING_AUTH_DATA: + # Associated data is concatenated with the least number + # of zero bytes (possibly none) to reach alignment to + # the 16 byte boundary (A.2.3) + self._pad_cache_and_update() + self._mac_status = MacStatus.PROCESSING_PLAINTEXT + + # Encrypt is equivalent to decrypt with the CTR mode + plaintext = self._cipher.encrypt(ciphertext, output=output) + if output is None: + self._update(plaintext) + else: + self._update(output) + return plaintext + + def digest(self): + """Compute the *binary* MAC tag. + + The caller invokes this function at the very end. + + This method returns the MAC that shall be sent to the receiver, + together with the ciphertext. + + :Return: the MAC, as a byte string. + """ + + if "digest" not in self._next: + raise TypeError("digest() cannot be called when decrypting" + " or validating a message") + self._next = ["digest"] + return self._digest() + + def _digest(self): + if self._mac_tag: + return self._mac_tag + + if self._assoc_len is None: + assert(isinstance(self._cache, list)) + self._assoc_len = sum([len(x) for x in self._cache]) + if self._msg_len is not None: + self._start_mac() + else: + if self._cumul_assoc_len < self._assoc_len: + raise ValueError("Associated data is too short") + + if self._msg_len is None: + self._msg_len = 0 + self._start_mac() + + if self._cumul_msg_len != self._msg_len: + raise ValueError("Message is too short") + + # Both associated data and payload are concatenated with the least + # number of zero bytes (possibly none) that align it to the + # 16 byte boundary (A.2.2 and A.2.3) + self._pad_cache_and_update() + + # Step 8 in 6.1 (T xor MSB_Tlen(S_0)) + self._mac_tag = strxor(self._t, self._s_0)[:self._mac_len] + + return self._mac_tag + + def hexdigest(self): + """Compute the *printable* MAC tag. + + This method is like `digest`. + + :Return: the MAC, as a hexadecimal string. + """ + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def verify(self, received_mac_tag): + """Validate the *binary* MAC tag. + + The caller invokes this function at the very end. + + This method checks if the decrypted message is indeed valid + (that is, if the key is correct) and it has not been + tampered with while in transit. + + :Parameters: + received_mac_tag : bytes/bytearray/memoryview + This is the *binary* MAC, as received from the sender. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. 
+ """ + + if "verify" not in self._next: + raise TypeError("verify() cannot be called" + " when encrypting a message") + self._next = ["verify"] + + self._digest() + secret = get_random_bytes(16) + + mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=self._mac_tag) + mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=received_mac_tag) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + def hexverify(self, hex_mac_tag): + """Validate the *printable* MAC tag. + + This method is like `verify`. + + :Parameters: + hex_mac_tag : string + This is the *printable* MAC, as received from the sender. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + self.verify(unhexlify(hex_mac_tag)) + + def encrypt_and_digest(self, plaintext, output=None): + """Perform encrypt() and digest() in one step. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + a tuple with two items: + + - the ciphertext, as ``bytes`` + - the MAC tag, as ``bytes`` + + The first item becomes ``None`` when the ``output`` parameter + specified a location for the result. + """ + + return self.encrypt(plaintext, output=output), self.digest() + + def decrypt_and_verify(self, ciphertext, received_mac_tag, output=None): + """Perform decrypt() and verify() in one step. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + received_mac_tag : bytes/bytearray/memoryview + This is the *binary* MAC, as received from the sender. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: the plaintext as ``bytes`` or ``None`` when the ``output`` + parameter specified a location for the result. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + plaintext = self.decrypt(ciphertext, output=output) + self.verify(received_mac_tag) + return plaintext + + +def _create_ccm_cipher(factory, **kwargs): + """Create a new block cipher, configured in CCM mode. + + :Parameters: + factory : module + A symmetric cipher module from `Crypto.Cipher` (like + `Crypto.Cipher.AES`). + + :Keywords: + key : bytes/bytearray/memoryview + The secret key to use in the symmetric cipher. + + nonce : bytes/bytearray/memoryview + A value that must never be reused for any other encryption. + + Its length must be in the range ``[7..13]``. + 11 or 12 bytes are reasonable values in general. Bear in + mind that with CCM there is a trade-off between nonce length and + maximum message size. + + If not specified, a 11 byte long random string is used. + + mac_len : integer + Length of the MAC, in bytes. It must be even and in + the range ``[4..16]``. The default is 16. + + msg_len : integer + Length of the message to (de)cipher. + If not specified, ``encrypt`` or ``decrypt`` may only be called once. + + assoc_len : integer + Length of the associated data. + If not specified, all data is internally buffered. 
+ """ + + try: + key = key = kwargs.pop("key") + except KeyError as e: + raise TypeError("Missing parameter: " + str(e)) + + nonce = kwargs.pop("nonce", None) # N + if nonce is None: + nonce = get_random_bytes(11) + mac_len = kwargs.pop("mac_len", factory.block_size) + msg_len = kwargs.pop("msg_len", None) # p + assoc_len = kwargs.pop("assoc_len", None) # a + cipher_params = dict(kwargs) + + return CcmMode(factory, key, nonce, mac_len, msg_len, + assoc_len, cipher_params) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ccm.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ccm.pyi new file mode 100644 index 000000000..4b9f6200d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ccm.pyi @@ -0,0 +1,47 @@ +from types import ModuleType +from typing import Union, overload, Dict, Tuple, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = ['CcmMode'] + +class CcmMode(object): + block_size: int + nonce: bytes + + def __init__(self, + factory: ModuleType, + key: Buffer, + nonce: Buffer, + mac_len: int, + msg_len: int, + assoc_len: int, + cipher_params: Dict) -> None: ... + + def update(self, assoc_data: Buffer) -> CcmMode: ... + + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, received_mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + + @overload + def encrypt_and_digest(self, + plaintext: Buffer) -> Tuple[bytes, bytes]: ... + @overload + def encrypt_and_digest(self, + plaintext: Buffer, + output: Buffer) -> Tuple[None, bytes]: ... + def decrypt_and_verify(self, + ciphertext: Buffer, + received_mac_tag: Buffer, + output: Optional[Union[bytearray, memoryview]] = ...) -> bytes: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_cfb.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_cfb.py new file mode 100644 index 000000000..482e44cb5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_cfb.py @@ -0,0 +1,293 @@ +# -*- coding: utf-8 -*- +# +# Cipher/mode_cfb.py : CFB mode +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +""" +Counter Feedback (CFB) mode. 
+""" + +__all__ = ['CfbMode'] + +from Crypto.Util.py3compat import _copy_bytes +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, + create_string_buffer, get_raw_buffer, + SmartPointer, c_size_t, c_uint8_ptr, + is_writeable_buffer) + +from Crypto.Random import get_random_bytes + +raw_cfb_lib = load_pycryptodome_raw_lib("Crypto.Cipher._raw_cfb",""" + int CFB_start_operation(void *cipher, + const uint8_t iv[], + size_t iv_len, + size_t segment_len, /* In bytes */ + void **pResult); + int CFB_encrypt(void *cfbState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int CFB_decrypt(void *cfbState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int CFB_stop_operation(void *state);""" + ) + + +class CfbMode(object): + """*Cipher FeedBack (CFB)*. + + This mode is similar to CFB, but it transforms + the underlying block cipher into a stream cipher. + + Plaintext and ciphertext are processed in *segments* + of **s** bits. The mode is therefore sometimes + labelled **s**-bit CFB. + + An Initialization Vector (*IV*) is required. + + See `NIST SP800-38A`_ , Section 6.3. + + .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf + + :undocumented: __init__ + """ + + def __init__(self, block_cipher, iv, segment_size): + """Create a new block cipher, configured in CFB mode. + + :Parameters: + block_cipher : C pointer + A smart pointer to the low-level block cipher instance. + + iv : bytes/bytearray/memoryview + The initialization vector to use for encryption or decryption. + It is as long as the cipher block. + + **The IV must be unpredictable**. Ideally it is picked randomly. + + Reusing the *IV* for encryptions performed with the same key + compromises confidentiality. + + segment_size : integer + The number of bytes the plaintext and ciphertext are segmented in. + """ + + self._state = VoidPointer() + result = raw_cfb_lib.CFB_start_operation(block_cipher.get(), + c_uint8_ptr(iv), + c_size_t(len(iv)), + c_size_t(segment_size), + self._state.address_of()) + if result: + raise ValueError("Error %d while instantiating the CFB mode" % result) + + # Ensure that object disposal of this Python object will (eventually) + # free the memory allocated by the raw library for the cipher mode + self._state = SmartPointer(self._state.get(), + raw_cfb_lib.CFB_stop_operation) + + # Memory allocated for the underlying block cipher is now owed + # by the cipher mode + block_cipher.release() + + self.block_size = len(iv) + """The block size of the underlying cipher, in bytes.""" + + self.iv = _copy_bytes(None, None, iv) + """The Initialization Vector originally used to create the object. + The value does not change.""" + + self.IV = self.iv + """Alias for `iv`""" + + self._next = ["encrypt", "decrypt"] + + def encrypt(self, plaintext, output=None): + """Encrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have encrypted a message + you cannot encrypt (or decrypt) another message using the same + object. + + The data to encrypt can be broken up in two or + more pieces and `encrypt` can be called multiple times. + + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is equivalent to: + + >>> c.encrypt(a+b) + + This function does not add any padding to the plaintext. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + It can be of any length. 
+ :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + If ``output`` is ``None``, the ciphertext is returned as ``bytes``. + Otherwise, ``None``. + """ + + if "encrypt" not in self._next: + raise TypeError("encrypt() cannot be called after decrypt()") + self._next = ["encrypt"] + + if output is None: + ciphertext = create_string_buffer(len(plaintext)) + else: + ciphertext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(plaintext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_cfb_lib.CFB_encrypt(self._state.get(), + c_uint8_ptr(plaintext), + c_uint8_ptr(ciphertext), + c_size_t(len(plaintext))) + if result: + raise ValueError("Error %d while encrypting in CFB mode" % result) + + if output is None: + return get_raw_buffer(ciphertext) + else: + return None + + def decrypt(self, ciphertext, output=None): + """Decrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. + + The data to decrypt can be broken up in two or + more pieces and `decrypt` can be called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: + If ``output`` is ``None``, the plaintext is returned as ``bytes``. + Otherwise, ``None``. + """ + + if "decrypt" not in self._next: + raise TypeError("decrypt() cannot be called after encrypt()") + self._next = ["decrypt"] + + if output is None: + plaintext = create_string_buffer(len(ciphertext)) + else: + plaintext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(ciphertext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_cfb_lib.CFB_decrypt(self._state.get(), + c_uint8_ptr(ciphertext), + c_uint8_ptr(plaintext), + c_size_t(len(ciphertext))) + if result: + raise ValueError("Error %d while decrypting in CFB mode" % result) + + if output is None: + return get_raw_buffer(plaintext) + else: + return None + + +def _create_cfb_cipher(factory, **kwargs): + """Instantiate a cipher object that performs CFB encryption/decryption. + + :Parameters: + factory : module + The underlying block cipher, a module from ``Crypto.Cipher``. + + :Keywords: + iv : bytes/bytearray/memoryview + The IV to use for CFB. + + IV : bytes/bytearray/memoryview + Alias for ``iv``. + + segment_size : integer + The number of bit the plaintext and ciphertext are segmented in. + If not present, the default is 8. + + Any other keyword will be passed to the underlying block cipher. + See the relevant documentation for details (at least ``key`` will need + to be present). 
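    For example, with AES (a sketch; key, IV and messages are illustrative)::

        from Crypto.Cipher import AES
        from Crypto.Random import get_random_bytes

        key = get_random_bytes(16)
        iv = get_random_bytes(16)

        # Default 8-bit CFB: data of any length is accepted
        ct8 = AES.new(key, AES.MODE_CFB, iv=iv).encrypt(b"any length is fine")

        # Full-block CFB-128: segment_size is given in bits; the data here is
        # kept a multiple of the segment size
        cipher = AES.new(key, AES.MODE_CFB, iv=iv, segment_size=128)
        ct128 = cipher.encrypt(b"0123456789abcdef" * 2)
        pt = AES.new(key, AES.MODE_CFB, iv=iv, segment_size=128).decrypt(ct128)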
+ """ + + cipher_state = factory._create_base_cipher(kwargs) + + iv = kwargs.pop("IV", None) + IV = kwargs.pop("iv", None) + + if (None, None) == (iv, IV): + iv = get_random_bytes(factory.block_size) + if iv is not None: + if IV is not None: + raise TypeError("You must either use 'iv' or 'IV', not both") + else: + iv = IV + + if len(iv) != factory.block_size: + raise ValueError("Incorrect IV length (it must be %d bytes long)" % + factory.block_size) + + segment_size_bytes, rem = divmod(kwargs.pop("segment_size", 8), 8) + if segment_size_bytes == 0 or rem != 0: + raise ValueError("'segment_size' must be positive and multiple of 8 bits") + + if kwargs: + raise TypeError("Unknown parameters for CFB: %s" % str(kwargs)) + return CfbMode(cipher_state, iv, segment_size_bytes) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_cfb.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_cfb.pyi new file mode 100644 index 000000000..e13a909b1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_cfb.pyi @@ -0,0 +1,26 @@ +from typing import Union, overload + +from Crypto.Util._raw_api import SmartPointer + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = ['CfbMode'] + + +class CfbMode(object): + block_size: int + iv: Buffer + IV: Buffer + + def __init__(self, + block_cipher: SmartPointer, + iv: Buffer, + segment_size: int) -> None: ... + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ctr.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ctr.py new file mode 100644 index 000000000..81f2be9c3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ctr.py @@ -0,0 +1,393 @@ +# -*- coding: utf-8 -*- +# +# Cipher/mode_ctr.py : CTR mode +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +""" +Counter (CTR) mode. 
+""" + +__all__ = ['CtrMode'] + +import struct + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, + create_string_buffer, get_raw_buffer, + SmartPointer, c_size_t, c_uint8_ptr, + is_writeable_buffer) + +from Crypto.Random import get_random_bytes +from Crypto.Util.py3compat import _copy_bytes, is_native_int +from Crypto.Util.number import long_to_bytes + +raw_ctr_lib = load_pycryptodome_raw_lib("Crypto.Cipher._raw_ctr", """ + int CTR_start_operation(void *cipher, + uint8_t initialCounterBlock[], + size_t initialCounterBlock_len, + size_t prefix_len, + unsigned counter_len, + unsigned littleEndian, + void **pResult); + int CTR_encrypt(void *ctrState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int CTR_decrypt(void *ctrState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int CTR_stop_operation(void *ctrState);""" + ) + + +class CtrMode(object): + """*CounTeR (CTR)* mode. + + This mode is very similar to ECB, in that + encryption of one block is done independently of all other blocks. + + Unlike ECB, the block *position* contributes to the encryption + and no information leaks about symbol frequency. + + Each message block is associated to a *counter* which + must be unique across all messages that get encrypted + with the same key (not just within the same message). + The counter is as big as the block size. + + Counters can be generated in several ways. The most + straightword one is to choose an *initial counter block* + (which can be made public, similarly to the *IV* for the + other modes) and increment its lowest **m** bits by one + (modulo *2^m*) for each block. In most cases, **m** is + chosen to be half the block size. + + See `NIST SP800-38A`_, Section 6.5 (for the mode) and + Appendix B (for how to manage the *initial counter block*). + + .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf + + :undocumented: __init__ + """ + + def __init__(self, block_cipher, initial_counter_block, + prefix_len, counter_len, little_endian): + """Create a new block cipher, configured in CTR mode. + + :Parameters: + block_cipher : C pointer + A smart pointer to the low-level block cipher instance. + + initial_counter_block : bytes/bytearray/memoryview + The initial plaintext to use to generate the key stream. + + It is as large as the cipher block, and it embeds + the initial value of the counter. + + This value must not be reused. + It shall contain a nonce or a random component. + Reusing the *initial counter block* for encryptions + performed with the same key compromises confidentiality. + + prefix_len : integer + The amount of bytes at the beginning of the counter block + that never change. + + counter_len : integer + The length in bytes of the counter embedded in the counter + block. + + little_endian : boolean + True if the counter in the counter block is an integer encoded + in little endian mode. If False, it is big endian. 
+ """ + + if len(initial_counter_block) == prefix_len + counter_len: + self.nonce = _copy_bytes(None, prefix_len, initial_counter_block) + """Nonce; not available if there is a fixed suffix""" + + self._state = VoidPointer() + result = raw_ctr_lib.CTR_start_operation(block_cipher.get(), + c_uint8_ptr(initial_counter_block), + c_size_t(len(initial_counter_block)), + c_size_t(prefix_len), + counter_len, + little_endian, + self._state.address_of()) + if result: + raise ValueError("Error %X while instantiating the CTR mode" + % result) + + # Ensure that object disposal of this Python object will (eventually) + # free the memory allocated by the raw library for the cipher mode + self._state = SmartPointer(self._state.get(), + raw_ctr_lib.CTR_stop_operation) + + # Memory allocated for the underlying block cipher is now owed + # by the cipher mode + block_cipher.release() + + self.block_size = len(initial_counter_block) + """The block size of the underlying cipher, in bytes.""" + + self._next = ["encrypt", "decrypt"] + + def encrypt(self, plaintext, output=None): + """Encrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have encrypted a message + you cannot encrypt (or decrypt) another message using the same + object. + + The data to encrypt can be broken up in two or + more pieces and `encrypt` can be called multiple times. + + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is equivalent to: + + >>> c.encrypt(a+b) + + This function does not add any padding to the plaintext. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + If ``output`` is ``None``, the ciphertext is returned as ``bytes``. + Otherwise, ``None``. + """ + + if "encrypt" not in self._next: + raise TypeError("encrypt() cannot be called after decrypt()") + self._next = ["encrypt"] + + if output is None: + ciphertext = create_string_buffer(len(plaintext)) + else: + ciphertext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(plaintext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_ctr_lib.CTR_encrypt(self._state.get(), + c_uint8_ptr(plaintext), + c_uint8_ptr(ciphertext), + c_size_t(len(plaintext))) + if result: + if result == 0x60002: + raise OverflowError("The counter has wrapped around in" + " CTR mode") + raise ValueError("Error %X while encrypting in CTR mode" % result) + + if output is None: + return get_raw_buffer(ciphertext) + else: + return None + + def decrypt(self, ciphertext, output=None): + """Decrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. + + The data to decrypt can be broken up in two or + more pieces and `decrypt` can be called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + It can be of any length. 
+ :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: + If ``output`` is ``None``, the plaintext is returned as ``bytes``. + Otherwise, ``None``. + """ + + if "decrypt" not in self._next: + raise TypeError("decrypt() cannot be called after encrypt()") + self._next = ["decrypt"] + + if output is None: + plaintext = create_string_buffer(len(ciphertext)) + else: + plaintext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(ciphertext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_ctr_lib.CTR_decrypt(self._state.get(), + c_uint8_ptr(ciphertext), + c_uint8_ptr(plaintext), + c_size_t(len(ciphertext))) + if result: + if result == 0x60002: + raise OverflowError("The counter has wrapped around in" + " CTR mode") + raise ValueError("Error %X while decrypting in CTR mode" % result) + + if output is None: + return get_raw_buffer(plaintext) + else: + return None + + +def _create_ctr_cipher(factory, **kwargs): + """Instantiate a cipher object that performs CTR encryption/decryption. + + :Parameters: + factory : module + The underlying block cipher, a module from ``Crypto.Cipher``. + + :Keywords: + nonce : bytes/bytearray/memoryview + The fixed part at the beginning of the counter block - the rest is + the counter number that gets increased when processing the next block. + The nonce must be such that no two messages are encrypted under the + same key and the same nonce. + + The nonce must be shorter than the block size (it can have + zero length; the counter is then as long as the block). + + If this parameter is not present, a random nonce will be created with + length equal to half the block size. No random nonce shorter than + 64 bits will be created though - you must really think through all + security consequences of using such a short block size. + + initial_value : posive integer or bytes/bytearray/memoryview + The initial value for the counter. If not present, the cipher will + start counting from 0. The value is incremented by one for each block. + The counter number is encoded in big endian mode. + + counter : object + Instance of ``Crypto.Util.Counter``, which allows full customization + of the counter block. This parameter is incompatible to both ``nonce`` + and ``initial_value``. + + Any other keyword will be passed to the underlying block cipher. + See the relevant documentation for details (at least ``key`` will need + to be present). 
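The keyword handling documented above splits the counter block into a fixed ``nonce`` prefix and a big-endian counter seeded by ``initial_value``. A minimal sketch of that common case, assuming AES and illustrative values:

    from Crypto.Cipher import AES
    from Crypto.Random import get_random_bytes

    key = get_random_bytes(16)
    nonce = get_random_bytes(8)        # 8-byte prefix leaves an 8-byte counter for AES

    enc = AES.new(key, AES.MODE_CTR, nonce=nonce, initial_value=0)
    ct = enc.encrypt(b"counter mode acts as a stream cipher")

    # Decryption rebuilds the same keystream from key, nonce and initial_value.
    dec = AES.new(key, AES.MODE_CTR, nonce=nonce, initial_value=0)
    assert dec.decrypt(ct) == b"counter mode acts as a stream cipher"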
+ """ + + cipher_state = factory._create_base_cipher(kwargs) + + counter = kwargs.pop("counter", None) + nonce = kwargs.pop("nonce", None) + initial_value = kwargs.pop("initial_value", None) + if kwargs: + raise TypeError("Invalid parameters for CTR mode: %s" % str(kwargs)) + + if counter is not None and (nonce, initial_value) != (None, None): + raise TypeError("'counter' and 'nonce'/'initial_value'" + " are mutually exclusive") + + if counter is None: + # Crypto.Util.Counter is not used + if nonce is None: + if factory.block_size < 16: + raise TypeError("Impossible to create a safe nonce for short" + " block sizes") + nonce = get_random_bytes(factory.block_size // 2) + else: + if len(nonce) >= factory.block_size: + raise ValueError("Nonce is too long") + + # What is not nonce is counter + counter_len = factory.block_size - len(nonce) + + if initial_value is None: + initial_value = 0 + + if is_native_int(initial_value): + if (1 << (counter_len * 8)) - 1 < initial_value: + raise ValueError("Initial counter value is too large") + initial_counter_block = nonce + long_to_bytes(initial_value, counter_len) + else: + if len(initial_value) != counter_len: + raise ValueError("Incorrect length for counter byte string (%d bytes, expected %d)" % + (len(initial_value), counter_len)) + initial_counter_block = nonce + initial_value + + return CtrMode(cipher_state, + initial_counter_block, + len(nonce), # prefix + counter_len, + False) # little_endian + + # Crypto.Util.Counter is used + + # 'counter' used to be a callable object, but now it is + # just a dictionary for backward compatibility. + _counter = dict(counter) + try: + counter_len = _counter.pop("counter_len") + prefix = _counter.pop("prefix") + suffix = _counter.pop("suffix") + initial_value = _counter.pop("initial_value") + little_endian = _counter.pop("little_endian") + except KeyError: + raise TypeError("Incorrect counter object" + " (use Crypto.Util.Counter.new)") + + # Compute initial counter block + words = [] + while initial_value > 0: + words.append(struct.pack('B', initial_value & 255)) + initial_value >>= 8 + words += [b'\x00'] * max(0, counter_len - len(words)) + if not little_endian: + words.reverse() + initial_counter_block = prefix + b"".join(words) + suffix + + if len(initial_counter_block) != factory.block_size: + raise ValueError("Size of the counter block (%d bytes) must match" + " block size (%d)" % (len(initial_counter_block), + factory.block_size)) + + return CtrMode(cipher_state, initial_counter_block, + len(prefix), counter_len, little_endian) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ctr.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ctr.pyi new file mode 100644 index 000000000..ce708557d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ctr.pyi @@ -0,0 +1,27 @@ +from typing import Union, overload + +from Crypto.Util._raw_api import SmartPointer + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = ['CtrMode'] + +class CtrMode(object): + block_size: int + nonce: bytes + + def __init__(self, + block_cipher: SmartPointer, + initial_counter_block: Buffer, + prefix_len: int, + counter_len: int, + little_endian: bool) -> None: ... + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... 
+ @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_eax.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_eax.py new file mode 100644 index 000000000..62cf4d8b3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_eax.py @@ -0,0 +1,408 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +EAX mode. +""" + +__all__ = ['EaxMode'] + +import struct +from binascii import unhexlify + +from Crypto.Util.py3compat import byte_string, bord, _copy_bytes + +from Crypto.Util._raw_api import is_buffer + +from Crypto.Util.strxor import strxor +from Crypto.Util.number import long_to_bytes, bytes_to_long + +from Crypto.Hash import CMAC, BLAKE2s +from Crypto.Random import get_random_bytes + + +class EaxMode(object): + """*EAX* mode. + + This is an Authenticated Encryption with Associated Data + (`AEAD`_) mode. It provides both confidentiality and authenticity. + + The header of the message may be left in the clear, if needed, + and it will still be subject to authentication. + + The decryption step tells the receiver if the message comes + from a source that really knowns the secret key. + Additionally, decryption detects if any part of the message - + including the header - has been modified or corrupted. + + This mode requires a *nonce*. + + This mode is only available for ciphers that operate on 64 or + 128 bits blocks. + + There are no official standards defining EAX. + The implementation is based on `a proposal`__ that + was presented to NIST. + + .. _AEAD: http://blog.cryptographyengineering.com/2012/05/how-to-choose-authenticated-encryption.html + .. 
__: http://csrc.nist.gov/groups/ST/toolkit/BCM/documents/proposedmodes/eax/eax-spec.pdf + + :undocumented: __init__ + """ + + def __init__(self, factory, key, nonce, mac_len, cipher_params): + """EAX cipher mode""" + + self.block_size = factory.block_size + """The block size of the underlying cipher, in bytes.""" + + self.nonce = _copy_bytes(None, None, nonce) + """The nonce originally used to create the object.""" + + self._mac_len = mac_len + self._mac_tag = None # Cache for MAC tag + + # Allowed transitions after initialization + self._next = ["update", "encrypt", "decrypt", + "digest", "verify"] + + # MAC tag length + if not (2 <= self._mac_len <= self.block_size): + raise ValueError("'mac_len' must be at least 2 and not larger than %d" + % self.block_size) + + # Nonce cannot be empty and must be a byte string + if len(self.nonce) == 0: + raise ValueError("Nonce cannot be empty in EAX mode") + if not is_buffer(nonce): + raise TypeError("nonce must be bytes, bytearray or memoryview") + + self._omac = [ + CMAC.new(key, + b'\x00' * (self.block_size - 1) + struct.pack('B', i), + ciphermod=factory, + cipher_params=cipher_params) + for i in range(0, 3) + ] + + # Compute MAC of nonce + self._omac[0].update(self.nonce) + self._signer = self._omac[1] + + # MAC of the nonce is also the initial counter for CTR encryption + counter_int = bytes_to_long(self._omac[0].digest()) + self._cipher = factory.new(key, + factory.MODE_CTR, + initial_value=counter_int, + nonce=b"", + **cipher_params) + + def update(self, assoc_data): + """Protect associated data + + If there is any associated data, the caller has to invoke + this function one or more times, before using + ``decrypt`` or ``encrypt``. + + By *associated data* it is meant any data (e.g. packet headers) that + will not be encrypted and will be transmitted in the clear. + However, the receiver is still able to detect any modification to it. + + If there is no associated data, this method must not be called. + + The caller may split associated data in segments of any size, and + invoke this method multiple times, each time with the next segment. + + :Parameters: + assoc_data : bytes/bytearray/memoryview + A piece of associated data. There are no restrictions on its size. + """ + + if "update" not in self._next: + raise TypeError("update() can only be called" + " immediately after initialization") + + self._next = ["update", "encrypt", "decrypt", + "digest", "verify"] + + self._signer.update(assoc_data) + return self + + def encrypt(self, plaintext, output=None): + """Encrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have encrypted a message + you cannot encrypt (or decrypt) another message using the same + object. + + The data to encrypt can be broken up in two or + more pieces and `encrypt` can be called multiple times. + + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is equivalent to: + + >>> c.encrypt(a+b) + + This function does not add any padding to the plaintext. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + If ``output`` is ``None``, the ciphertext as ``bytes``. + Otherwise, ``None``. 
+ """ + + if "encrypt" not in self._next: + raise TypeError("encrypt() can only be called after" + " initialization or an update()") + self._next = ["encrypt", "digest"] + ct = self._cipher.encrypt(plaintext, output=output) + if output is None: + self._omac[2].update(ct) + else: + self._omac[2].update(output) + return ct + + def decrypt(self, ciphertext, output=None): + """Decrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. + + The data to decrypt can be broken up in two or + more pieces and `decrypt` can be called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: + If ``output`` is ``None``, the plaintext as ``bytes``. + Otherwise, ``None``. + """ + + if "decrypt" not in self._next: + raise TypeError("decrypt() can only be called" + " after initialization or an update()") + self._next = ["decrypt", "verify"] + self._omac[2].update(ciphertext) + return self._cipher.decrypt(ciphertext, output=output) + + def digest(self): + """Compute the *binary* MAC tag. + + The caller invokes this function at the very end. + + This method returns the MAC that shall be sent to the receiver, + together with the ciphertext. + + :Return: the MAC, as a byte string. + """ + + if "digest" not in self._next: + raise TypeError("digest() cannot be called when decrypting" + " or validating a message") + self._next = ["digest"] + + if not self._mac_tag: + tag = b'\x00' * self.block_size + for i in range(3): + tag = strxor(tag, self._omac[i].digest()) + self._mac_tag = tag[:self._mac_len] + + return self._mac_tag + + def hexdigest(self): + """Compute the *printable* MAC tag. + + This method is like `digest`. + + :Return: the MAC, as a hexadecimal string. + """ + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def verify(self, received_mac_tag): + """Validate the *binary* MAC tag. + + The caller invokes this function at the very end. + + This method checks if the decrypted message is indeed valid + (that is, if the key is correct) and it has not been + tampered with while in transit. + + :Parameters: + received_mac_tag : bytes/bytearray/memoryview + This is the *binary* MAC, as received from the sender. + :Raises MacMismatchError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + if "verify" not in self._next: + raise TypeError("verify() cannot be called" + " when encrypting a message") + self._next = ["verify"] + + if not self._mac_tag: + tag = b'\x00' * self.block_size + for i in range(3): + tag = strxor(tag, self._omac[i].digest()) + self._mac_tag = tag[:self._mac_len] + + secret = get_random_bytes(16) + + mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=self._mac_tag) + mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=received_mac_tag) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + def hexverify(self, hex_mac_tag): + """Validate the *printable* MAC tag. + + This method is like `verify`. 
+ + :Parameters: + hex_mac_tag : string + This is the *printable* MAC, as received from the sender. + :Raises MacMismatchError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + self.verify(unhexlify(hex_mac_tag)) + + def encrypt_and_digest(self, plaintext, output=None): + """Perform encrypt() and digest() in one step. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + a tuple with two items: + + - the ciphertext, as ``bytes`` + - the MAC tag, as ``bytes`` + + The first item becomes ``None`` when the ``output`` parameter + specified a location for the result. + """ + + return self.encrypt(plaintext, output=output), self.digest() + + def decrypt_and_verify(self, ciphertext, received_mac_tag, output=None): + """Perform decrypt() and verify() in one step. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + received_mac_tag : bytes/bytearray/memoryview + This is the *binary* MAC, as received from the sender. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: the plaintext as ``bytes`` or ``None`` when the ``output`` + parameter specified a location for the result. + :Raises MacMismatchError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + pt = self.decrypt(ciphertext, output=output) + self.verify(received_mac_tag) + return pt + + +def _create_eax_cipher(factory, **kwargs): + """Create a new block cipher, configured in EAX mode. + + :Parameters: + factory : module + A symmetric cipher module from `Crypto.Cipher` (like + `Crypto.Cipher.AES`). + + :Keywords: + key : bytes/bytearray/memoryview + The secret key to use in the symmetric cipher. + + nonce : bytes/bytearray/memoryview + A value that must never be reused for any other encryption. + There are no restrictions on its length, but it is recommended to use + at least 16 bytes. + + The nonce shall never repeat for two different messages encrypted with + the same key, but it does not need to be random. + + If not specified, a 16 byte long random string is used. + + mac_len : integer + Length of the MAC, in bytes. It must be no larger than the cipher + block bytes (which is the default). + """ + + try: + key = kwargs.pop("key") + nonce = kwargs.pop("nonce", None) + if nonce is None: + nonce = get_random_bytes(16) + mac_len = kwargs.pop("mac_len", factory.block_size) + except KeyError as e: + raise TypeError("Missing parameter: " + str(e)) + + return EaxMode(factory, key, nonce, mac_len, kwargs) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_eax.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_eax.pyi new file mode 100644 index 000000000..cbfa467eb --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_eax.pyi @@ -0,0 +1,45 @@ +from types import ModuleType +from typing import Any, Union, Tuple, Dict, overload, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = ['EaxMode'] + +class EaxMode(object): + block_size: int + nonce: bytes + + def __init__(self, + factory: ModuleType, + key: Buffer, + nonce: Buffer, + mac_len: int, + cipher_params: Dict) -> None: ... 
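Putting the pieces above together, a typical EAX round trip (assuming AES and illustrative header/payload values) looks roughly like this; ``update`` authenticates the header without encrypting it, and ``decrypt_and_verify`` raises ``ValueError`` on a bad tag:

    from Crypto.Cipher import AES
    from Crypto.Random import get_random_bytes

    key = get_random_bytes(16)
    header = b"packet header"              # authenticated, sent in the clear

    enc = AES.new(key, AES.MODE_EAX)       # a random 16-byte nonce is chosen when omitted
    enc.update(header)
    ciphertext, tag = enc.encrypt_and_digest(b"secret payload")

    # Transmit enc.nonce, header, ciphertext and tag.
    dec = AES.new(key, AES.MODE_EAX, nonce=enc.nonce)
    dec.update(header)
    plaintext = dec.decrypt_and_verify(ciphertext, tag)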
+ + def update(self, assoc_data: Buffer) -> EaxMode: ... + + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, received_mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + + @overload + def encrypt_and_digest(self, + plaintext: Buffer) -> Tuple[bytes, bytes]: ... + @overload + def encrypt_and_digest(self, + plaintext: Buffer, + output: Buffer) -> Tuple[None, bytes]: ... + def decrypt_and_verify(self, + ciphertext: Buffer, + received_mac_tag: Buffer, + output: Optional[Union[bytearray, memoryview]] = ...) -> bytes: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ecb.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ecb.py new file mode 100644 index 000000000..378335721 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ecb.py @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- +# +# Cipher/mode_ecb.py : ECB mode +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +""" +Electronic Code Book (ECB) mode. +""" + +__all__ = [ 'EcbMode' ] + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, create_string_buffer, + get_raw_buffer, SmartPointer, + c_size_t, c_uint8_ptr, + is_writeable_buffer) + +raw_ecb_lib = load_pycryptodome_raw_lib("Crypto.Cipher._raw_ecb", """ + int ECB_start_operation(void *cipher, + void **pResult); + int ECB_encrypt(void *ecbState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int ECB_decrypt(void *ecbState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int ECB_stop_operation(void *state); + """ + ) + + +class EcbMode(object): + """*Electronic Code Book (ECB)*. + + This is the simplest encryption mode. Each of the plaintext blocks + is directly encrypted into a ciphertext block, independently of + any other block. + + This mode is dangerous because it exposes frequency of symbols + in your plaintext. Other modes (e.g. *CBC*) should be used instead. + + See `NIST SP800-38A`_ , Section 6.1. + + .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf + + :undocumented: __init__ + """ + + def __init__(self, block_cipher): + """Create a new block cipher, configured in ECB mode. 
+ + :Parameters: + block_cipher : C pointer + A smart pointer to the low-level block cipher instance. + """ + self.block_size = block_cipher.block_size + + self._state = VoidPointer() + result = raw_ecb_lib.ECB_start_operation(block_cipher.get(), + self._state.address_of()) + if result: + raise ValueError("Error %d while instantiating the ECB mode" + % result) + + # Ensure that object disposal of this Python object will (eventually) + # free the memory allocated by the raw library for the cipher + # mode + self._state = SmartPointer(self._state.get(), + raw_ecb_lib.ECB_stop_operation) + + # Memory allocated for the underlying block cipher is now owned + # by the cipher mode + block_cipher.release() + + def encrypt(self, plaintext, output=None): + """Encrypt data with the key set at initialization. + + The data to encrypt can be broken up in two or + more pieces and `encrypt` can be called multiple times. + + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is equivalent to: + + >>> c.encrypt(a+b) + + This function does not add any padding to the plaintext. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + The length must be multiple of the cipher block length. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + If ``output`` is ``None``, the ciphertext is returned as ``bytes``. + Otherwise, ``None``. + """ + + if output is None: + ciphertext = create_string_buffer(len(plaintext)) + else: + ciphertext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(plaintext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_ecb_lib.ECB_encrypt(self._state.get(), + c_uint8_ptr(plaintext), + c_uint8_ptr(ciphertext), + c_size_t(len(plaintext))) + if result: + if result == 3: + raise ValueError("Data must be aligned to block boundary in ECB mode") + raise ValueError("Error %d while encrypting in ECB mode" % result) + + if output is None: + return get_raw_buffer(ciphertext) + else: + return None + + def decrypt(self, ciphertext, output=None): + """Decrypt data with the key set at initialization. + + The data to decrypt can be broken up in two or + more pieces and `decrypt` can be called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + The length must be multiple of the cipher block length. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: + If ``output`` is ``None``, the plaintext is returned as ``bytes``. + Otherwise, ``None``. 
+ """ + + if output is None: + plaintext = create_string_buffer(len(ciphertext)) + else: + plaintext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(ciphertext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_ecb_lib.ECB_decrypt(self._state.get(), + c_uint8_ptr(ciphertext), + c_uint8_ptr(plaintext), + c_size_t(len(ciphertext))) + if result: + if result == 3: + raise ValueError("Data must be aligned to block boundary in ECB mode") + raise ValueError("Error %d while decrypting in ECB mode" % result) + + if output is None: + return get_raw_buffer(plaintext) + else: + return None + + +def _create_ecb_cipher(factory, **kwargs): + """Instantiate a cipher object that performs ECB encryption/decryption. + + :Parameters: + factory : module + The underlying block cipher, a module from ``Crypto.Cipher``. + + All keywords are passed to the underlying block cipher. + See the relevant documentation for details (at least ``key`` will need + to be present""" + + cipher_state = factory._create_base_cipher(kwargs) + cipher_state.block_size = factory.block_size + if kwargs: + raise TypeError("Unknown parameters for ECB: %s" % str(kwargs)) + return EcbMode(cipher_state) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ecb.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ecb.pyi new file mode 100644 index 000000000..1772b23cc --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ecb.pyi @@ -0,0 +1,19 @@ +from typing import Union, overload + +from Crypto.Util._raw_api import SmartPointer + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = [ 'EcbMode' ] + +class EcbMode(object): + def __init__(self, block_cipher: SmartPointer) -> None: ... + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_gcm.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_gcm.py new file mode 100644 index 000000000..051951076 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_gcm.py @@ -0,0 +1,620 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +Galois/Counter Mode (GCM). +""" + +__all__ = ['GcmMode'] + +from binascii import unhexlify + +from Crypto.Util.py3compat import bord, _copy_bytes + +from Crypto.Util._raw_api import is_buffer + +from Crypto.Util.number import long_to_bytes, bytes_to_long +from Crypto.Hash import BLAKE2s +from Crypto.Random import get_random_bytes + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, + create_string_buffer, get_raw_buffer, + SmartPointer, c_size_t, c_uint8_ptr) + +from Crypto.Util import _cpu_features + + +# C API by module implementing GHASH +_ghash_api_template = """ + int ghash_%imp%(uint8_t y_out[16], + const uint8_t block_data[], + size_t len, + const uint8_t y_in[16], + const void *exp_key); + int ghash_expand_%imp%(const uint8_t h[16], + void **ghash_tables); + int ghash_destroy_%imp%(void *ghash_tables); +""" + +def _build_impl(lib, postfix): + from collections import namedtuple + + funcs = ( "ghash", "ghash_expand", "ghash_destroy" ) + GHASH_Imp = namedtuple('_GHash_Imp', funcs) + try: + imp_funcs = [ getattr(lib, x + "_" + postfix) for x in funcs ] + except AttributeError: # Make sphinx stop complaining with its mocklib + imp_funcs = [ None ] * 3 + params = dict(zip(funcs, imp_funcs)) + return GHASH_Imp(**params) + + +def _get_ghash_portable(): + api = _ghash_api_template.replace("%imp%", "portable") + lib = load_pycryptodome_raw_lib("Crypto.Hash._ghash_portable", api) + result = _build_impl(lib, "portable") + return result +_ghash_portable = _get_ghash_portable() + + +def _get_ghash_clmul(): + """Return None if CLMUL implementation is not available""" + + if not _cpu_features.have_clmul(): + return None + try: + api = _ghash_api_template.replace("%imp%", "clmul") + lib = load_pycryptodome_raw_lib("Crypto.Hash._ghash_clmul", api) + result = _build_impl(lib, "clmul") + except OSError: + result = None + return result +_ghash_clmul = _get_ghash_clmul() + + +class _GHASH(object): + """GHASH function defined in NIST SP 800-38D, Algorithm 2. + + If X_1, X_2, .. X_m are the blocks of input data, the function + computes: + + X_1*H^{m} + X_2*H^{m-1} + ... + X_m*H + + in the Galois field GF(2^256) using the reducing polynomial + (x^128 + x^7 + x^2 + x + 1). 
+ """ + + def __init__(self, subkey, ghash_c): + assert len(subkey) == 16 + + self.ghash_c = ghash_c + + self._exp_key = VoidPointer() + result = ghash_c.ghash_expand(c_uint8_ptr(subkey), + self._exp_key.address_of()) + if result: + raise ValueError("Error %d while expanding the GHASH key" % result) + + self._exp_key = SmartPointer(self._exp_key.get(), + ghash_c.ghash_destroy) + + # create_string_buffer always returns a string of zeroes + self._last_y = create_string_buffer(16) + + def update(self, block_data): + assert len(block_data) % 16 == 0 + + result = self.ghash_c.ghash(self._last_y, + c_uint8_ptr(block_data), + c_size_t(len(block_data)), + self._last_y, + self._exp_key.get()) + if result: + raise ValueError("Error %d while updating GHASH" % result) + + return self + + def digest(self): + return get_raw_buffer(self._last_y) + + +def enum(**enums): + return type('Enum', (), enums) + + +MacStatus = enum(PROCESSING_AUTH_DATA=1, PROCESSING_CIPHERTEXT=2) + + +class GcmMode(object): + """Galois Counter Mode (GCM). + + This is an Authenticated Encryption with Associated Data (`AEAD`_) mode. + It provides both confidentiality and authenticity. + + The header of the message may be left in the clear, if needed, and it will + still be subject to authentication. The decryption step tells the receiver + if the message comes from a source that really knowns the secret key. + Additionally, decryption detects if any part of the message - including the + header - has been modified or corrupted. + + This mode requires a *nonce*. + + This mode is only available for ciphers that operate on 128 bits blocks + (e.g. AES but not TDES). + + See `NIST SP800-38D`_. + + .. _`NIST SP800-38D`: http://csrc.nist.gov/publications/nistpubs/800-38D/SP-800-38D.pdf + .. _AEAD: http://blog.cryptographyengineering.com/2012/05/how-to-choose-authenticated-encryption.html + + :undocumented: __init__ + """ + + def __init__(self, factory, key, nonce, mac_len, cipher_params, ghash_c): + + self.block_size = factory.block_size + if self.block_size != 16: + raise ValueError("GCM mode is only available for ciphers" + " that operate on 128 bits blocks") + + if len(nonce) == 0: + raise ValueError("Nonce cannot be empty") + + if not is_buffer(nonce): + raise TypeError("Nonce must be bytes, bytearray or memoryview") + + # See NIST SP 800 38D, 5.2.1.1 + if len(nonce) > 2**64 - 1: + raise ValueError("Nonce exceeds maximum length") + + + self.nonce = _copy_bytes(None, None, nonce) + """Nonce""" + + self._factory = factory + self._key = _copy_bytes(None, None, key) + self._tag = None # Cache for MAC tag + + self._mac_len = mac_len + if not (4 <= mac_len <= 16): + raise ValueError("Parameter 'mac_len' must be in the range 4..16") + + # Allowed transitions after initialization + self._next = ["update", "encrypt", "decrypt", + "digest", "verify"] + + self._no_more_assoc_data = False + + # Length of associated data + self._auth_len = 0 + + # Length of the ciphertext or plaintext + self._msg_len = 0 + + # Step 1 in SP800-38D, Algorithm 4 (encryption) - Compute H + # See also Algorithm 5 (decryption) + hash_subkey = factory.new(key, + self._factory.MODE_ECB, + **cipher_params + ).encrypt(b'\x00' * 16) + + # Step 2 - Compute J0 + if len(self.nonce) == 12: + j0 = self.nonce + b"\x00\x00\x00\x01" + else: + fill = (16 - (len(self.nonce) % 16)) % 16 + 8 + ghash_in = (self.nonce + + b'\x00' * fill + + long_to_bytes(8 * len(self.nonce), 8)) + j0 = _GHASH(hash_subkey, ghash_c).update(ghash_in).digest() + + # Step 3 - Prepare GCTR cipher for 
encryption/decryption + nonce_ctr = j0[:12] + iv_ctr = (bytes_to_long(j0) + 1) & 0xFFFFFFFF + self._cipher = factory.new(key, + self._factory.MODE_CTR, + initial_value=iv_ctr, + nonce=nonce_ctr, + **cipher_params) + + # Step 5 - Bootstrat GHASH + self._signer = _GHASH(hash_subkey, ghash_c) + + # Step 6 - Prepare GCTR cipher for GMAC + self._tag_cipher = factory.new(key, + self._factory.MODE_CTR, + initial_value=j0, + nonce=b"", + **cipher_params) + + # Cache for data to authenticate + self._cache = b"" + + self._status = MacStatus.PROCESSING_AUTH_DATA + + def update(self, assoc_data): + """Protect associated data + + If there is any associated data, the caller has to invoke + this function one or more times, before using + ``decrypt`` or ``encrypt``. + + By *associated data* it is meant any data (e.g. packet headers) that + will not be encrypted and will be transmitted in the clear. + However, the receiver is still able to detect any modification to it. + In GCM, the *associated data* is also called + *additional authenticated data* (AAD). + + If there is no associated data, this method must not be called. + + The caller may split associated data in segments of any size, and + invoke this method multiple times, each time with the next segment. + + :Parameters: + assoc_data : bytes/bytearray/memoryview + A piece of associated data. There are no restrictions on its size. + """ + + if "update" not in self._next: + raise TypeError("update() can only be called" + " immediately after initialization") + + self._next = ["update", "encrypt", "decrypt", + "digest", "verify"] + + self._update(assoc_data) + self._auth_len += len(assoc_data) + + # See NIST SP 800 38D, 5.2.1.1 + if self._auth_len > 2**64 - 1: + raise ValueError("Additional Authenticated Data exceeds maximum length") + + return self + + def _update(self, data): + assert(len(self._cache) < 16) + + if len(self._cache) > 0: + filler = min(16 - len(self._cache), len(data)) + self._cache += _copy_bytes(None, filler, data) + data = data[filler:] + + if len(self._cache) < 16: + return + + # The cache is exactly one block + self._signer.update(self._cache) + self._cache = b"" + + update_len = len(data) // 16 * 16 + self._cache = _copy_bytes(update_len, None, data) + if update_len > 0: + self._signer.update(data[:update_len]) + + def _pad_cache_and_update(self): + assert(len(self._cache) < 16) + + # The authenticated data A is concatenated to the minimum + # number of zero bytes (possibly none) such that the + # - ciphertext C is aligned to the 16 byte boundary. + # See step 5 in section 7.1 + # - ciphertext C is aligned to the 16 byte boundary. + # See step 6 in section 7.2 + len_cache = len(self._cache) + if len_cache > 0: + self._update(b'\x00' * (16 - len_cache)) + + def encrypt(self, plaintext, output=None): + """Encrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have encrypted a message + you cannot encrypt (or decrypt) another message using the same + object. + + The data to encrypt can be broken up in two or + more pieces and `encrypt` can be called multiple times. + + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is equivalent to: + + >>> c.encrypt(a+b) + + This function does not add any padding to the plaintext. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. 
+ If ``None``, the ciphertext is returned. + :Return: + If ``output`` is ``None``, the ciphertext as ``bytes``. + Otherwise, ``None``. + """ + + if "encrypt" not in self._next: + raise TypeError("encrypt() can only be called after" + " initialization or an update()") + self._next = ["encrypt", "digest"] + + ciphertext = self._cipher.encrypt(plaintext, output=output) + + if self._status == MacStatus.PROCESSING_AUTH_DATA: + self._pad_cache_and_update() + self._status = MacStatus.PROCESSING_CIPHERTEXT + + self._update(ciphertext if output is None else output) + self._msg_len += len(plaintext) + + # See NIST SP 800 38D, 5.2.1.1 + if self._msg_len > 2**39 - 256: + raise ValueError("Plaintext exceeds maximum length") + + return ciphertext + + def decrypt(self, ciphertext, output=None): + """Decrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. + + The data to decrypt can be broken up in two or + more pieces and `decrypt` can be called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: + If ``output`` is ``None``, the plaintext as ``bytes``. + Otherwise, ``None``. + """ + + if "decrypt" not in self._next: + raise TypeError("decrypt() can only be called" + " after initialization or an update()") + self._next = ["decrypt", "verify"] + + if self._status == MacStatus.PROCESSING_AUTH_DATA: + self._pad_cache_and_update() + self._status = MacStatus.PROCESSING_CIPHERTEXT + + self._update(ciphertext) + self._msg_len += len(ciphertext) + + return self._cipher.decrypt(ciphertext, output=output) + + def digest(self): + """Compute the *binary* MAC tag in an AEAD mode. + + The caller invokes this function at the very end. + + This method returns the MAC that shall be sent to the receiver, + together with the ciphertext. + + :Return: the MAC, as a byte string. + """ + + if "digest" not in self._next: + raise TypeError("digest() cannot be called when decrypting" + " or validating a message") + self._next = ["digest"] + + return self._compute_mac() + + def _compute_mac(self): + """Compute MAC without any FSM checks.""" + + if self._tag: + return self._tag + + # Step 5 in NIST SP 800-38D, Algorithm 4 - Compute S + self._pad_cache_and_update() + self._update(long_to_bytes(8 * self._auth_len, 8)) + self._update(long_to_bytes(8 * self._msg_len, 8)) + s_tag = self._signer.digest() + + # Step 6 - Compute T + self._tag = self._tag_cipher.encrypt(s_tag)[:self._mac_len] + + return self._tag + + def hexdigest(self): + """Compute the *printable* MAC tag. + + This method is like `digest`. + + :Return: the MAC, as a hexadecimal string. + """ + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def verify(self, received_mac_tag): + """Validate the *binary* MAC tag. + + The caller invokes this function at the very end. + + This method checks if the decrypted message is indeed valid + (that is, if the key is correct) and it has not been + tampered with while in transit. 
+ + :Parameters: + received_mac_tag : bytes/bytearray/memoryview + This is the *binary* MAC, as received from the sender. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + if "verify" not in self._next: + raise TypeError("verify() cannot be called" + " when encrypting a message") + self._next = ["verify"] + + secret = get_random_bytes(16) + + mac1 = BLAKE2s.new(digest_bits=160, key=secret, + data=self._compute_mac()) + mac2 = BLAKE2s.new(digest_bits=160, key=secret, + data=received_mac_tag) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + def hexverify(self, hex_mac_tag): + """Validate the *printable* MAC tag. + + This method is like `verify`. + + :Parameters: + hex_mac_tag : string + This is the *printable* MAC, as received from the sender. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + self.verify(unhexlify(hex_mac_tag)) + + def encrypt_and_digest(self, plaintext, output=None): + """Perform encrypt() and digest() in one step. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + a tuple with two items: + + - the ciphertext, as ``bytes`` + - the MAC tag, as ``bytes`` + + The first item becomes ``None`` when the ``output`` parameter + specified a location for the result. + """ + + return self.encrypt(plaintext, output=output), self.digest() + + def decrypt_and_verify(self, ciphertext, received_mac_tag, output=None): + """Perform decrypt() and verify() in one step. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + received_mac_tag : byte string + This is the *binary* MAC, as received from the sender. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: the plaintext as ``bytes`` or ``None`` when the ``output`` + parameter specified a location for the result. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + plaintext = self.decrypt(ciphertext, output=output) + self.verify(received_mac_tag) + return plaintext + + +def _create_gcm_cipher(factory, **kwargs): + """Create a new block cipher, configured in Galois Counter Mode (GCM). + + :Parameters: + factory : module + A block cipher module, taken from `Crypto.Cipher`. + The cipher must have block length of 16 bytes. + GCM has been only defined for `Crypto.Cipher.AES`. + + :Keywords: + key : bytes/bytearray/memoryview + The secret key to use in the symmetric cipher. + It must be 16 (e.g. *AES-128*), 24 (e.g. *AES-192*) + or 32 (e.g. *AES-256*) bytes long. + + nonce : bytes/bytearray/memoryview + A value that must never be reused for any other encryption. + + There are no restrictions on its length, + but it is recommended to use at least 16 bytes. + + The nonce shall never repeat for two + different messages encrypted with the same key, + but it does not need to be random. + + If not provided, a 16 byte nonce will be randomly created. + + mac_len : integer + Length of the MAC, in bytes. + It must be no larger than 16 bytes (which is the default). 
+ """ + + try: + key = kwargs.pop("key") + except KeyError as e: + raise TypeError("Missing parameter:" + str(e)) + + nonce = kwargs.pop("nonce", None) + if nonce is None: + nonce = get_random_bytes(16) + mac_len = kwargs.pop("mac_len", 16) + + # Not documented - only used for testing + use_clmul = kwargs.pop("use_clmul", True) + if use_clmul and _ghash_clmul: + ghash_c = _ghash_clmul + else: + ghash_c = _ghash_portable + + return GcmMode(factory, key, nonce, mac_len, kwargs, ghash_c) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_gcm.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_gcm.pyi new file mode 100644 index 000000000..8912955f2 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_gcm.pyi @@ -0,0 +1,45 @@ +from types import ModuleType +from typing import Union, Tuple, Dict, overload, Optional + +__all__ = ['GcmMode'] + +Buffer = Union[bytes, bytearray, memoryview] + +class GcmMode(object): + block_size: int + nonce: Buffer + + def __init__(self, + factory: ModuleType, + key: Buffer, + nonce: Buffer, + mac_len: int, + cipher_params: Dict) -> None: ... + + def update(self, assoc_data: Buffer) -> GcmMode: ... + + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, received_mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + + @overload + def encrypt_and_digest(self, + plaintext: Buffer) -> Tuple[bytes, bytes]: ... + @overload + def encrypt_and_digest(self, + plaintext: Buffer, + output: Buffer) -> Tuple[None, bytes]: ... + def decrypt_and_verify(self, + ciphertext: Buffer, + received_mac_tag: Buffer, + output: Optional[Union[bytearray, memoryview]] = ...) -> bytes: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ocb.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ocb.py new file mode 100644 index 000000000..a271ca126 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ocb.py @@ -0,0 +1,532 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +Offset Codebook (OCB) mode. + +OCB is Authenticated Encryption with Associated Data (AEAD) cipher mode +designed by Prof. Phillip Rogaway and specified in `RFC7253`_. + +The algorithm provides both authenticity and privacy, it is very efficient, +it uses only one key and it can be used in online mode (so that encryption +or decryption can start before the end of the message is available). + +This module implements the third and last variant of OCB (OCB3) and it only +works in combination with a 128-bit block symmetric cipher, like AES. + +OCB is patented in US but `free licenses`_ exist for software implementations +meant for non-military purposes. + +Example: + >>> from Crypto.Cipher import AES + >>> from Crypto.Random import get_random_bytes + >>> + >>> key = get_random_bytes(32) + >>> cipher = AES.new(key, AES.MODE_OCB) + >>> plaintext = b"Attack at dawn" + >>> ciphertext, mac = cipher.encrypt_and_digest(plaintext) + >>> # Deliver cipher.nonce, ciphertext and mac + ... + >>> cipher = AES.new(key, AES.MODE_OCB, nonce=nonce) + >>> try: + >>> plaintext = cipher.decrypt_and_verify(ciphertext, mac) + >>> except ValueError: + >>> print "Invalid message" + >>> else: + >>> print plaintext + +:undocumented: __package__ + +.. _RFC7253: http://www.rfc-editor.org/info/rfc7253 +.. _free licenses: http://web.cs.ucdavis.edu/~rogaway/ocb/license.htm +""" + +import struct +from binascii import unhexlify + +from Crypto.Util.py3compat import bord, _copy_bytes, bchr +from Crypto.Util.number import long_to_bytes, bytes_to_long +from Crypto.Util.strxor import strxor + +from Crypto.Hash import BLAKE2s +from Crypto.Random import get_random_bytes + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, + create_string_buffer, get_raw_buffer, + SmartPointer, c_size_t, c_uint8_ptr, + is_buffer) + +_raw_ocb_lib = load_pycryptodome_raw_lib("Crypto.Cipher._raw_ocb", """ + int OCB_start_operation(void *cipher, + const uint8_t *offset_0, + size_t offset_0_len, + void **pState); + int OCB_encrypt(void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int OCB_decrypt(void *state, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int OCB_update(void *state, + const uint8_t *in, + size_t data_len); + int OCB_digest(void *state, + uint8_t *tag, + size_t tag_len); + int OCB_stop_operation(void *state); + """) + + +class OcbMode(object): + """Offset Codebook (OCB) mode. 
+ + :undocumented: __init__ + """ + + def __init__(self, factory, nonce, mac_len, cipher_params): + + if factory.block_size != 16: + raise ValueError("OCB mode is only available for ciphers" + " that operate on 128 bits blocks") + + self.block_size = 16 + """The block size of the underlying cipher, in bytes.""" + + self.nonce = _copy_bytes(None, None, nonce) + """Nonce used for this session.""" + if len(nonce) not in range(1, 16): + raise ValueError("Nonce must be at most 15 bytes long") + if not is_buffer(nonce): + raise TypeError("Nonce must be bytes, bytearray or memoryview") + + self._mac_len = mac_len + if not 8 <= mac_len <= 16: + raise ValueError("MAC tag must be between 8 and 16 bytes long") + + # Cache for MAC tag + self._mac_tag = None + + # Cache for unaligned associated data + self._cache_A = b"" + + # Cache for unaligned ciphertext/plaintext + self._cache_P = b"" + + # Allowed transitions after initialization + self._next = ["update", "encrypt", "decrypt", + "digest", "verify"] + + # Compute Offset_0 + params_without_key = dict(cipher_params) + key = params_without_key.pop("key") + + taglen_mod128 = (self._mac_len * 8) % 128 + if len(self.nonce) < 15: + nonce = bchr(taglen_mod128 << 1) +\ + b'\x00' * (14 - len(nonce)) +\ + b'\x01' +\ + self.nonce + else: + nonce = bchr((taglen_mod128 << 1) | 0x01) +\ + self.nonce + + bottom_bits = bord(nonce[15]) & 0x3F # 6 bits, 0..63 + top_bits = bord(nonce[15]) & 0xC0 # 2 bits + + ktop_cipher = factory.new(key, + factory.MODE_ECB, + **params_without_key) + ktop = ktop_cipher.encrypt(struct.pack('15sB', + nonce[:15], + top_bits)) + + stretch = ktop + strxor(ktop[:8], ktop[1:9]) # 192 bits + offset_0 = long_to_bytes(bytes_to_long(stretch) >> + (64 - bottom_bits), 24)[8:] + + # Create low-level cipher instance + raw_cipher = factory._create_base_cipher(cipher_params) + if cipher_params: + raise TypeError("Unknown keywords: " + str(cipher_params)) + + self._state = VoidPointer() + result = _raw_ocb_lib.OCB_start_operation(raw_cipher.get(), + offset_0, + c_size_t(len(offset_0)), + self._state.address_of()) + if result: + raise ValueError("Error %d while instantiating the OCB mode" + % result) + + # Ensure that object disposal of this Python object will (eventually) + # free the memory allocated by the raw library for the cipher mode + self._state = SmartPointer(self._state.get(), + _raw_ocb_lib.OCB_stop_operation) + + # Memory allocated for the underlying block cipher is now owed + # by the cipher mode + raw_cipher.release() + + def _update(self, assoc_data, assoc_data_len): + result = _raw_ocb_lib.OCB_update(self._state.get(), + c_uint8_ptr(assoc_data), + c_size_t(assoc_data_len)) + if result: + raise ValueError("Error %d while computing MAC in OCB mode" % result) + + def update(self, assoc_data): + """Process the associated data. + + If there is any associated data, the caller has to invoke + this method one or more times, before using + ``decrypt`` or ``encrypt``. + + By *associated data* it is meant any data (e.g. packet headers) that + will not be encrypted and will be transmitted in the clear. + However, the receiver shall still able to detect modifications. + + If there is no associated data, this method must not be called. + + The caller may split associated data in segments of any size, and + invoke this method multiple times, each time with the next segment. + + :Parameters: + assoc_data : bytes/bytearray/memoryview + A piece of associated data. 
+ """ + + if "update" not in self._next: + raise TypeError("update() can only be called" + " immediately after initialization") + + self._next = ["encrypt", "decrypt", "digest", + "verify", "update"] + + if len(self._cache_A) > 0: + filler = min(16 - len(self._cache_A), len(assoc_data)) + self._cache_A += _copy_bytes(None, filler, assoc_data) + assoc_data = assoc_data[filler:] + + if len(self._cache_A) < 16: + return self + + # Clear the cache, and proceeding with any other aligned data + self._cache_A, seg = b"", self._cache_A + self.update(seg) + + update_len = len(assoc_data) // 16 * 16 + self._cache_A = _copy_bytes(update_len, None, assoc_data) + self._update(assoc_data, update_len) + return self + + def _transcrypt_aligned(self, in_data, in_data_len, + trans_func, trans_desc): + + out_data = create_string_buffer(in_data_len) + result = trans_func(self._state.get(), + in_data, + out_data, + c_size_t(in_data_len)) + if result: + raise ValueError("Error %d while %sing in OCB mode" + % (result, trans_desc)) + return get_raw_buffer(out_data) + + def _transcrypt(self, in_data, trans_func, trans_desc): + # Last piece to encrypt/decrypt + if in_data is None: + out_data = self._transcrypt_aligned(self._cache_P, + len(self._cache_P), + trans_func, + trans_desc) + self._cache_P = b"" + return out_data + + # Try to fill up the cache, if it already contains something + prefix = b"" + if len(self._cache_P) > 0: + filler = min(16 - len(self._cache_P), len(in_data)) + self._cache_P += _copy_bytes(None, filler, in_data) + in_data = in_data[filler:] + + if len(self._cache_P) < 16: + # We could not manage to fill the cache, so there is certainly + # no output yet. + return b"" + + # Clear the cache, and proceeding with any other aligned data + prefix = self._transcrypt_aligned(self._cache_P, + len(self._cache_P), + trans_func, + trans_desc) + self._cache_P = b"" + + # Process data in multiples of the block size + trans_len = len(in_data) // 16 * 16 + result = self._transcrypt_aligned(c_uint8_ptr(in_data), + trans_len, + trans_func, + trans_desc) + if prefix: + result = prefix + result + + # Left-over + self._cache_P = _copy_bytes(trans_len, None, in_data) + + return result + + def encrypt(self, plaintext=None): + """Encrypt the next piece of plaintext. + + After the entire plaintext has been passed (but before `digest`), + you **must** call this method one last time with no arguments to collect + the final piece of ciphertext. + + If possible, use the method `encrypt_and_digest` instead. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The next piece of data to encrypt or ``None`` to signify + that encryption has finished and that any remaining ciphertext + has to be produced. + :Return: + the ciphertext, as a byte string. + Its length may not match the length of the *plaintext*. + """ + + if "encrypt" not in self._next: + raise TypeError("encrypt() can only be called after" + " initialization or an update()") + + if plaintext is None: + self._next = ["digest"] + else: + self._next = ["encrypt"] + return self._transcrypt(plaintext, _raw_ocb_lib.OCB_encrypt, "encrypt") + + def decrypt(self, ciphertext=None): + """Decrypt the next piece of ciphertext. + + After the entire ciphertext has been passed (but before `verify`), + you **must** call this method one last time with no arguments to collect + the remaining piece of plaintext. + + If possible, use the method `decrypt_and_verify` instead. 
+ + :Parameters: + ciphertext : bytes/bytearray/memoryview + The next piece of data to decrypt or ``None`` to signify + that decryption has finished and that any remaining plaintext + has to be produced. + :Return: + the plaintext, as a byte string. + Its length may not match the length of the *ciphertext*. + """ + + if "decrypt" not in self._next: + raise TypeError("decrypt() can only be called after" + " initialization or an update()") + + if ciphertext is None: + self._next = ["verify"] + else: + self._next = ["decrypt"] + return self._transcrypt(ciphertext, + _raw_ocb_lib.OCB_decrypt, + "decrypt") + + def _compute_mac_tag(self): + + if self._mac_tag is not None: + return + + if self._cache_A: + self._update(self._cache_A, len(self._cache_A)) + self._cache_A = b"" + + mac_tag = create_string_buffer(16) + result = _raw_ocb_lib.OCB_digest(self._state.get(), + mac_tag, + c_size_t(len(mac_tag)) + ) + if result: + raise ValueError("Error %d while computing digest in OCB mode" + % result) + self._mac_tag = get_raw_buffer(mac_tag)[:self._mac_len] + + def digest(self): + """Compute the *binary* MAC tag. + + Call this method after the final `encrypt` (the one with no arguments) + to obtain the MAC tag. + + The MAC tag is needed by the receiver to determine authenticity + of the message. + + :Return: the MAC, as a byte string. + """ + + if "digest" not in self._next: + raise TypeError("digest() cannot be called now for this cipher") + + assert(len(self._cache_P) == 0) + + self._next = ["digest"] + + if self._mac_tag is None: + self._compute_mac_tag() + + return self._mac_tag + + def hexdigest(self): + """Compute the *printable* MAC tag. + + This method is like `digest`. + + :Return: the MAC, as a hexadecimal string. + """ + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def verify(self, received_mac_tag): + """Validate the *binary* MAC tag. + + Call this method after the final `decrypt` (the one with no arguments) + to check if the message is authentic and valid. + + :Parameters: + received_mac_tag : bytes/bytearray/memoryview + This is the *binary* MAC, as received from the sender. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + if "verify" not in self._next: + raise TypeError("verify() cannot be called now for this cipher") + + assert(len(self._cache_P) == 0) + + self._next = ["verify"] + + if self._mac_tag is None: + self._compute_mac_tag() + + secret = get_random_bytes(16) + mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=self._mac_tag) + mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=received_mac_tag) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + def hexverify(self, hex_mac_tag): + """Validate the *printable* MAC tag. + + This method is like `verify`. + + :Parameters: + hex_mac_tag : string + This is the *printable* MAC, as received from the sender. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + self.verify(unhexlify(hex_mac_tag)) + + def encrypt_and_digest(self, plaintext): + """Encrypt the message and create the MAC tag in one step. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The entire message to encrypt. 
+ :Return: + a tuple with two byte strings: + + - the encrypted data + - the MAC + """ + + return self.encrypt(plaintext) + self.encrypt(), self.digest() + + def decrypt_and_verify(self, ciphertext, received_mac_tag): + """Decrypted the message and verify its authenticity in one step. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The entire message to decrypt. + received_mac_tag : byte string + This is the *binary* MAC, as received from the sender. + + :Return: the decrypted data (byte string). + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + plaintext = self.decrypt(ciphertext) + self.decrypt() + self.verify(received_mac_tag) + return plaintext + + +def _create_ocb_cipher(factory, **kwargs): + """Create a new block cipher, configured in OCB mode. + + :Parameters: + factory : module + A symmetric cipher module from `Crypto.Cipher` + (like `Crypto.Cipher.AES`). + + :Keywords: + nonce : bytes/bytearray/memoryview + A value that must never be reused for any other encryption. + Its length can vary from 1 to 15 bytes. + If not specified, a random 15 bytes long nonce is generated. + + mac_len : integer + Length of the MAC, in bytes. + It must be in the range ``[8..16]``. + The default is 16 (128 bits). + + Any other keyword will be passed to the underlying block cipher. + See the relevant documentation for details (at least ``key`` will need + to be present). + """ + + try: + nonce = kwargs.pop("nonce", None) + if nonce is None: + nonce = get_random_bytes(15) + mac_len = kwargs.pop("mac_len", 16) + except KeyError as e: + raise TypeError("Keyword missing: " + str(e)) + + return OcbMode(factory, nonce, mac_len, kwargs) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ocb.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ocb.pyi new file mode 100644 index 000000000..a1909fc38 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ocb.pyi @@ -0,0 +1,36 @@ +from types import ModuleType +from typing import Union, Any, Optional, Tuple, Dict, overload + +Buffer = Union[bytes, bytearray, memoryview] + +class OcbMode(object): + block_size: int + nonce: Buffer + + def __init__(self, + factory: ModuleType, + nonce: Buffer, + mac_len: int, + cipher_params: Dict) -> None: ... + + def update(self, assoc_data: Buffer) -> OcbMode: ... + + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, received_mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + + def encrypt_and_digest(self, + plaintext: Buffer) -> Tuple[bytes, bytes]: ... + def decrypt_and_verify(self, + ciphertext: Buffer, + received_mac_tag: Buffer) -> bytes: ... 
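Reviewer note: the doctest in the `_mode_ocb.py` module docstring above uses Python 2 `print` statements and references a `nonce` variable that is never assigned. The following is a minimal round-trip sketch of the OCB API this file adds, not part of the vendored source; it assumes the `AES` front-end from the same package exposes `AES.MODE_OCB` and routes it to `_create_ocb_cipher`, as the docstring example implies.

from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes

key = get_random_bytes(16)
header = b"associated data (authenticated, not encrypted)"

# Encrypt: omitting nonce lets _create_ocb_cipher pick a random 15-byte one
cipher = AES.new(key, AES.MODE_OCB)
cipher.update(header)
ciphertext, tag = cipher.encrypt_and_digest(b"Attack at dawn")
nonce = cipher.nonce          # must travel with ciphertext and tag

# Decrypt and authenticate on the receiving side
cipher = AES.new(key, AES.MODE_OCB, nonce=nonce)
cipher.update(header)
try:
    plaintext = cipher.decrypt_and_verify(ciphertext, tag)
except ValueError:
    print("Invalid message")

This mirrors the `encrypt_and_digest`/`decrypt_and_verify` pair declared in the stub above and avoids the piecewise `encrypt()`/`decrypt()` calls, which require a final no-argument call to flush the internal cache.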
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ofb.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ofb.py new file mode 100644 index 000000000..1a29b0569 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ofb.py @@ -0,0 +1,282 @@ +# -*- coding: utf-8 -*- +# +# Cipher/mode_ofb.py : OFB mode +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +""" +Output Feedback (CFB) mode. +""" + +__all__ = ['OfbMode'] + +from Crypto.Util.py3compat import _copy_bytes +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, + create_string_buffer, get_raw_buffer, + SmartPointer, c_size_t, c_uint8_ptr, + is_writeable_buffer) + +from Crypto.Random import get_random_bytes + +raw_ofb_lib = load_pycryptodome_raw_lib("Crypto.Cipher._raw_ofb", """ + int OFB_start_operation(void *cipher, + const uint8_t iv[], + size_t iv_len, + void **pResult); + int OFB_encrypt(void *ofbState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int OFB_decrypt(void *ofbState, + const uint8_t *in, + uint8_t *out, + size_t data_len); + int OFB_stop_operation(void *state); + """ + ) + + +class OfbMode(object): + """*Output FeedBack (OFB)*. + + This mode is very similar to CBC, but it + transforms the underlying block cipher into a stream cipher. + + The keystream is the iterated block encryption of the + previous ciphertext block. + + An Initialization Vector (*IV*) is required. + + See `NIST SP800-38A`_ , Section 6.4. + + .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf + + :undocumented: __init__ + """ + + def __init__(self, block_cipher, iv): + """Create a new block cipher, configured in OFB mode. + + :Parameters: + block_cipher : C pointer + A smart pointer to the low-level block cipher instance. + + iv : bytes/bytearray/memoryview + The initialization vector to use for encryption or decryption. + It is as long as the cipher block. + + **The IV must be a nonce, to to be reused for any other + message**. It shall be a nonce or a random value. + + Reusing the *IV* for encryptions performed with the same key + compromises confidentiality. 
+ """ + + self._state = VoidPointer() + result = raw_ofb_lib.OFB_start_operation(block_cipher.get(), + c_uint8_ptr(iv), + c_size_t(len(iv)), + self._state.address_of()) + if result: + raise ValueError("Error %d while instantiating the OFB mode" + % result) + + # Ensure that object disposal of this Python object will (eventually) + # free the memory allocated by the raw library for the cipher mode + self._state = SmartPointer(self._state.get(), + raw_ofb_lib.OFB_stop_operation) + + # Memory allocated for the underlying block cipher is now owed + # by the cipher mode + block_cipher.release() + + self.block_size = len(iv) + """The block size of the underlying cipher, in bytes.""" + + self.iv = _copy_bytes(None, None, iv) + """The Initialization Vector originally used to create the object. + The value does not change.""" + + self.IV = self.iv + """Alias for `iv`""" + + self._next = ["encrypt", "decrypt"] + + def encrypt(self, plaintext, output=None): + """Encrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have encrypted a message + you cannot encrypt (or decrypt) another message using the same + object. + + The data to encrypt can be broken up in two or + more pieces and `encrypt` can be called multiple times. + + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is equivalent to: + + >>> c.encrypt(a+b) + + This function does not add any padding to the plaintext. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + If ``output`` is ``None``, the ciphertext is returned as ``bytes``. + Otherwise, ``None``. + """ + + if "encrypt" not in self._next: + raise TypeError("encrypt() cannot be called after decrypt()") + self._next = ["encrypt"] + + if output is None: + ciphertext = create_string_buffer(len(plaintext)) + else: + ciphertext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(plaintext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_ofb_lib.OFB_encrypt(self._state.get(), + c_uint8_ptr(plaintext), + c_uint8_ptr(ciphertext), + c_size_t(len(plaintext))) + if result: + raise ValueError("Error %d while encrypting in OFB mode" % result) + + if output is None: + return get_raw_buffer(ciphertext) + else: + return None + + def decrypt(self, ciphertext, output=None): + """Decrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. + + The data to decrypt can be broken up in two or + more pieces and `decrypt` can be called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + It can be of any length. + :Keywords: + output : bytearray/memoryview + The location where the plaintext is written to. + If ``None``, the plaintext is returned. + :Return: + If ``output`` is ``None``, the plaintext is returned as ``bytes``. + Otherwise, ``None``. 
+ """ + + if "decrypt" not in self._next: + raise TypeError("decrypt() cannot be called after encrypt()") + self._next = ["decrypt"] + + if output is None: + plaintext = create_string_buffer(len(ciphertext)) + else: + plaintext = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(ciphertext) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(plaintext)) + + result = raw_ofb_lib.OFB_decrypt(self._state.get(), + c_uint8_ptr(ciphertext), + c_uint8_ptr(plaintext), + c_size_t(len(ciphertext))) + if result: + raise ValueError("Error %d while decrypting in OFB mode" % result) + + if output is None: + return get_raw_buffer(plaintext) + else: + return None + + +def _create_ofb_cipher(factory, **kwargs): + """Instantiate a cipher object that performs OFB encryption/decryption. + + :Parameters: + factory : module + The underlying block cipher, a module from ``Crypto.Cipher``. + + :Keywords: + iv : bytes/bytearray/memoryview + The IV to use for OFB. + + IV : bytes/bytearray/memoryview + Alias for ``iv``. + + Any other keyword will be passed to the underlying block cipher. + See the relevant documentation for details (at least ``key`` will need + to be present). + """ + + cipher_state = factory._create_base_cipher(kwargs) + iv = kwargs.pop("IV", None) + IV = kwargs.pop("iv", None) + + if (None, None) == (iv, IV): + iv = get_random_bytes(factory.block_size) + if iv is not None: + if IV is not None: + raise TypeError("You must either use 'iv' or 'IV', not both") + else: + iv = IV + + if len(iv) != factory.block_size: + raise ValueError("Incorrect IV length (it must be %d bytes long)" % + factory.block_size) + + if kwargs: + raise TypeError("Unknown parameters for OFB: %s" % str(kwargs)) + + return OfbMode(cipher_state, iv) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ofb.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ofb.pyi new file mode 100644 index 000000000..60f7f0046 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_ofb.pyi @@ -0,0 +1,25 @@ +from typing import Union, overload + +from Crypto.Util._raw_api import SmartPointer + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = ['OfbMode'] + +class OfbMode(object): + block_size: int + iv: Buffer + IV: Buffer + + def __init__(self, + block_cipher: SmartPointer, + iv: Buffer) -> None: ... + @overload + def encrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def encrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + @overload + def decrypt(self, plaintext: Buffer) -> bytes: ... + @overload + def decrypt(self, plaintext: Buffer, output: Union[bytearray, memoryview]) -> None: ... + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_openpgp.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_openpgp.py new file mode 100644 index 000000000..d079d5914 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_openpgp.py @@ -0,0 +1,206 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. 
Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +OpenPGP mode. +""" + +__all__ = ['OpenPgpMode'] + +from Crypto.Util.py3compat import _copy_bytes +from Crypto.Random import get_random_bytes + +class OpenPgpMode(object): + """OpenPGP mode. + + This mode is a variant of CFB, and it is only used in PGP and + OpenPGP_ applications. If in doubt, use another mode. + + An Initialization Vector (*IV*) is required. + + Unlike CFB, the *encrypted* IV (not the IV itself) is + transmitted to the receiver. + + The IV is a random data block. For legacy reasons, two of its bytes are + duplicated to act as a checksum for the correctness of the key, which is now + known to be insecure and is ignored. The encrypted IV is therefore 2 bytes + longer than the clean IV. + + .. _OpenPGP: http://tools.ietf.org/html/rfc4880 + + :undocumented: __init__ + """ + + def __init__(self, factory, key, iv, cipher_params): + + #: The block size of the underlying cipher, in bytes. + self.block_size = factory.block_size + + self._done_first_block = False # True after the first encryption + + # Instantiate a temporary cipher to process the IV + IV_cipher = factory.new( + key, + factory.MODE_CFB, + IV=b'\x00' * self.block_size, + segment_size=self.block_size * 8, + **cipher_params) + + iv = _copy_bytes(None, None, iv) + + # The cipher will be used for... + if len(iv) == self.block_size: + # ... encryption + self._encrypted_IV = IV_cipher.encrypt(iv + iv[-2:]) + elif len(iv) == self.block_size + 2: + # ... decryption + self._encrypted_IV = iv + # Last two bytes are for a deprecated "quick check" feature that + # should not be used. (https://eprint.iacr.org/2005/033) + iv = IV_cipher.decrypt(iv)[:-2] + else: + raise ValueError("Length of IV must be %d or %d bytes" + " for MODE_OPENPGP" + % (self.block_size, self.block_size + 2)) + + self.iv = self.IV = iv + + # Instantiate the cipher for the real PGP data + self._cipher = factory.new( + key, + factory.MODE_CFB, + IV=self._encrypted_IV[-self.block_size:], + segment_size=self.block_size * 8, + **cipher_params) + + def encrypt(self, plaintext): + """Encrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have encrypted a message + you cannot encrypt (or decrypt) another message using the same + object. 
+ + The data to encrypt can be broken up in two or + more pieces and `encrypt` can be called multiple times. + + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is equivalent to: + + >>> c.encrypt(a+b) + + This function does not add any padding to the plaintext. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + + :Return: + the encrypted data, as a byte string. + It is as long as *plaintext* with one exception: + when encrypting the first message chunk, + the encypted IV is prepended to the returned ciphertext. + """ + + res = self._cipher.encrypt(plaintext) + if not self._done_first_block: + res = self._encrypted_IV + res + self._done_first_block = True + return res + + def decrypt(self, ciphertext): + """Decrypt data with the key and the parameters set at initialization. + + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. + + The data to decrypt can be broken up in two or + more pieces and `decrypt` can be called multiple times. + + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is equivalent to: + + >>> c.decrypt(a+b) + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + + :Return: the decrypted data (byte string). + """ + + return self._cipher.decrypt(ciphertext) + + +def _create_openpgp_cipher(factory, **kwargs): + """Create a new block cipher, configured in OpenPGP mode. + + :Parameters: + factory : module + The module. + + :Keywords: + key : bytes/bytearray/memoryview + The secret key to use in the symmetric cipher. + + IV : bytes/bytearray/memoryview + The initialization vector to use for encryption or decryption. + + For encryption, the IV must be as long as the cipher block size. + + For decryption, it must be 2 bytes longer (it is actually the + *encrypted* IV which was prefixed to the ciphertext). + """ + + iv = kwargs.pop("IV", None) + IV = kwargs.pop("iv", None) + + if (None, None) == (iv, IV): + iv = get_random_bytes(factory.block_size) + if iv is not None: + if IV is not None: + raise TypeError("You must either use 'iv' or 'IV', not both") + else: + iv = IV + + try: + key = kwargs.pop("key") + except KeyError as e: + raise TypeError("Missing component: " + str(e)) + + return OpenPgpMode(factory, key, iv, kwargs) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_openpgp.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_openpgp.pyi new file mode 100644 index 000000000..14b810588 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_openpgp.pyi @@ -0,0 +1,20 @@ +from types import ModuleType +from typing import Union, Dict + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = ['OpenPgpMode'] + +class OpenPgpMode(object): + block_size: int + iv: Union[bytes, bytearray, memoryview] + IV: Union[bytes, bytearray, memoryview] + + def __init__(self, + factory: ModuleType, + key: Buffer, + iv: Buffer, + cipher_params: Dict) -> None: ... + def encrypt(self, plaintext: Buffer) -> bytes: ... + def decrypt(self, plaintext: Buffer) -> bytes: ... 
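Reviewer note: the asymmetry in the OpenPGP mode above is easy to miss: encryption takes a plain block-sized IV, while decryption takes the 2-byte-longer *encrypted* IV that the first `encrypt()` call prepends to the ciphertext. A minimal sketch follows, not part of the vendored source; it assumes the `AES` front-end from this same package (block size 16 bytes) accepts the `iv` keyword for `MODE_OPENPGP`, as `_create_openpgp_cipher` above suggests.

from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes

key = get_random_bytes(16)

# Encryption: IV is one block (16 bytes for AES); the first encrypt() call
# prepends the 18-byte encrypted IV to the returned ciphertext.
enc = AES.new(key, AES.MODE_OPENPGP, iv=get_random_bytes(16))
ciphertext = enc.encrypt(b"confidential payload")

# Decryption: split off the encrypted IV and pass it back as the IV.
eiv, body = ciphertext[:16 + 2], ciphertext[16 + 2:]
dec = AES.new(key, AES.MODE_OPENPGP, iv=eiv)
plaintext = dec.decrypt(body)
assert plaintext == b"confidential payload"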
+ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_siv.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_siv.py new file mode 100644 index 000000000..1805ec09f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_siv.py @@ -0,0 +1,392 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +Synthetic Initialization Vector (SIV) mode. +""" + +__all__ = ['SivMode'] + +from binascii import hexlify, unhexlify + +from Crypto.Util.py3compat import bord, _copy_bytes + +from Crypto.Util._raw_api import is_buffer + +from Crypto.Util.number import long_to_bytes, bytes_to_long +from Crypto.Protocol.KDF import _S2V +from Crypto.Hash import BLAKE2s +from Crypto.Random import get_random_bytes + + +class SivMode(object): + """Synthetic Initialization Vector (SIV). + + This is an Authenticated Encryption with Associated Data (`AEAD`_) mode. + It provides both confidentiality and authenticity. + + The header of the message may be left in the clear, if needed, and it will + still be subject to authentication. The decryption step tells the receiver + if the message comes from a source that really knowns the secret key. + Additionally, decryption detects if any part of the message - including the + header - has been modified or corrupted. + + Unlike other AEAD modes such as CCM, EAX or GCM, accidental reuse of a + nonce is not catastrophic for the confidentiality of the message. The only + effect is that an attacker can tell when the same plaintext (and same + associated data) is protected with the same key. + + The length of the MAC is fixed to the block size of the underlying cipher. + The key size is twice the length of the key of the underlying cipher. + + This mode is only available for AES ciphers. 
+ + +--------------------+---------------+-------------------+ + | Cipher | SIV MAC size | SIV key length | + | | (bytes) | (bytes) | + +====================+===============+===================+ + | AES-128 | 16 | 32 | + +--------------------+---------------+-------------------+ + | AES-192 | 16 | 48 | + +--------------------+---------------+-------------------+ + | AES-256 | 16 | 64 | + +--------------------+---------------+-------------------+ + + See `RFC5297`_ and the `original paper`__. + + .. _RFC5297: https://tools.ietf.org/html/rfc5297 + .. _AEAD: http://blog.cryptographyengineering.com/2012/05/how-to-choose-authenticated-encryption.html + .. __: http://www.cs.ucdavis.edu/~rogaway/papers/keywrap.pdf + + :undocumented: __init__ + """ + + def __init__(self, factory, key, nonce, kwargs): + + self.block_size = factory.block_size + """The block size of the underlying cipher, in bytes.""" + + self._factory = factory + + self._cipher_params = kwargs + + if len(key) not in (32, 48, 64): + raise ValueError("Incorrect key length (%d bytes)" % len(key)) + + if nonce is not None: + if not is_buffer(nonce): + raise TypeError("When provided, the nonce must be bytes, bytearray or memoryview") + + if len(nonce) == 0: + raise ValueError("When provided, the nonce must be non-empty") + + self.nonce = _copy_bytes(None, None, nonce) + """Public attribute is only available in case of non-deterministic + encryption.""" + + subkey_size = len(key) // 2 + + self._mac_tag = None # Cache for MAC tag + self._kdf = _S2V(key[:subkey_size], + ciphermod=factory, + cipher_params=self._cipher_params) + self._subkey_cipher = key[subkey_size:] + + # Purely for the purpose of verifying that cipher_params are OK + factory.new(key[:subkey_size], factory.MODE_ECB, **kwargs) + + # Allowed transitions after initialization + self._next = ["update", "encrypt", "decrypt", + "digest", "verify"] + + def _create_ctr_cipher(self, v): + """Create a new CTR cipher from V in SIV mode""" + + v_int = bytes_to_long(v) + q = v_int & 0xFFFFFFFFFFFFFFFF7FFFFFFF7FFFFFFF + return self._factory.new( + self._subkey_cipher, + self._factory.MODE_CTR, + initial_value=q, + nonce=b"", + **self._cipher_params) + + def update(self, component): + """Protect one associated data component + + For SIV, the associated data is a sequence (*vector*) of non-empty + byte strings (*components*). + + This method consumes the next component. It must be called + once for each of the components that constitue the associated data. + + Note that the components have clear boundaries, so that: + + >>> cipher.update(b"builtin") + >>> cipher.update(b"securely") + + is not equivalent to: + + >>> cipher.update(b"built") + >>> cipher.update(b"insecurely") + + If there is no associated data, this method must not be called. + + :Parameters: + component : bytes/bytearray/memoryview + The next associated data component. + """ + + if "update" not in self._next: + raise TypeError("update() can only be called" + " immediately after initialization") + + self._next = ["update", "encrypt", "decrypt", + "digest", "verify"] + + return self._kdf.update(component) + + def encrypt(self, plaintext): + """ + For SIV, encryption and MAC authentication must take place at the same + point. This method shall not be used. + + Use `encrypt_and_digest` instead. + """ + + raise TypeError("encrypt() not allowed for SIV mode." + " Use encrypt_and_digest() instead.") + + def decrypt(self, ciphertext): + """ + For SIV, decryption and verification must take place at the same + point. 
This method shall not be used. + + Use `decrypt_and_verify` instead. + """ + + raise TypeError("decrypt() not allowed for SIV mode." + " Use decrypt_and_verify() instead.") + + def digest(self): + """Compute the *binary* MAC tag. + + The caller invokes this function at the very end. + + This method returns the MAC that shall be sent to the receiver, + together with the ciphertext. + + :Return: the MAC, as a byte string. + """ + + if "digest" not in self._next: + raise TypeError("digest() cannot be called when decrypting" + " or validating a message") + self._next = ["digest"] + if self._mac_tag is None: + self._mac_tag = self._kdf.derive() + return self._mac_tag + + def hexdigest(self): + """Compute the *printable* MAC tag. + + This method is like `digest`. + + :Return: the MAC, as a hexadecimal string. + """ + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def verify(self, received_mac_tag): + """Validate the *binary* MAC tag. + + The caller invokes this function at the very end. + + This method checks if the decrypted message is indeed valid + (that is, if the key is correct) and it has not been + tampered with while in transit. + + :Parameters: + received_mac_tag : bytes/bytearray/memoryview + This is the *binary* MAC, as received from the sender. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + if "verify" not in self._next: + raise TypeError("verify() cannot be called" + " when encrypting a message") + self._next = ["verify"] + + if self._mac_tag is None: + self._mac_tag = self._kdf.derive() + + secret = get_random_bytes(16) + + mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=self._mac_tag) + mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=received_mac_tag) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + def hexverify(self, hex_mac_tag): + """Validate the *printable* MAC tag. + + This method is like `verify`. + + :Parameters: + hex_mac_tag : string + This is the *printable* MAC, as received from the sender. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + self.verify(unhexlify(hex_mac_tag)) + + def encrypt_and_digest(self, plaintext, output=None): + """Perform encrypt() and digest() in one step. + + :Parameters: + plaintext : bytes/bytearray/memoryview + The piece of data to encrypt. + :Keywords: + output : bytearray/memoryview + The location where the ciphertext must be written to. + If ``None``, the ciphertext is returned. + :Return: + a tuple with two items: + + - the ciphertext, as ``bytes`` + - the MAC tag, as ``bytes`` + + The first item becomes ``None`` when the ``output`` parameter + specified a location for the result. + """ + + if "encrypt" not in self._next: + raise TypeError("encrypt() can only be called after" + " initialization or an update()") + + self._next = ["digest"] + + # Compute V (MAC) + if hasattr(self, 'nonce'): + self._kdf.update(self.nonce) + self._kdf.update(plaintext) + self._mac_tag = self._kdf.derive() + + cipher = self._create_ctr_cipher(self._mac_tag) + + return cipher.encrypt(plaintext, output=output), self._mac_tag + + def decrypt_and_verify(self, ciphertext, mac_tag, output=None): + """Perform decryption and verification in one step. + + A cipher object is stateful: once you have decrypted a message + you cannot decrypt (or encrypt) another message with the same + object. 
+ + You cannot reuse an object for encrypting + or decrypting other data with the same key. + + This function does not remove any padding from the plaintext. + + :Parameters: + ciphertext : bytes/bytearray/memoryview + The piece of data to decrypt. + It can be of any length. + mac_tag : bytes/bytearray/memoryview + This is the *binary* MAC, as received from the sender. + :Keywords: + output : bytearray/memoryview + The location where the plaintext must be written to. + If ``None``, the plaintext is returned. + :Return: the plaintext as ``bytes`` or ``None`` when the ``output`` + parameter specified a location for the result. + :Raises ValueError: + if the MAC does not match. The message has been tampered with + or the key is incorrect. + """ + + if "decrypt" not in self._next: + raise TypeError("decrypt() can only be called" + " after initialization or an update()") + self._next = ["verify"] + + # Take the MAC and start the cipher for decryption + self._cipher = self._create_ctr_cipher(mac_tag) + + plaintext = self._cipher.decrypt(ciphertext, output=output) + + if hasattr(self, 'nonce'): + self._kdf.update(self.nonce) + self._kdf.update(plaintext if output is None else output) + self.verify(mac_tag) + + return plaintext + + +def _create_siv_cipher(factory, **kwargs): + """Create a new block cipher, configured in + Synthetic Initializaton Vector (SIV) mode. + + :Parameters: + + factory : object + A symmetric cipher module from `Crypto.Cipher` + (like `Crypto.Cipher.AES`). + + :Keywords: + + key : bytes/bytearray/memoryview + The secret key to use in the symmetric cipher. + It must be 32, 48 or 64 bytes long. + If AES is the chosen cipher, the variants *AES-128*, + *AES-192* and or *AES-256* will be used internally. + + nonce : bytes/bytearray/memoryview + For deterministic encryption, it is not present. + + Otherwise, it is a value that must never be reused + for encrypting message under this key. + + There are no restrictions on its length, + but it is recommended to use at least 16 bytes. + """ + + try: + key = kwargs.pop("key") + except KeyError as e: + raise TypeError("Missing parameter: " + str(e)) + + nonce = kwargs.pop("nonce", None) + + return SivMode(factory, key, nonce, kwargs) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_siv.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_siv.pyi new file mode 100644 index 000000000..2934f23de --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_mode_siv.pyi @@ -0,0 +1,38 @@ +from types import ModuleType +from typing import Union, Tuple, Dict, Optional, overload + +Buffer = Union[bytes, bytearray, memoryview] + +__all__ = ['SivMode'] + +class SivMode(object): + block_size: int + nonce: bytes + + def __init__(self, + factory: ModuleType, + key: Buffer, + nonce: Buffer, + kwargs: Dict) -> None: ... + + def update(self, component: Buffer) -> SivMode: ... + + def encrypt(self, plaintext: Buffer) -> bytes: ... + def decrypt(self, plaintext: Buffer) -> bytes: ... + + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, received_mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + + @overload + def encrypt_and_digest(self, + plaintext: Buffer) -> Tuple[bytes, bytes]: ... + @overload + def encrypt_and_digest(self, + plaintext: Buffer, + output: Buffer) -> Tuple[None, bytes]: ... 
+ def decrypt_and_verify(self, + ciphertext: Buffer, + received_mac_tag: Buffer, + output: Optional[Union[bytearray, memoryview]] = ...) -> bytes: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_pkcs1_decode.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_pkcs1_decode.abi3.so new file mode 100755 index 000000000..71cd31165 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_pkcs1_decode.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_pkcs1_oaep_decode.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_pkcs1_oaep_decode.py new file mode 100644 index 000000000..fc0752829 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_pkcs1_oaep_decode.py @@ -0,0 +1,41 @@ +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, c_size_t, + c_uint8_ptr) + + +_raw_pkcs1_decode = load_pycryptodome_raw_lib("Crypto.Cipher._pkcs1_decode", + """ + int pkcs1_decode(const uint8_t *em, size_t len_em, + const uint8_t *sentinel, size_t len_sentinel, + size_t expected_pt_len, + uint8_t *output); + + int oaep_decode(const uint8_t *em, + size_t em_len, + const uint8_t *lHash, + size_t hLen, + const uint8_t *db, + size_t db_len); + """) + + +def pkcs1_decode(em, sentinel, expected_pt_len, output): + if len(em) != len(output): + raise ValueError("Incorrect output length") + + ret = _raw_pkcs1_decode.pkcs1_decode(c_uint8_ptr(em), + c_size_t(len(em)), + c_uint8_ptr(sentinel), + c_size_t(len(sentinel)), + c_size_t(expected_pt_len), + c_uint8_ptr(output)) + return ret + + +def oaep_decode(em, lHash, db): + ret = _raw_pkcs1_decode.oaep_decode(c_uint8_ptr(em), + c_size_t(len(em)), + c_uint8_ptr(lHash), + c_size_t(len(lHash)), + c_uint8_ptr(db), + c_size_t(len(db))) + return ret diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_aes.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_aes.abi3.so new file mode 100755 index 000000000..b37dd9513 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_aes.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_aesni.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_aesni.abi3.so new file mode 100755 index 000000000..5f08fe70a Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_aesni.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_arc2.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_arc2.abi3.so new file mode 100755 index 000000000..2287d2eba Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_arc2.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_blowfish.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_blowfish.abi3.so new file mode 100755 index 000000000..ad77ccb14 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_blowfish.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_cast.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_cast.abi3.so new file mode 100755 index 000000000..730e178e0 Binary files /dev/null and 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_cast.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_cbc.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_cbc.abi3.so new file mode 100755 index 000000000..847d82408 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_cbc.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_cfb.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_cfb.abi3.so new file mode 100755 index 000000000..2c9b852c4 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_cfb.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_ctr.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_ctr.abi3.so new file mode 100755 index 000000000..761cd3627 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_ctr.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_des.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_des.abi3.so new file mode 100755 index 000000000..7f1f8246c Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_des.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_des3.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_des3.abi3.so new file mode 100755 index 000000000..b475c52c6 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_des3.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_ecb.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_ecb.abi3.so new file mode 100755 index 000000000..91e81262d Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_ecb.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_eksblowfish.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_eksblowfish.abi3.so new file mode 100755 index 000000000..c3c45d52c Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_eksblowfish.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_ocb.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_ocb.abi3.so new file mode 100755 index 000000000..9685971ee Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_ocb.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_ofb.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_ofb.abi3.so new file mode 100755 index 000000000..a4a629a8e Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Cipher/_raw_ofb.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/BLAKE2b.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/BLAKE2b.py new file mode 100644 index 000000000..e3ec46e65 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/BLAKE2b.py @@ -0,0 +1,247 @@ +# 
=================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from binascii import unhexlify + +from Crypto.Util.py3compat import bord, tobytes + +from Crypto.Random import get_random_bytes +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_blake2b_lib = load_pycryptodome_raw_lib("Crypto.Hash._BLAKE2b", + """ + int blake2b_init(void **state, + const uint8_t *key, + size_t key_size, + size_t digest_size); + int blake2b_destroy(void *state); + int blake2b_update(void *state, + const uint8_t *buf, + size_t len); + int blake2b_digest(const void *state, + uint8_t digest[64]); + int blake2b_copy(const void *src, void *dst); + """) + + +class BLAKE2b_Hash(object): + """A BLAKE2b hash object. + Do not instantiate directly. Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The internal block size of the hash algorithm in bytes. + block_size = 64 + + def __init__(self, data, key, digest_bytes, update_after_digest): + + # The size of the resulting hash in bytes. + self.digest_size = digest_bytes + + self._update_after_digest = update_after_digest + self._digest_done = False + + # See https://tools.ietf.org/html/rfc7693 + if digest_bytes in (20, 32, 48, 64) and not key: + self.oid = "1.3.6.1.4.1.1722.12.2.1." + str(digest_bytes) + + state = VoidPointer() + result = _raw_blake2b_lib.blake2b_init(state.address_of(), + c_uint8_ptr(key), + c_size_t(len(key)), + c_size_t(digest_bytes) + ) + if result: + raise ValueError("Error %d while instantiating BLAKE2b" % result) + self._state = SmartPointer(state.get(), + _raw_blake2b_lib.blake2b_destroy) + if data: + self.update(data) + + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. 
+ + Args: + data (bytes/bytearray/memoryview): The next chunk of the message being hashed. + """ + + if self._digest_done and not self._update_after_digest: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + result = _raw_blake2b_lib.blake2b_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while hashing BLAKE2b data" % result) + return self + + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(64) + result = _raw_blake2b_lib.blake2b_digest(self._state.get(), + bfr) + if result: + raise ValueError("Error %d while creating BLAKE2b digest" % result) + + self._digest_done = True + + return get_raw_buffer(bfr)[:self.digest_size] + + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in tuple(self.digest())]) + + + def verify(self, mac_tag): + """Verify that a given **binary** MAC (computed by another party) + is valid. + + Args: + mac_tag (bytes/bytearray/memoryview): the expected MAC of the message. + + Raises: + ValueError: if the MAC does not match. It means that the message + has been tampered with or that the MAC key is incorrect. + """ + + secret = get_random_bytes(16) + + mac1 = new(digest_bits=160, key=secret, data=mac_tag) + mac2 = new(digest_bits=160, key=secret, data=self.digest()) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + + def hexverify(self, hex_mac_tag): + """Verify that a given **printable** MAC (computed by another party) + is valid. + + Args: + hex_mac_tag (string): the expected MAC of the message, as a hexadecimal string. + + Raises: + ValueError: if the MAC does not match. It means that the message + has been tampered with or that the MAC key is incorrect. + """ + + self.verify(unhexlify(tobytes(hex_mac_tag))) + + + def new(self, **kwargs): + """Return a new instance of a BLAKE2b hash object. + See :func:`new`. + """ + + if "digest_bytes" not in kwargs and "digest_bits" not in kwargs: + kwargs["digest_bytes"] = self.digest_size + + return new(**kwargs) + + +def new(**kwargs): + """Create a new hash object. + + Args: + data (bytes/bytearray/memoryview): + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`BLAKE2b_Hash.update`. + digest_bytes (integer): + Optional. The size of the digest, in bytes (1 to 64). Default is 64. + digest_bits (integer): + Optional and alternative to ``digest_bytes``. + The size of the digest, in bits (8 to 512, in steps of 8). + Default is 512. + key (bytes/bytearray/memoryview): + Optional. The key to use to compute the MAC (1 to 64 bytes). + If not specified, no key will be used. + update_after_digest (boolean): + Optional. By default, a hash object cannot be updated anymore after + the digest is computed. When this flag is ``True``, such check + is no longer enforced. 
+ + Returns: + A :class:`BLAKE2b_Hash` hash object + """ + + data = kwargs.pop("data", None) + update_after_digest = kwargs.pop("update_after_digest", False) + + digest_bytes = kwargs.pop("digest_bytes", None) + digest_bits = kwargs.pop("digest_bits", None) + if None not in (digest_bytes, digest_bits): + raise TypeError("Only one digest parameter must be provided") + if (None, None) == (digest_bytes, digest_bits): + digest_bytes = 64 + if digest_bytes is not None: + if not (1 <= digest_bytes <= 64): + raise ValueError("'digest_bytes' not in range 1..64") + else: + if not (8 <= digest_bits <= 512) or (digest_bits % 8): + raise ValueError("'digest_bits' not in range 8..512, " + "with steps of 8") + digest_bytes = digest_bits // 8 + + key = kwargs.pop("key", b"") + if len(key) > 64: + raise ValueError("BLAKE2b key cannot exceed 64 bytes") + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return BLAKE2b_Hash(data, key, digest_bytes, update_after_digest) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/BLAKE2b.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/BLAKE2b.pyi new file mode 100644 index 000000000..d37c37497 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/BLAKE2b.pyi @@ -0,0 +1,32 @@ +from typing import Any, Union +from types import ModuleType + +Buffer = Union[bytes, bytearray, memoryview] + +class BLAKE2b_Hash(object): + block_size: int + digest_size: int + oid: str + + def __init__(self, + data: Buffer, + key: Buffer, + digest_bytes: bytes, + update_after_digest: bool) -> None: ... + def update(self, data: Buffer) -> BLAKE2b_Hash: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + def new(self, + data: Buffer = ..., + digest_bytes: int = ..., + digest_bits: int = ..., + key: Buffer = ..., + update_after_digest: bool = ...) -> BLAKE2b_Hash: ... + +def new(data: Buffer = ..., + digest_bytes: int = ..., + digest_bits: int = ..., + key: Buffer = ..., + update_after_digest: bool = ...) -> BLAKE2b_Hash: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/BLAKE2s.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/BLAKE2s.py new file mode 100644 index 000000000..192d14594 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/BLAKE2s.py @@ -0,0 +1,247 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from binascii import unhexlify + +from Crypto.Util.py3compat import bord, tobytes + +from Crypto.Random import get_random_bytes +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_blake2s_lib = load_pycryptodome_raw_lib("Crypto.Hash._BLAKE2s", + """ + int blake2s_init(void **state, + const uint8_t *key, + size_t key_size, + size_t digest_size); + int blake2s_destroy(void *state); + int blake2s_update(void *state, + const uint8_t *buf, + size_t len); + int blake2s_digest(const void *state, + uint8_t digest[32]); + int blake2s_copy(const void *src, void *dst); + """) + + +class BLAKE2s_Hash(object): + """A BLAKE2s hash object. + Do not instantiate directly. Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The internal block size of the hash algorithm in bytes. + block_size = 32 + + def __init__(self, data, key, digest_bytes, update_after_digest): + + # The size of the resulting hash in bytes. + self.digest_size = digest_bytes + + self._update_after_digest = update_after_digest + self._digest_done = False + + # See https://tools.ietf.org/html/rfc7693 + if digest_bytes in (16, 20, 28, 32) and not key: + self.oid = "1.3.6.1.4.1.1722.12.2.2." + str(digest_bytes) + + state = VoidPointer() + result = _raw_blake2s_lib.blake2s_init(state.address_of(), + c_uint8_ptr(key), + c_size_t(len(key)), + c_size_t(digest_bytes) + ) + if result: + raise ValueError("Error %d while instantiating BLAKE2s" % result) + self._state = SmartPointer(state.get(), + _raw_blake2s_lib.blake2s_destroy) + if data: + self.update(data) + + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + if self._digest_done and not self._update_after_digest: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + result = _raw_blake2s_lib.blake2s_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while hashing BLAKE2s data" % result) + return self + + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. 
+ :rtype: byte string + """ + + bfr = create_string_buffer(32) + result = _raw_blake2s_lib.blake2s_digest(self._state.get(), + bfr) + if result: + raise ValueError("Error %d while creating BLAKE2s digest" % result) + + self._digest_done = True + + return get_raw_buffer(bfr)[:self.digest_size] + + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in tuple(self.digest())]) + + + def verify(self, mac_tag): + """Verify that a given **binary** MAC (computed by another party) + is valid. + + Args: + mac_tag (byte string/byte array/memoryview): the expected MAC of the message. + + Raises: + ValueError: if the MAC does not match. It means that the message + has been tampered with or that the MAC key is incorrect. + """ + + secret = get_random_bytes(16) + + mac1 = new(digest_bits=160, key=secret, data=mac_tag) + mac2 = new(digest_bits=160, key=secret, data=self.digest()) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + + def hexverify(self, hex_mac_tag): + """Verify that a given **printable** MAC (computed by another party) + is valid. + + Args: + hex_mac_tag (string): the expected MAC of the message, as a hexadecimal string. + + Raises: + ValueError: if the MAC does not match. It means that the message + has been tampered with or that the MAC key is incorrect. + """ + + self.verify(unhexlify(tobytes(hex_mac_tag))) + + + def new(self, **kwargs): + """Return a new instance of a BLAKE2s hash object. + See :func:`new`. + """ + + if "digest_bytes" not in kwargs and "digest_bits" not in kwargs: + kwargs["digest_bytes"] = self.digest_size + + return new(**kwargs) + + +def new(**kwargs): + """Create a new hash object. + + Args: + data (byte string/byte array/memoryview): + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`BLAKE2s_Hash.update`. + digest_bytes (integer): + Optional. The size of the digest, in bytes (1 to 32). Default is 32. + digest_bits (integer): + Optional and alternative to ``digest_bytes``. + The size of the digest, in bits (8 to 256, in steps of 8). + Default is 256. + key (byte string): + Optional. The key to use to compute the MAC (1 to 64 bytes). + If not specified, no key will be used. + update_after_digest (boolean): + Optional. By default, a hash object cannot be updated anymore after + the digest is computed. When this flag is ``True``, such check + is no longer enforced. 
+ + Returns: + A :class:`BLAKE2s_Hash` hash object + """ + + data = kwargs.pop("data", None) + update_after_digest = kwargs.pop("update_after_digest", False) + + digest_bytes = kwargs.pop("digest_bytes", None) + digest_bits = kwargs.pop("digest_bits", None) + if None not in (digest_bytes, digest_bits): + raise TypeError("Only one digest parameter must be provided") + if (None, None) == (digest_bytes, digest_bits): + digest_bytes = 32 + if digest_bytes is not None: + if not (1 <= digest_bytes <= 32): + raise ValueError("'digest_bytes' not in range 1..32") + else: + if not (8 <= digest_bits <= 256) or (digest_bits % 8): + raise ValueError("'digest_bits' not in range 8..256, " + "with steps of 8") + digest_bytes = digest_bits // 8 + + key = kwargs.pop("key", b"") + if len(key) > 32: + raise ValueError("BLAKE2s key cannot exceed 32 bytes") + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return BLAKE2s_Hash(data, key, digest_bytes, update_after_digest) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/BLAKE2s.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/BLAKE2s.pyi new file mode 100644 index 000000000..374b3a43e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/BLAKE2s.pyi @@ -0,0 +1,26 @@ +from typing import Any, Union + +Buffer = Union[bytes, bytearray, memoryview] + +class BLAKE2s_Hash(object): + block_size: int + digest_size: int + oid: str + + def __init__(self, + data: Buffer, + key: Buffer, + digest_bytes: bytes, + update_after_digest: bool) -> None: ... + def update(self, data: Buffer) -> BLAKE2s_Hash: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + def new(self, **kwargs: Any) -> BLAKE2s_Hash: ... + +def new(data: Buffer = ..., + digest_bytes: int = ..., + digest_bits: int = ..., + key: Buffer = ..., + update_after_digest: bool = ...) -> BLAKE2s_Hash: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/CMAC.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/CMAC.py new file mode 100644 index 000000000..764be8617 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/CMAC.py @@ -0,0 +1,306 @@ +# -*- coding: utf-8 -*- +# +# Hash/CMAC.py - Implements the CMAC algorithm +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +from binascii import unhexlify + +from Crypto.Hash import BLAKE2s +from Crypto.Util.strxor import strxor +from Crypto.Util.number import long_to_bytes, bytes_to_long +from Crypto.Util.py3compat import bord, tobytes, _copy_bytes +from Crypto.Random import get_random_bytes + + +# The size of the authentication tag produced by the MAC. +digest_size = None + + +def _shift_bytes(bs, xor_lsb=0): + num = (bytes_to_long(bs) << 1) ^ xor_lsb + return long_to_bytes(num, len(bs))[-len(bs):] + + +class CMAC(object): + """A CMAC hash object. + Do not instantiate directly. Use the :func:`new` function. + + :ivar digest_size: the size in bytes of the resulting MAC tag + :vartype digest_size: integer + """ + + digest_size = None + + def __init__(self, key, msg, ciphermod, cipher_params, mac_len, + update_after_digest): + + self.digest_size = mac_len + + self._key = _copy_bytes(None, None, key) + self._factory = ciphermod + self._cipher_params = cipher_params + self._block_size = bs = ciphermod.block_size + self._mac_tag = None + self._update_after_digest = update_after_digest + + # Section 5.3 of NIST SP 800 38B and Appendix B + if bs == 8: + const_Rb = 0x1B + self._max_size = 8 * (2 ** 21) + elif bs == 16: + const_Rb = 0x87 + self._max_size = 16 * (2 ** 48) + else: + raise TypeError("CMAC requires a cipher with a block size" + " of 8 or 16 bytes, not %d" % bs) + + # Compute sub-keys + zero_block = b'\x00' * bs + self._ecb = ciphermod.new(key, + ciphermod.MODE_ECB, + **self._cipher_params) + L = self._ecb.encrypt(zero_block) + if bord(L[0]) & 0x80: + self._k1 = _shift_bytes(L, const_Rb) + else: + self._k1 = _shift_bytes(L) + if bord(self._k1[0]) & 0x80: + self._k2 = _shift_bytes(self._k1, const_Rb) + else: + self._k2 = _shift_bytes(self._k1) + + # Initialize CBC cipher with zero IV + self._cbc = ciphermod.new(key, + ciphermod.MODE_CBC, + zero_block, + **self._cipher_params) + + # Cache for outstanding data to authenticate + self._cache = bytearray(bs) + self._cache_n = 0 + + # Last piece of ciphertext produced + self._last_ct = zero_block + + # Last block that was encrypted with AES + self._last_pt = None + + # Counter for total message size + self._data_size = 0 + + if msg: + self.update(msg) + + def update(self, msg): + """Authenticate the next chunk of message. 
+ + Args: + data (byte string/byte array/memoryview): The next chunk of data + """ + + if self._mac_tag is not None and not self._update_after_digest: + raise TypeError("update() cannot be called after digest() or verify()") + + self._data_size += len(msg) + bs = self._block_size + + if self._cache_n > 0: + filler = min(bs - self._cache_n, len(msg)) + self._cache[self._cache_n:self._cache_n+filler] = msg[:filler] + self._cache_n += filler + + if self._cache_n < bs: + return self + + msg = memoryview(msg)[filler:] + self._update(self._cache) + self._cache_n = 0 + + remain = len(msg) % bs + if remain > 0: + self._update(msg[:-remain]) + self._cache[:remain] = msg[-remain:] + else: + self._update(msg) + self._cache_n = remain + return self + + def _update(self, data_block): + """Update a block aligned to the block boundary""" + + bs = self._block_size + assert len(data_block) % bs == 0 + + if len(data_block) == 0: + return + + ct = self._cbc.encrypt(data_block) + if len(data_block) == bs: + second_last = self._last_ct + else: + second_last = ct[-bs*2:-bs] + self._last_ct = ct[-bs:] + self._last_pt = strxor(second_last, data_block[-bs:]) + + def copy(self): + """Return a copy ("clone") of the CMAC object. + + The copy will have the same internal state as the original CMAC + object. + This can be used to efficiently compute the MAC tag of byte + strings that share a common initial substring. + + :return: An :class:`CMAC` + """ + + obj = self.__new__(CMAC) + obj.__dict__ = self.__dict__.copy() + obj._cbc = self._factory.new(self._key, + self._factory.MODE_CBC, + self._last_ct, + **self._cipher_params) + obj._cache = self._cache[:] + obj._last_ct = self._last_ct[:] + return obj + + def digest(self): + """Return the **binary** (non-printable) MAC tag of the message + that has been authenticated so far. + + :return: The MAC tag, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bs = self._block_size + + if self._mac_tag is not None and not self._update_after_digest: + return self._mac_tag + + if self._data_size > self._max_size: + raise ValueError("MAC is unsafe for this message") + + if self._cache_n == 0 and self._data_size > 0: + # Last block was full + pt = strxor(self._last_pt, self._k1) + else: + # Last block is partial (or message length is zero) + partial = self._cache[:] + partial[self._cache_n:] = b'\x80' + b'\x00' * (bs - self._cache_n - 1) + pt = strxor(strxor(self._last_ct, partial), self._k2) + + self._mac_tag = self._ecb.encrypt(pt)[:self.digest_size] + + return self._mac_tag + + def hexdigest(self): + """Return the **printable** MAC tag of the message authenticated so far. + + :return: The MAC tag, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) + for x in tuple(self.digest())]) + + def verify(self, mac_tag): + """Verify that a given **binary** MAC (computed by another party) + is valid. + + Args: + mac_tag (byte string/byte array/memoryview): the expected MAC of the message. + + Raises: + ValueError: if the MAC does not match. It means that the message + has been tampered with or that the MAC key is incorrect. 
+ """ + + secret = get_random_bytes(16) + + mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=mac_tag) + mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=self.digest()) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + def hexverify(self, hex_mac_tag): + """Verify that a given **printable** MAC (computed by another party) + is valid. + + Args: + hex_mac_tag (string): the expected MAC of the message, as a hexadecimal string. + + Raises: + ValueError: if the MAC does not match. It means that the message + has been tampered with or that the MAC key is incorrect. + """ + + self.verify(unhexlify(tobytes(hex_mac_tag))) + + +def new(key, msg=None, ciphermod=None, cipher_params=None, mac_len=None, + update_after_digest=False): + """Create a new MAC object. + + Args: + key (byte string/byte array/memoryview): + key for the CMAC object. + The key must be valid for the underlying cipher algorithm. + For instance, it must be 16 bytes long for AES-128. + ciphermod (module): + A cipher module from :mod:`Crypto.Cipher`. + The cipher's block size has to be 128 bits, + like :mod:`Crypto.Cipher.AES`, to reduce the probability + of collisions. + msg (byte string/byte array/memoryview): + Optional. The very first chunk of the message to authenticate. + It is equivalent to an early call to `CMAC.update`. Optional. + cipher_params (dict): + Optional. A set of parameters to use when instantiating a cipher + object. + mac_len (integer): + Length of the MAC, in bytes. + It must be at least 4 bytes long. + The default (and recommended) length matches the size of a cipher block. + update_after_digest (boolean): + Optional. By default, a hash object cannot be updated anymore after + the digest is computed. When this flag is ``True``, such check + is no longer enforced. + Returns: + A :class:`CMAC` object + """ + + if ciphermod is None: + raise TypeError("ciphermod must be specified (try AES)") + + cipher_params = {} if cipher_params is None else dict(cipher_params) + + if mac_len is None: + mac_len = ciphermod.block_size + + if mac_len < 4: + raise ValueError("MAC tag length must be at least 4 bytes long") + + if mac_len > ciphermod.block_size: + raise ValueError("MAC tag length cannot be larger than a cipher block (%d) bytes" % ciphermod.block_size) + + return CMAC(key, msg, ciphermod, cipher_params, mac_len, + update_after_digest) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/CMAC.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/CMAC.pyi new file mode 100644 index 000000000..acdf0553e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/CMAC.pyi @@ -0,0 +1,30 @@ +from types import ModuleType +from typing import Union, Dict, Any + +Buffer = Union[bytes, bytearray, memoryview] + +digest_size: int + +class CMAC(object): + digest_size: int + + def __init__(self, + key: Buffer, + msg: Buffer, + ciphermod: ModuleType, + cipher_params: Dict[str, Any], + mac_len: int, update_after_digest: bool) -> None: ... + def update(self, data: Buffer) -> CMAC: ... + def copy(self) -> CMAC: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + + +def new(key: Buffer, + msg: Buffer = ..., + ciphermod: ModuleType = ..., + cipher_params: Dict[str, Any] = ..., + mac_len: int = ..., + update_after_digest: bool = ...) -> CMAC: ... 
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/HMAC.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/HMAC.py new file mode 100644 index 000000000..7124d3e19 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/HMAC.py @@ -0,0 +1,238 @@ +# +# HMAC.py - Implements the HMAC algorithm as described by RFC 2104. +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.py3compat import bord, tobytes + +from binascii import unhexlify + +from Crypto.Hash import BLAKE2s +from Crypto.Util.strxor import strxor +from Crypto.Random import get_random_bytes + +__all__ = ['new', 'HMAC'] + +_hash2hmac_oid = { + '1.3.14.3.2.26': '1.2.840.113549.2.7', # SHA-1 + '2.16.840.1.101.3.4.2.4': '1.2.840.113549.2.8', # SHA-224 + '2.16.840.1.101.3.4.2.1': '1.2.840.113549.2.9', # SHA-256 + '2.16.840.1.101.3.4.2.2': '1.2.840.113549.2.10', # SHA-384 + '2.16.840.1.101.3.4.2.3': '1.2.840.113549.2.11', # SHA-512 + '2.16.840.1.101.3.4.2.5': '1.2.840.113549.2.12', # SHA-512_224 + '2.16.840.1.101.3.4.2.6': '1.2.840.113549.2.13', # SHA-512_256 + '2.16.840.1.101.3.4.2.7': '2.16.840.1.101.3.4.2.13', # SHA-3 224 + '2.16.840.1.101.3.4.2.8': '2.16.840.1.101.3.4.2.14', # SHA-3 256 + '2.16.840.1.101.3.4.2.9': '2.16.840.1.101.3.4.2.15', # SHA-3 384 + '2.16.840.1.101.3.4.2.10': '2.16.840.1.101.3.4.2.16', # SHA-3 512 +} + +_hmac2hash_oid = {v: k for k, v in _hash2hmac_oid.items()} + + +class HMAC(object): + """An HMAC hash object. + Do not instantiate directly. Use the :func:`new` function. + + :ivar digest_size: the size in bytes of the resulting MAC tag + :vartype digest_size: integer + + :ivar oid: the ASN.1 object ID of the HMAC algorithm. + Only present if the algorithm was officially assigned one. 
+ """ + + def __init__(self, key, msg=b"", digestmod=None): + + if digestmod is None: + from Crypto.Hash import MD5 + digestmod = MD5 + + if msg is None: + msg = b"" + + # Size of the MAC tag + self.digest_size = digestmod.digest_size + + self._digestmod = digestmod + + # Hash OID --> HMAC OID + try: + self.oid = _hash2hmac_oid[digestmod.oid] + except (KeyError, AttributeError): + pass + + if isinstance(key, memoryview): + key = key.tobytes() + + try: + if len(key) <= digestmod.block_size: + # Step 1 or 2 + key_0 = key + b"\x00" * (digestmod.block_size - len(key)) + else: + # Step 3 + hash_k = digestmod.new(key).digest() + key_0 = hash_k + b"\x00" * (digestmod.block_size - len(hash_k)) + except AttributeError: + # Not all hash types have "block_size" + raise ValueError("Hash type incompatible to HMAC") + + # Step 4 + key_0_ipad = strxor(key_0, b"\x36" * len(key_0)) + + # Start step 5 and 6 + self._inner = digestmod.new(key_0_ipad) + self._inner.update(msg) + + # Step 7 + key_0_opad = strxor(key_0, b"\x5c" * len(key_0)) + + # Start step 8 and 9 + self._outer = digestmod.new(key_0_opad) + + def update(self, msg): + """Authenticate the next chunk of message. + + Args: + data (byte string/byte array/memoryview): The next chunk of data + """ + + self._inner.update(msg) + return self + + def _pbkdf2_hmac_assist(self, first_digest, iterations): + """Carry out the expensive inner loop for PBKDF2-HMAC""" + + result = self._digestmod._pbkdf2_hmac_assist( + self._inner, + self._outer, + first_digest, + iterations) + return result + + def copy(self): + """Return a copy ("clone") of the HMAC object. + + The copy will have the same internal state as the original HMAC + object. + This can be used to efficiently compute the MAC tag of byte + strings that share a common initial substring. + + :return: An :class:`HMAC` + """ + + new_hmac = HMAC(b"fake key", digestmod=self._digestmod) + + # Syncronize the state + new_hmac._inner = self._inner.copy() + new_hmac._outer = self._outer.copy() + + return new_hmac + + def digest(self): + """Return the **binary** (non-printable) MAC tag of the message + authenticated so far. + + :return: The MAC tag digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + frozen_outer_hash = self._outer.copy() + frozen_outer_hash.update(self._inner.digest()) + return frozen_outer_hash.digest() + + def verify(self, mac_tag): + """Verify that a given **binary** MAC (computed by another party) + is valid. + + Args: + mac_tag (byte string/byte string/memoryview): the expected MAC of the message. + + Raises: + ValueError: if the MAC does not match. It means that the message + has been tampered with or that the MAC key is incorrect. + """ + + secret = get_random_bytes(16) + + mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=mac_tag) + mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=self.digest()) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + def hexdigest(self): + """Return the **printable** MAC tag of the message authenticated so far. + + :return: The MAC tag, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) + for x in tuple(self.digest())]) + + def hexverify(self, hex_mac_tag): + """Verify that a given **printable** MAC (computed by another party) + is valid. + + Args: + hex_mac_tag (string): the expected MAC of the message, + as a hexadecimal string. + + Raises: + ValueError: if the MAC does not match. 
It means that the message + has been tampered with or that the MAC key is incorrect. + """ + + self.verify(unhexlify(tobytes(hex_mac_tag))) + + +def new(key, msg=b"", digestmod=None): + """Create a new MAC object. + + Args: + key (bytes/bytearray/memoryview): + key for the MAC object. + It must be long enough to match the expected security level of the + MAC. + msg (bytes/bytearray/memoryview): + Optional. The very first chunk of the message to authenticate. + It is equivalent to an early call to :meth:`HMAC.update`. + digestmod (module): + The hash to use to implement the HMAC. + Default is :mod:`Crypto.Hash.MD5`. + + Returns: + An :class:`HMAC` object + """ + + return HMAC(key, msg, digestmod) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/HMAC.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/HMAC.pyi new file mode 100644 index 000000000..b577230f5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/HMAC.pyi @@ -0,0 +1,25 @@ +from types import ModuleType +from typing import Union, Dict + +Buffer = Union[bytes, bytearray, memoryview] + +digest_size: int + +class HMAC(object): + digest_size: int + + def __init__(self, + key: Buffer, + msg: Buffer, + digestmod: ModuleType) -> None: ... + def update(self, msg: Buffer) -> HMAC: ... + def copy(self) -> HMAC: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + + +def new(key: Buffer, + msg: Buffer = ..., + digestmod: ModuleType = ...) -> HMAC: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KMAC128.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KMAC128.py new file mode 100644 index 000000000..05061fc2e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KMAC128.py @@ -0,0 +1,179 @@ +# =================================================================== +# +# Copyright (c) 2021, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +from binascii import unhexlify + +from Crypto.Util.py3compat import bord, tobytes, is_bytes +from Crypto.Random import get_random_bytes + +from . import cSHAKE128, SHA3_256 +from .cSHAKE128 import _bytepad, _encode_str, _right_encode + + +class KMAC_Hash(object): + """A KMAC hash object. + Do not instantiate directly. + Use the :func:`new` function. + """ + + def __init__(self, data, key, mac_len, custom, + oid_variant, cshake, rate): + + # See https://tools.ietf.org/html/rfc8702 + self.oid = "2.16.840.1.101.3.4.2." + oid_variant + self.digest_size = mac_len + + self._mac = None + + partial_newX = _bytepad(_encode_str(tobytes(key)), rate) + self._cshake = cshake._new(partial_newX, custom, b"KMAC") + + if data: + self._cshake.update(data) + + def update(self, data): + """Authenticate the next chunk of message. + + Args: + data (bytes/bytearray/memoryview): The next chunk of the message to + authenticate. + """ + + if self._mac: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + self._cshake.update(data) + return self + + def digest(self): + """Return the **binary** (non-printable) MAC tag of the message. + + :return: The MAC tag. Binary form. + :rtype: byte string + """ + + if not self._mac: + self._cshake.update(_right_encode(self.digest_size * 8)) + self._mac = self._cshake.read(self.digest_size) + + return self._mac + + def hexdigest(self): + """Return the **printable** MAC tag of the message. + + :return: The MAC tag. Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in tuple(self.digest())]) + + def verify(self, mac_tag): + """Verify that a given **binary** MAC (computed by another party) + is valid. + + Args: + mac_tag (bytes/bytearray/memoryview): the expected MAC of the message. + + Raises: + ValueError: if the MAC does not match. It means that the message + has been tampered with or that the MAC key is incorrect. + """ + + secret = get_random_bytes(16) + + mac1 = SHA3_256.new(secret + mac_tag) + mac2 = SHA3_256.new(secret + self.digest()) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + def hexverify(self, hex_mac_tag): + """Verify that a given **printable** MAC (computed by another party) + is valid. + + Args: + hex_mac_tag (string): the expected MAC of the message, as a hexadecimal string. + + Raises: + ValueError: if the MAC does not match. It means that the message + has been tampered with or that the MAC key is incorrect. + """ + + self.verify(unhexlify(tobytes(hex_mac_tag))) + + def new(self, **kwargs): + """Return a new instance of a KMAC hash object. + See :func:`new`. + """ + + if "mac_len" not in kwargs: + kwargs["mac_len"] = self.digest_size + + return new(**kwargs) + + +def new(**kwargs): + """Create a new KMAC128 object. + + Args: + key (bytes/bytearray/memoryview): + The key to use to compute the MAC. + It must be at least 128 bits long (16 bytes). + data (bytes/bytearray/memoryview): + Optional. The very first chunk of the message to authenticate. + It is equivalent to an early call to :meth:`KMAC_Hash.update`. + mac_len (integer): + Optional. The size of the authentication tag, in bytes. + Default is 64. Minimum is 8. + custom (bytes/bytearray/memoryview): + Optional. A customization byte string (``S`` in SP 800-185). 
+ + Returns: + A :class:`KMAC_Hash` hash object + """ + + key = kwargs.pop("key", None) + if not is_bytes(key): + raise TypeError("You must pass a key to KMAC128") + if len(key) < 16: + raise ValueError("The key must be at least 128 bits long (16 bytes)") + + data = kwargs.pop("data", None) + + mac_len = kwargs.pop("mac_len", 64) + if mac_len < 8: + raise ValueError("'mac_len' must be 8 bytes or more") + + custom = kwargs.pop("custom", b"") + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return KMAC_Hash(data, key, mac_len, custom, "19", cSHAKE128, 168) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KMAC128.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KMAC128.pyi new file mode 100644 index 000000000..8947dabc6 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KMAC128.pyi @@ -0,0 +1,33 @@ +from typing import Union +from types import ModuleType + +Buffer = Union[bytes, bytearray, memoryview] + +class KMAC_Hash(object): + + def __init__(self, + data: Buffer, + key: Buffer, + mac_len: int, + custom: Buffer, + oid_variant: str, + cshake: ModuleType, + rate: int) -> None: ... + + def update(self, data: Buffer) -> KMAC_Hash: ... + + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + def new(self, + data: Buffer = ..., + mac_len: int = ..., + key: Buffer = ..., + custom: Buffer = ...) -> KMAC_Hash: ... + + +def new(key: Buffer, + data: Buffer = ..., + mac_len: int = ..., + custom: Buffer = ...) -> KMAC_Hash: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KMAC256.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KMAC256.py new file mode 100644 index 000000000..2be8e2f3d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KMAC256.py @@ -0,0 +1,74 @@ +# =================================================================== +# +# Copyright (c) 2021, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +from Crypto.Util.py3compat import is_bytes + +from .KMAC128 import KMAC_Hash +from . import cSHAKE256 + + +def new(**kwargs): + """Create a new KMAC256 object. + + Args: + key (bytes/bytearray/memoryview): + The key to use to compute the MAC. + It must be at least 256 bits long (32 bytes). + data (bytes/bytearray/memoryview): + Optional. The very first chunk of the message to authenticate. + It is equivalent to an early call to :meth:`KMAC_Hash.update`. + mac_len (integer): + Optional. The size of the authentication tag, in bytes. + Default is 64. Minimum is 8. + custom (bytes/bytearray/memoryview): + Optional. A customization byte string (``S`` in SP 800-185). + + Returns: + A :class:`KMAC_Hash` hash object + """ + + key = kwargs.pop("key", None) + if not is_bytes(key): + raise TypeError("You must pass a key to KMAC256") + if len(key) < 32: + raise ValueError("The key must be at least 256 bits long (32 bytes)") + + data = kwargs.pop("data", None) + + mac_len = kwargs.pop("mac_len", 64) + if mac_len < 8: + raise ValueError("'mac_len' must be 8 bytes or more") + + custom = kwargs.pop("custom", b"") + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return KMAC_Hash(data, key, mac_len, custom, "20", cSHAKE256, 136) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KMAC256.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KMAC256.pyi new file mode 100644 index 000000000..86cc50001 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KMAC256.pyi @@ -0,0 +1,10 @@ +from typing import Union + +from .KMAC128 import KMAC_Hash + +Buffer = Union[bytes, bytearray, memoryview] + +def new(key: Buffer, + data: Buffer = ..., + mac_len: int = ..., + custom: Buffer = ...) -> KMAC_Hash: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KangarooTwelve.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KangarooTwelve.py new file mode 100644 index 000000000..5cf68579b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KangarooTwelve.py @@ -0,0 +1,222 @@ +# =================================================================== +# +# Copyright (c) 2021, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.number import long_to_bytes +from Crypto.Util.py3compat import bchr + +from . import TurboSHAKE128 + +def _length_encode(x): + if x == 0: + return b'\x00' + + S = long_to_bytes(x) + return S + bchr(len(S)) + + +# Possible states for a KangarooTwelve instance, which depend on the amount of data processed so far. +SHORT_MSG = 1 # Still within the first 8192 bytes, but it is not certain we will exceed them. +LONG_MSG_S0 = 2 # Still within the first 8192 bytes, and it is certain we will exceed them. +LONG_MSG_SX = 3 # Beyond the first 8192 bytes. +SQUEEZING = 4 # No more data to process. + + +class K12_XOF(object): + """A KangarooTwelve hash object. + Do not instantiate directly. + Use the :func:`new` function. + """ + + def __init__(self, data, custom): + + if custom == None: + custom = b'' + + self._custom = custom + _length_encode(len(custom)) + self._state = SHORT_MSG + self._padding = None # Final padding is only decided in read() + + # Internal hash that consumes FinalNode + # The real domain separation byte will be known before squeezing + self._hash1 = TurboSHAKE128.new(domain=1) + self._length1 = 0 + + # Internal hash that produces CV_i (reset each time) + self._hash2 = None + self._length2 = 0 + + # Incremented by one for each 8192-byte block + self._ctr = 0 + + if data: + self.update(data) + + def update(self, data): + """Hash the next piece of data. + + .. note:: + For better performance, submit chunks with a length multiple of 8192 bytes. + + Args: + data (byte string/byte array/memoryview): The next chunk of the + message to hash. 
+ """ + + if self._state == SQUEEZING: + raise TypeError("You cannot call 'update' after the first 'read'") + + if self._state == SHORT_MSG: + next_length = self._length1 + len(data) + + if next_length + len(self._custom) <= 8192: + self._length1 = next_length + self._hash1.update(data) + return self + + # Switch to tree hashing + self._state = LONG_MSG_S0 + + if self._state == LONG_MSG_S0: + data_mem = memoryview(data) + assert(self._length1 < 8192) + dtc = min(len(data), 8192 - self._length1) + self._hash1.update(data_mem[:dtc]) + self._length1 += dtc + + if self._length1 < 8192: + return self + + # Finish hashing S_0 and start S_1 + assert(self._length1 == 8192) + + divider = b'\x03' + b'\x00' * 7 + self._hash1.update(divider) + self._length1 += 8 + + self._hash2 = TurboSHAKE128.new(domain=0x0B) + self._length2 = 0 + self._ctr = 1 + + self._state = LONG_MSG_SX + return self.update(data_mem[dtc:]) + + # LONG_MSG_SX + assert(self._state == LONG_MSG_SX) + index = 0 + len_data = len(data) + + # All iteractions could actually run in parallel + data_mem = memoryview(data) + while index < len_data: + + new_index = min(index + 8192 - self._length2, len_data) + self._hash2.update(data_mem[index:new_index]) + self._length2 += new_index - index + index = new_index + + if self._length2 == 8192: + cv_i = self._hash2.read(32) + self._hash1.update(cv_i) + self._length1 += 32 + self._hash2._reset() + self._length2 = 0 + self._ctr += 1 + + return self + + def read(self, length): + """ + Produce more bytes of the digest. + + .. note:: + You cannot use :meth:`update` anymore after the first call to + :meth:`read`. + + Args: + length (integer): the amount of bytes this method must return + + :return: the next piece of XOF output (of the given length) + :rtype: byte string + """ + + custom_was_consumed = False + + if self._state == SHORT_MSG: + self._hash1.update(self._custom) + self._padding = 0x07 + self._state = SQUEEZING + + if self._state == LONG_MSG_S0: + self.update(self._custom) + custom_was_consumed = True + assert(self._state == LONG_MSG_SX) + + if self._state == LONG_MSG_SX: + if not custom_was_consumed: + self.update(self._custom) + + # Is there still some leftover data in hash2? + if self._length2 > 0: + cv_i = self._hash2.read(32) + self._hash1.update(cv_i) + self._length1 += 32 + self._hash2._reset() + self._length2 = 0 + self._ctr += 1 + + trailer = _length_encode(self._ctr - 1) + b'\xFF\xFF' + self._hash1.update(trailer) + + self._padding = 0x06 + self._state = SQUEEZING + + self._hash1._domain = self._padding + return self._hash1.read(length) + + def new(self, data=None, custom=b''): + return type(self)(data, custom) + + +def new(data=None, custom=None): + """Return a fresh instance of a KangarooTwelve object. + + Args: + data (bytes/bytearray/memoryview): + Optional. + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. + custom (bytes): + Optional. + A customization byte string. 
+ + :Return: A :class:`K12_XOF` object + """ + + return K12_XOF(data, custom) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KangarooTwelve.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KangarooTwelve.pyi new file mode 100644 index 000000000..8b3fd74ff --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/KangarooTwelve.pyi @@ -0,0 +1,16 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class K12_XOF(object): + def __init__(self, + data: Optional[Buffer] = ..., + custom: Optional[bytes] = ...) -> None: ... + def update(self, data: Buffer) -> K12_XOF: ... + def read(self, length: int) -> bytes: ... + def new(self, + data: Optional[Buffer] = ..., + custom: Optional[bytes] = ...) -> None: ... + +def new(data: Optional[Buffer] = ..., + custom: Optional[Buffer] = ...) -> K12_XOF: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD2.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD2.py new file mode 100644 index 000000000..41decbb6a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD2.py @@ -0,0 +1,166 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_md2_lib = load_pycryptodome_raw_lib( + "Crypto.Hash._MD2", + """ + int md2_init(void **shaState); + int md2_destroy(void *shaState); + int md2_update(void *hs, + const uint8_t *buf, + size_t len); + int md2_digest(const void *shaState, + uint8_t digest[20]); + int md2_copy(const void *src, void *dst); + """) + + +class MD2Hash(object): + """An MD2 hash object. + Do not instantiate directly. Use the :func:`new` function. 
+ + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 16 + # The internal block size of the hash algorithm in bytes. + block_size = 16 + # ASN.1 Object ID + oid = "1.2.840.113549.2.2" + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_md2_lib.md2_init(state.address_of()) + if result: + raise ValueError("Error %d while instantiating MD2" + % result) + self._state = SmartPointer(state.get(), + _raw_md2_lib.md2_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + result = _raw_md2_lib.md2_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while instantiating MD2" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_md2_lib.md2_digest(self._state.get(), + bfr) + if result: + raise ValueError("Error %d while instantiating MD2" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :return: A hash object of the same type + """ + + clone = MD2Hash() + result = _raw_md2_lib.md2_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying MD2" % result) + return clone + + def new(self, data=None): + return MD2Hash(data) + + +def new(data=None): + """Create a new hash object. + + :parameter data: + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`MD2Hash.update`. + :type data: bytes/bytearray/memoryview + + :Return: A :class:`MD2Hash` hash object + """ + + return MD2Hash().new(data) + +# The size of the resulting hash in bytes. +digest_size = MD2Hash.digest_size + +# The internal block size of the hash algorithm in bytes. +block_size = MD2Hash.block_size diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD2.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD2.pyi new file mode 100644 index 000000000..95a97a9ea --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD2.pyi @@ -0,0 +1,19 @@ +from typing import Union + +Buffer = Union[bytes, bytearray, memoryview] + +class MD4Hash(object): + digest_size: int + block_size: int + oid: str + + def __init__(self, data: Buffer = ...) -> None: ... + def update(self, data: Buffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... 
+ def copy(self) -> MD4Hash: ... + def new(self, data: Buffer = ...) -> MD4Hash: ... + +def new(data: Buffer = ...) -> MD4Hash: ... +digest_size: int +block_size: int diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD4.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD4.py new file mode 100644 index 000000000..be12b192a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD4.py @@ -0,0 +1,185 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +MD4 is specified in RFC1320_ and produces the 128 bit digest of a message. + + >>> from Crypto.Hash import MD4 + >>> + >>> h = MD4.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +MD4 stand for Message Digest version 4, and it was invented by Rivest in 1990. +This algorithm is insecure. Do not use it for new designs. + +.. _RFC1320: http://tools.ietf.org/html/rfc1320 +""" + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_md4_lib = load_pycryptodome_raw_lib( + "Crypto.Hash._MD4", + """ + int md4_init(void **shaState); + int md4_destroy(void *shaState); + int md4_update(void *hs, + const uint8_t *buf, + size_t len); + int md4_digest(const void *shaState, + uint8_t digest[20]); + int md4_copy(const void *src, void *dst); + """) + + +class MD4Hash(object): + """Class that implements an MD4 hash + """ + + #: The size of the resulting hash in bytes. + digest_size = 16 + #: The internal block size of the hash algorithm in bytes. 
+ block_size = 64 + #: ASN.1 Object ID + oid = "1.2.840.113549.2.4" + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_md4_lib.md4_init(state.address_of()) + if result: + raise ValueError("Error %d while instantiating MD4" + % result) + self._state = SmartPointer(state.get(), + _raw_md4_lib.md4_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Repeated calls are equivalent to a single call with the concatenation + of all the arguments. In other words: + + >>> m.update(a); m.update(b) + + is equivalent to: + + >>> m.update(a+b) + + :Parameters: + data : byte string/byte array/memoryview + The next chunk of the message being hashed. + """ + + result = _raw_md4_lib.md4_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while instantiating MD4" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that + has been hashed so far. + + This method does not change the state of the hash object. + You can continue updating the object after calling this function. + + :Return: A byte string of `digest_size` bytes. It may contain non-ASCII + characters, including null bytes. + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_md4_lib.md4_digest(self._state.get(), + bfr) + if result: + raise ValueError("Error %d while instantiating MD4" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been + hashed so far. + + This method does not change the state of the hash object. + + :Return: A string of 2* `digest_size` characters. It contains only + hexadecimal ASCII digits. + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :Return: A hash object of the same type + """ + + clone = MD4Hash() + result = _raw_md4_lib.md4_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying MD4" % result) + return clone + + def new(self, data=None): + return MD4Hash(data) + + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string/byte array/memoryview + The very first chunk of the message to hash. + It is equivalent to an early call to `MD4Hash.update()`. + Optional. + + :Return: A `MD4Hash` object + """ + return MD4Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = MD4Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = MD4Hash.block_size diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD4.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD4.pyi new file mode 100644 index 000000000..a9a729539 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD4.pyi @@ -0,0 +1,19 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class MD4Hash(object): + digest_size: int + block_size: int + oid: str + + def __init__(self, data: Optional[Buffer] = ...) -> None: ... + def update(self, data: Buffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... 
+ def copy(self) -> MD4Hash: ... + def new(self, data: Optional[Buffer] = ...) -> MD4Hash: ... + +def new(data: Optional[Buffer] = ...) -> MD4Hash: ... +digest_size: int +block_size: int diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD5.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD5.py new file mode 100644 index 000000000..554b77720 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD5.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import * + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_md5_lib = load_pycryptodome_raw_lib("Crypto.Hash._MD5", + """ + #define MD5_DIGEST_SIZE 16 + + int MD5_init(void **shaState); + int MD5_destroy(void *shaState); + int MD5_update(void *hs, + const uint8_t *buf, + size_t len); + int MD5_digest(const void *shaState, + uint8_t digest[MD5_DIGEST_SIZE]); + int MD5_copy(const void *src, void *dst); + + int MD5_pbkdf2_hmac_assist(const void *inner, + const void *outer, + const uint8_t first_digest[MD5_DIGEST_SIZE], + uint8_t final_digest[MD5_DIGEST_SIZE], + size_t iterations); + """) + +class MD5Hash(object): + """A MD5 hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 16 + # The internal block size of the hash algorithm in bytes. + block_size = 64 + # ASN.1 Object ID + oid = "1.2.840.113549.2.5" + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_md5_lib.MD5_init(state.address_of()) + if result: + raise ValueError("Error %d while instantiating MD5" + % result) + self._state = SmartPointer(state.get(), + _raw_md5_lib.MD5_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. 
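+
+        Example (an illustrative usage sketch added for clarity; the byte
+        strings below are arbitrary):
+
+            >>> from Crypto.Hash import MD5
+            >>> h = MD5.new()
+            >>> h.update(b'Hello ')
+            >>> h.update(b'world')
+            >>> h.hexdigest() == MD5.new(b'Hello world').hexdigest()
+            True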
+ """ + + result = _raw_md5_lib.MD5_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while instantiating MD5" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_md5_lib.MD5_digest(self._state.get(), + bfr) + if result: + raise ValueError("Error %d while instantiating MD5" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :return: A hash object of the same type + """ + + clone = MD5Hash() + result = _raw_md5_lib.MD5_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying MD5" % result) + return clone + + def new(self, data=None): + """Create a fresh SHA-1 hash object.""" + + return MD5Hash(data) + + +def new(data=None): + """Create a new hash object. + + :parameter data: + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`MD5Hash.update`. + :type data: byte string/byte array/memoryview + + :Return: A :class:`MD5Hash` hash object + """ + return MD5Hash().new(data) + +# The size of the resulting hash in bytes. +digest_size = 16 + +# The internal block size of the hash algorithm in bytes. +block_size = 64 + + +def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): + """Compute the expensive inner loop in PBKDF-HMAC.""" + + assert len(first_digest) == digest_size + assert iterations > 0 + + bfr = create_string_buffer(digest_size); + result = _raw_md5_lib.MD5_pbkdf2_hmac_assist( + inner._state.get(), + outer._state.get(), + first_digest, + bfr, + c_size_t(iterations)) + + if result: + raise ValueError("Error %d with PBKDF2-HMAC assis for MD5" % result) + + return get_raw_buffer(bfr) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD5.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD5.pyi new file mode 100644 index 000000000..d8195562a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/MD5.pyi @@ -0,0 +1,19 @@ +from typing import Union + +Buffer = Union[bytes, bytearray, memoryview] + +class MD5Hash(object): + digest_size: int + block_size: int + oid: str + + def __init__(self, data: Buffer = ...) -> None: ... + def update(self, data: Buffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> MD5Hash: ... + def new(self, data: Buffer = ...) -> MD5Hash: ... + +def new(data: Buffer = ...) -> MD5Hash: ... 
+digest_size: int +block_size: int diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/Poly1305.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/Poly1305.py new file mode 100644 index 000000000..eb5e0dadb --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/Poly1305.py @@ -0,0 +1,217 @@ +# -*- coding: utf-8 -*- +# +# Hash/Poly1305.py - Implements the Poly1305 MAC +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from binascii import unhexlify + +from Crypto.Util.py3compat import bord, tobytes, _copy_bytes + +from Crypto.Hash import BLAKE2s +from Crypto.Random import get_random_bytes +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + + +_raw_poly1305 = load_pycryptodome_raw_lib("Crypto.Hash._poly1305", + """ + int poly1305_init(void **state, + const uint8_t *r, + size_t r_len, + const uint8_t *s, + size_t s_len); + int poly1305_destroy(void *state); + int poly1305_update(void *state, + const uint8_t *in, + size_t len); + int poly1305_digest(const void *state, + uint8_t *digest, + size_t len); + """) + + +class Poly1305_MAC(object): + """An Poly1305 MAC object. + Do not instantiate directly. Use the :func:`new` function. + + :ivar digest_size: the size in bytes of the resulting MAC tag + :vartype digest_size: integer + """ + + digest_size = 16 + + def __init__(self, r, s, data): + + if len(r) != 16: + raise ValueError("Parameter r is not 16 bytes long") + if len(s) != 16: + raise ValueError("Parameter s is not 16 bytes long") + + self._mac_tag = None + + state = VoidPointer() + result = _raw_poly1305.poly1305_init(state.address_of(), + c_uint8_ptr(r), + c_size_t(len(r)), + c_uint8_ptr(s), + c_size_t(len(s)) + ) + if result: + raise ValueError("Error %d while instantiating Poly1305" % result) + self._state = SmartPointer(state.get(), + _raw_poly1305.poly1305_destroy) + if data: + self.update(data) + + def update(self, data): + """Authenticate the next chunk of message. 
+ + Args: + data (byte string/byte array/memoryview): The next chunk of data + """ + + if self._mac_tag: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + result = _raw_poly1305.poly1305_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while hashing Poly1305 data" % result) + return self + + def copy(self): + raise NotImplementedError() + + def digest(self): + """Return the **binary** (non-printable) MAC tag of the message + authenticated so far. + + :return: The MAC tag digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + if self._mac_tag: + return self._mac_tag + + bfr = create_string_buffer(16) + result = _raw_poly1305.poly1305_digest(self._state.get(), + bfr, + c_size_t(len(bfr))) + if result: + raise ValueError("Error %d while creating Poly1305 digest" % result) + + self._mac_tag = get_raw_buffer(bfr) + return self._mac_tag + + def hexdigest(self): + """Return the **printable** MAC tag of the message authenticated so far. + + :return: The MAC tag, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) + for x in tuple(self.digest())]) + + def verify(self, mac_tag): + """Verify that a given **binary** MAC (computed by another party) + is valid. + + Args: + mac_tag (byte string/byte string/memoryview): the expected MAC of the message. + + Raises: + ValueError: if the MAC does not match. It means that the message + has been tampered with or that the MAC key is incorrect. + """ + + secret = get_random_bytes(16) + + mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=mac_tag) + mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=self.digest()) + + if mac1.digest() != mac2.digest(): + raise ValueError("MAC check failed") + + def hexverify(self, hex_mac_tag): + """Verify that a given **printable** MAC (computed by another party) + is valid. + + Args: + hex_mac_tag (string): the expected MAC of the message, + as a hexadecimal string. + + Raises: + ValueError: if the MAC does not match. It means that the message + has been tampered with or that the MAC key is incorrect. + """ + + self.verify(unhexlify(tobytes(hex_mac_tag))) + + + +def new(**kwargs): + """Create a new Poly1305 MAC object. + + Args: + key (bytes/bytearray/memoryview): + The 32-byte key for the Poly1305 object. + cipher (module from ``Crypto.Cipher``): + The cipher algorithm to use for deriving the Poly1305 + key pair *(r, s)*. + It can only be ``Crypto.Cipher.AES`` or ``Crypto.Cipher.ChaCha20``. + nonce (bytes/bytearray/memoryview): + Optional. The non-repeatable value to use for the MAC of this message. + It must be 16 bytes long for ``AES`` and 8 or 12 bytes for ``ChaCha20``. + If not passed, a random nonce is created; you will find it in the + ``nonce`` attribute of the new object. + data (bytes/bytearray/memoryview): + Optional. The very first chunk of the message to authenticate. + It is equivalent to an early call to ``update()``. 
+ + Returns: + A :class:`Poly1305_MAC` object + """ + + cipher = kwargs.pop("cipher", None) + if not hasattr(cipher, '_derive_Poly1305_key_pair'): + raise ValueError("Parameter 'cipher' must be AES or ChaCha20") + + cipher_key = kwargs.pop("key", None) + if cipher_key is None: + raise TypeError("You must pass a parameter 'key'") + + nonce = kwargs.pop("nonce", None) + data = kwargs.pop("data", None) + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + r, s, nonce = cipher._derive_Poly1305_key_pair(cipher_key, nonce) + + new_mac = Poly1305_MAC(r, s, data) + new_mac.nonce = _copy_bytes(None, None, nonce) # nonce may still be just a memoryview + return new_mac diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/Poly1305.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/Poly1305.pyi new file mode 100644 index 000000000..f97a14a01 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/Poly1305.pyi @@ -0,0 +1,24 @@ +from types import ModuleType +from typing import Union + +Buffer = Union[bytes, bytearray, memoryview] + +class Poly1305_MAC(object): + block_size: int + digest_size: int + oid: str + + def __init__(self, + r : int, + s : int, + data : Buffer) -> None: ... + def update(self, data: Buffer) -> Poly1305_MAC: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def verify(self, mac_tag: Buffer) -> None: ... + def hexverify(self, hex_mac_tag: str) -> None: ... + +def new(key: Buffer, + cipher: ModuleType, + nonce: Buffer = ..., + data: Buffer = ...) -> Poly1305_MAC: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/RIPEMD.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/RIPEMD.py new file mode 100644 index 000000000..4e8023585 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/RIPEMD.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +# This file exists for backward compatibility with old code that refers to +# Crypto.Hash.RIPEMD + +"""Deprecated alias for `Crypto.Hash.RIPEMD160`""" + +from Crypto.Hash.RIPEMD160 import new, block_size, digest_size diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/RIPEMD.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/RIPEMD.pyi new file mode 100644 index 000000000..e33eb2df1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/RIPEMD.pyi @@ -0,0 +1,3 @@ +# This file exists for backward compatibility with old code that refers to +# Crypto.Hash.SHA + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/RIPEMD160.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/RIPEMD160.py new file mode 100644 index 000000000..820b57dd7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/RIPEMD160.py @@ -0,0 +1,169 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_ripemd160_lib = load_pycryptodome_raw_lib( + "Crypto.Hash._RIPEMD160", + """ + int ripemd160_init(void **shaState); + int ripemd160_destroy(void *shaState); + int ripemd160_update(void *hs, + const uint8_t *buf, + size_t len); + int ripemd160_digest(const void *shaState, + uint8_t digest[20]); + int ripemd160_copy(const void *src, void *dst); + """) + + +class RIPEMD160Hash(object): + """A RIPEMD-160 hash object. + Do not instantiate directly. + Use the :func:`new` function. 
+ + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 20 + # The internal block size of the hash algorithm in bytes. + block_size = 64 + # ASN.1 Object ID + oid = "1.3.36.3.2.1" + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_ripemd160_lib.ripemd160_init(state.address_of()) + if result: + raise ValueError("Error %d while instantiating RIPEMD160" + % result) + self._state = SmartPointer(state.get(), + _raw_ripemd160_lib.ripemd160_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + result = _raw_ripemd160_lib.ripemd160_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while instantiating ripemd160" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_ripemd160_lib.ripemd160_digest(self._state.get(), + bfr) + if result: + raise ValueError("Error %d while instantiating ripemd160" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :return: A hash object of the same type + """ + + clone = RIPEMD160Hash() + result = _raw_ripemd160_lib.ripemd160_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying ripemd160" % result) + return clone + + def new(self, data=None): + """Create a fresh RIPEMD-160 hash object.""" + + return RIPEMD160Hash(data) + + +def new(data=None): + """Create a new hash object. + + :parameter data: + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`RIPEMD160Hash.update`. + :type data: byte string/byte array/memoryview + + :Return: A :class:`RIPEMD160Hash` hash object + """ + + return RIPEMD160Hash().new(data) + +# The size of the resulting hash in bytes. +digest_size = RIPEMD160Hash.digest_size + +# The internal block size of the hash algorithm in bytes. 
+block_size = RIPEMD160Hash.block_size diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/RIPEMD160.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/RIPEMD160.pyi new file mode 100644 index 000000000..b61947377 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/RIPEMD160.pyi @@ -0,0 +1,19 @@ +from typing import Union + +Buffer = Union[bytes, bytearray, memoryview] + +class RIPEMD160Hash(object): + digest_size: int + block_size: int + oid: str + + def __init__(self, data: Buffer = ...) -> None: ... + def update(self, data: Buffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> RIPEMD160Hash: ... + def new(self, data: Buffer = ...) -> RIPEMD160Hash: ... + +def new(data: Buffer = ...) -> RIPEMD160Hash: ... +digest_size: int +block_size: int diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA.py new file mode 100644 index 000000000..0cc141c90 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +# This file exists for backward compatibility with old code that refers to +# Crypto.Hash.SHA + +from Crypto.Hash.SHA1 import __doc__, new, block_size, digest_size diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA.pyi new file mode 100644 index 000000000..4d7d57e2c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA.pyi @@ -0,0 +1,4 @@ +# This file exists for backward compatibility with old code that refers to +# Crypto.Hash.SHA + +from Crypto.Hash.SHA1 import __doc__, new, block_size, digest_size diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA1.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA1.py new file mode 100644 index 000000000..f79d82573 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA1.py @@ -0,0 +1,185 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import * + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_sha1_lib = load_pycryptodome_raw_lib("Crypto.Hash._SHA1", + """ + #define SHA1_DIGEST_SIZE 20 + + int SHA1_init(void **shaState); + int SHA1_destroy(void *shaState); + int SHA1_update(void *hs, + const uint8_t *buf, + size_t len); + int SHA1_digest(const void *shaState, + uint8_t digest[SHA1_DIGEST_SIZE]); + int SHA1_copy(const void *src, void *dst); + + int SHA1_pbkdf2_hmac_assist(const void *inner, + const void *outer, + const uint8_t first_digest[SHA1_DIGEST_SIZE], + uint8_t final_digest[SHA1_DIGEST_SIZE], + size_t iterations); + """) + +class SHA1Hash(object): + """A SHA-1 hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 20 + # The internal block size of the hash algorithm in bytes. + block_size = 64 + # ASN.1 Object ID + oid = "1.3.14.3.2.26" + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_sha1_lib.SHA1_init(state.address_of()) + if result: + raise ValueError("Error %d while instantiating SHA1" + % result) + self._state = SmartPointer(state.get(), + _raw_sha1_lib.SHA1_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + result = _raw_sha1_lib.SHA1_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while instantiating SHA1" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_sha1_lib.SHA1_digest(self._state.get(), + bfr) + if result: + raise ValueError("Error %d while instantiating SHA1" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. 
+ :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :return: A hash object of the same type + """ + + clone = SHA1Hash() + result = _raw_sha1_lib.SHA1_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying SHA1" % result) + return clone + + def new(self, data=None): + """Create a fresh SHA-1 hash object.""" + + return SHA1Hash(data) + + +def new(data=None): + """Create a new hash object. + + :parameter data: + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`SHA1Hash.update`. + :type data: byte string/byte array/memoryview + + :Return: A :class:`SHA1Hash` hash object + """ + return SHA1Hash().new(data) + + +# The size of the resulting hash in bytes. +digest_size = SHA1Hash.digest_size + +# The internal block size of the hash algorithm in bytes. +block_size = SHA1Hash.block_size + + +def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): + """Compute the expensive inner loop in PBKDF-HMAC.""" + + assert len(first_digest) == digest_size + assert iterations > 0 + + bfr = create_string_buffer(digest_size); + result = _raw_sha1_lib.SHA1_pbkdf2_hmac_assist( + inner._state.get(), + outer._state.get(), + first_digest, + bfr, + c_size_t(iterations)) + + if result: + raise ValueError("Error %d with PBKDF2-HMAC assis for SHA1" % result) + + return get_raw_buffer(bfr) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA1.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA1.pyi new file mode 100644 index 000000000..d6c8e25ea --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA1.pyi @@ -0,0 +1,19 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHA1Hash(object): + digest_size: int + block_size: int + oid: str + + def __init__(self, data: Optional[Buffer] = ...) -> None: ... + def update(self, data: Buffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> SHA1Hash: ... + def new(self, data: Optional[Buffer] = ...) -> SHA1Hash: ... + +def new(data: Optional[Buffer] = ...) -> SHA1Hash: ... +digest_size: int +block_size: int diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA224.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA224.py new file mode 100644 index 000000000..f788b063a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA224.py @@ -0,0 +1,186 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_sha224_lib = load_pycryptodome_raw_lib("Crypto.Hash._SHA224", + """ + int SHA224_init(void **shaState); + int SHA224_destroy(void *shaState); + int SHA224_update(void *hs, + const uint8_t *buf, + size_t len); + int SHA224_digest(const void *shaState, + uint8_t *digest, + size_t digest_size); + int SHA224_copy(const void *src, void *dst); + + int SHA224_pbkdf2_hmac_assist(const void *inner, + const void *outer, + const uint8_t *first_digest, + uint8_t *final_digest, + size_t iterations, + size_t digest_size); + """) + +class SHA224Hash(object): + """A SHA-224 hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 28 + # The internal block size of the hash algorithm in bytes. + block_size = 64 + # ASN.1 Object ID + oid = '2.16.840.1.101.3.4.2.4' + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_sha224_lib.SHA224_init(state.address_of()) + if result: + raise ValueError("Error %d while instantiating SHA224" + % result) + self._state = SmartPointer(state.get(), + _raw_sha224_lib.SHA224_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + result = _raw_sha224_lib.SHA224_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while hashing data with SHA224" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_sha224_lib.SHA224_digest(self._state.get(), + bfr, + c_size_t(self.digest_size)) + if result: + raise ValueError("Error %d while making SHA224 digest" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. 
+ + :return: A hash object of the same type + """ + + clone = SHA224Hash() + result = _raw_sha224_lib.SHA224_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying SHA224" % result) + return clone + + def new(self, data=None): + """Create a fresh SHA-224 hash object.""" + + return SHA224Hash(data) + + +def new(data=None): + """Create a new hash object. + + :parameter data: + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`SHA224Hash.update`. + :type data: byte string/byte array/memoryview + + :Return: A :class:`SHA224Hash` hash object + """ + return SHA224Hash().new(data) + + +# The size of the resulting hash in bytes. +digest_size = SHA224Hash.digest_size + +# The internal block size of the hash algorithm in bytes. +block_size = SHA224Hash.block_size + + +def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): + """Compute the expensive inner loop in PBKDF-HMAC.""" + + assert iterations > 0 + + bfr = create_string_buffer(len(first_digest)); + result = _raw_sha224_lib.SHA224_pbkdf2_hmac_assist( + inner._state.get(), + outer._state.get(), + first_digest, + bfr, + c_size_t(iterations), + c_size_t(len(first_digest))) + + if result: + raise ValueError("Error %d with PBKDF2-HMAC assist for SHA224" % result) + + return get_raw_buffer(bfr) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA224.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA224.pyi new file mode 100644 index 000000000..613a7f9a7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA224.pyi @@ -0,0 +1,19 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHA224Hash(object): + digest_size: int + block_size: int + oid: str + + def __init__(self, data: Optional[Buffer] = ...) -> None: ... + def update(self, data: Buffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> SHA224Hash: ... + def new(self, data: Optional[Buffer] = ...) -> SHA224Hash: ... + +def new(data: Optional[Buffer] = ...) -> SHA224Hash: ... +digest_size: int +block_size: int diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA256.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA256.py new file mode 100644 index 000000000..957aa37e0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA256.py @@ -0,0 +1,185 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_sha256_lib = load_pycryptodome_raw_lib("Crypto.Hash._SHA256", + """ + int SHA256_init(void **shaState); + int SHA256_destroy(void *shaState); + int SHA256_update(void *hs, + const uint8_t *buf, + size_t len); + int SHA256_digest(const void *shaState, + uint8_t *digest, + size_t digest_size); + int SHA256_copy(const void *src, void *dst); + + int SHA256_pbkdf2_hmac_assist(const void *inner, + const void *outer, + const uint8_t *first_digest, + uint8_t *final_digest, + size_t iterations, + size_t digest_size); + """) + +class SHA256Hash(object): + """A SHA-256 hash object. + Do not instantiate directly. Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 32 + # The internal block size of the hash algorithm in bytes. + block_size = 64 + # ASN.1 Object ID + oid = "2.16.840.1.101.3.4.2.1" + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_sha256_lib.SHA256_init(state.address_of()) + if result: + raise ValueError("Error %d while instantiating SHA256" + % result) + self._state = SmartPointer(state.get(), + _raw_sha256_lib.SHA256_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + result = _raw_sha256_lib.SHA256_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while hashing data with SHA256" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_sha256_lib.SHA256_digest(self._state.get(), + bfr, + c_size_t(self.digest_size)) + if result: + raise ValueError("Error %d while making SHA256 digest" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :return: A hash object of the same type + """ + + clone = SHA256Hash() + result = _raw_sha256_lib.SHA256_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying SHA256" % result) + return clone + + def new(self, data=None): + """Create a fresh SHA-256 hash object.""" + + return SHA256Hash(data) + +def new(data=None): + """Create a new hash object. 
+ + :parameter data: + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`SHA256Hash.update`. + :type data: byte string/byte array/memoryview + + :Return: A :class:`SHA256Hash` hash object + """ + + return SHA256Hash().new(data) + + +# The size of the resulting hash in bytes. +digest_size = SHA256Hash.digest_size + +# The internal block size of the hash algorithm in bytes. +block_size = SHA256Hash.block_size + + +def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): + """Compute the expensive inner loop in PBKDF-HMAC.""" + + assert iterations > 0 + + bfr = create_string_buffer(len(first_digest)); + result = _raw_sha256_lib.SHA256_pbkdf2_hmac_assist( + inner._state.get(), + outer._state.get(), + first_digest, + bfr, + c_size_t(iterations), + c_size_t(len(first_digest))) + + if result: + raise ValueError("Error %d with PBKDF2-HMAC assist for SHA256" % result) + + return get_raw_buffer(bfr) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA256.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA256.pyi new file mode 100644 index 000000000..cbf21bfb5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA256.pyi @@ -0,0 +1,18 @@ +from typing import Union, Optional + + +class SHA256Hash(object): + digest_size: int + block_size: int + oid: str + def __init__(self, data: Optional[Union[bytes, bytearray, memoryview]]=None) -> None: ... + def update(self, data: Union[bytes, bytearray, memoryview]) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> SHA256Hash: ... + def new(self, data: Optional[Union[bytes, bytearray, memoryview]]=None) -> SHA256Hash: ... + +def new(data: Optional[Union[bytes, bytearray, memoryview]]=None) -> SHA256Hash: ... + +digest_size: int +block_size: int diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA384.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA384.py new file mode 100644 index 000000000..a98fa9a3c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA384.py @@ -0,0 +1,186 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_sha384_lib = load_pycryptodome_raw_lib("Crypto.Hash._SHA384", + """ + int SHA384_init(void **shaState); + int SHA384_destroy(void *shaState); + int SHA384_update(void *hs, + const uint8_t *buf, + size_t len); + int SHA384_digest(const void *shaState, + uint8_t *digest, + size_t digest_size); + int SHA384_copy(const void *src, void *dst); + + int SHA384_pbkdf2_hmac_assist(const void *inner, + const void *outer, + const uint8_t *first_digest, + uint8_t *final_digest, + size_t iterations, + size_t digest_size); + """) + +class SHA384Hash(object): + """A SHA-384 hash object. + Do not instantiate directly. Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 48 + # The internal block size of the hash algorithm in bytes. + block_size = 128 + # ASN.1 Object ID + oid = '2.16.840.1.101.3.4.2.2' + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_sha384_lib.SHA384_init(state.address_of()) + if result: + raise ValueError("Error %d while instantiating SHA384" + % result) + self._state = SmartPointer(state.get(), + _raw_sha384_lib.SHA384_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + result = _raw_sha384_lib.SHA384_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while hashing data with SHA384" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_sha384_lib.SHA384_digest(self._state.get(), + bfr, + c_size_t(self.digest_size)) + if result: + raise ValueError("Error %d while making SHA384 digest" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :return: A hash object of the same type + """ + + clone = SHA384Hash() + result = _raw_sha384_lib.SHA384_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying SHA384" % result) + return clone + + def new(self, data=None): + """Create a fresh SHA-384 hash object.""" + + return SHA384Hash(data) + + +def new(data=None): + """Create a new hash object. 
+ + :parameter data: + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`SHA384Hash.update`. + :type data: byte string/byte array/memoryview + + :Return: A :class:`SHA384Hash` hash object + """ + + return SHA384Hash().new(data) + + +# The size of the resulting hash in bytes. +digest_size = SHA384Hash.digest_size + +# The internal block size of the hash algorithm in bytes. +block_size = SHA384Hash.block_size + + +def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): + """Compute the expensive inner loop in PBKDF-HMAC.""" + + assert iterations > 0 + + bfr = create_string_buffer(len(first_digest)); + result = _raw_sha384_lib.SHA384_pbkdf2_hmac_assist( + inner._state.get(), + outer._state.get(), + first_digest, + bfr, + c_size_t(iterations), + c_size_t(len(first_digest))) + + if result: + raise ValueError("Error %d with PBKDF2-HMAC assist for SHA384" % result) + + return get_raw_buffer(bfr) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA384.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA384.pyi new file mode 100644 index 000000000..c2aab9e08 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA384.pyi @@ -0,0 +1,19 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHA384Hash(object): + digest_size: int + block_size: int + oid: str + + def __init__(self, data: Optional[Buffer] = ...) -> None: ... + def update(self, data: Buffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> SHA384Hash: ... + def new(self, data: Optional[Buffer] = ...) -> SHA384Hash: ... + +def new(data: Optional[Buffer] = ...) -> SHA384Hash: ... +digest_size: int +block_size: int diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_224.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_224.py new file mode 100644 index 000000000..54556d002 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_224.py @@ -0,0 +1,174 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr, c_ubyte) + +from Crypto.Hash.keccak import _raw_keccak_lib + +class SHA3_224_Hash(object): + """A SHA3-224 hash object. + Do not instantiate directly. + Use the :func:`new` function. 
+ + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. + digest_size = 28 + + # ASN.1 Object ID + oid = "2.16.840.1.101.3.4.2.7" + + # Input block size for HMAC + block_size = 144 + + def __init__(self, data, update_after_digest): + self._update_after_digest = update_after_digest + self._digest_done = False + self._padding = 0x06 + + state = VoidPointer() + result = _raw_keccak_lib.keccak_init(state.address_of(), + c_size_t(self.digest_size * 2), + c_ubyte(24)) + if result: + raise ValueError("Error %d while instantiating SHA-3/224" + % result) + self._state = SmartPointer(state.get(), + _raw_keccak_lib.keccak_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + if self._digest_done and not self._update_after_digest: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + result = _raw_keccak_lib.keccak_absorb(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data)) + ) + if result: + raise ValueError("Error %d while updating SHA-3/224" + % result) + return self + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + self._digest_done = True + + bfr = create_string_buffer(self.digest_size) + result = _raw_keccak_lib.keccak_digest(self._state.get(), + bfr, + c_size_t(self.digest_size), + c_ubyte(self._padding)) + if result: + raise ValueError("Error %d while instantiating SHA-3/224" + % result) + + self._digest_value = get_raw_buffer(bfr) + return self._digest_value + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :return: A hash object of the same type + """ + + clone = self.new() + result = _raw_keccak_lib.keccak_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying SHA3-224" % result) + return clone + + def new(self, data=None): + """Create a fresh SHA3-224 hash object.""" + + return type(self)(data, self._update_after_digest) + + +def new(*args, **kwargs): + """Create a new hash object. + + Args: + data (byte string/byte array/memoryview): + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. + update_after_digest (boolean): + Whether :meth:`digest` can be followed by another :meth:`update` + (default: ``False``). 
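+
+    Example (an illustrative usage sketch added for clarity; the data values
+    are arbitrary):
+
+        >>> from Crypto.Hash import SHA3_224
+        >>> h = SHA3_224.new(data=b'First part', update_after_digest=True)
+        >>> tag1 = h.hexdigest()
+        >>> _ = h.update(b'Second part')   # allowed because update_after_digest=True
+        >>> tag2 = h.hexdigest()           # now also reflects the second chunk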
+ + :Return: A :class:`SHA3_224_Hash` hash object + """ + + data = kwargs.pop("data", None) + update_after_digest = kwargs.pop("update_after_digest", False) + if len(args) == 1: + if data: + raise ValueError("Initial data for hash specified twice") + data = args[0] + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return SHA3_224_Hash(data, update_after_digest) + +# The size of the resulting hash in bytes. +digest_size = SHA3_224_Hash.digest_size + +# Input block size for HMAC +block_size = 144 diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_224.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_224.pyi new file mode 100644 index 000000000..218082199 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_224.pyi @@ -0,0 +1,19 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHA3_224_Hash(object): + digest_size: int + block_size: int + oid: str + def __init__(self, data: Optional[Buffer], update_after_digest: bool) -> None: ... + def update(self, data: Buffer) -> SHA3_224_Hash: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> SHA3_224_Hash: ... + def new(self, data: Optional[Buffer]) -> SHA3_224_Hash: ... + +def new(__data: Buffer = ..., update_after_digest: bool = ...) -> SHA3_224_Hash: ... + +digest_size: int +block_size: int diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_256.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_256.py new file mode 100644 index 000000000..b4f11ee1f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_256.py @@ -0,0 +1,174 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr, c_ubyte) + +from Crypto.Hash.keccak import _raw_keccak_lib + +class SHA3_256_Hash(object): + """A SHA3-256 hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. 
+ digest_size = 32 + + # ASN.1 Object ID + oid = "2.16.840.1.101.3.4.2.8" + + # Input block size for HMAC + block_size = 136 + + def __init__(self, data, update_after_digest): + self._update_after_digest = update_after_digest + self._digest_done = False + self._padding = 0x06 + + state = VoidPointer() + result = _raw_keccak_lib.keccak_init(state.address_of(), + c_size_t(self.digest_size * 2), + c_ubyte(24)) + if result: + raise ValueError("Error %d while instantiating SHA-3/256" + % result) + self._state = SmartPointer(state.get(), + _raw_keccak_lib.keccak_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + if self._digest_done and not self._update_after_digest: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + result = _raw_keccak_lib.keccak_absorb(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data)) + ) + if result: + raise ValueError("Error %d while updating SHA-3/256" + % result) + return self + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + self._digest_done = True + + bfr = create_string_buffer(self.digest_size) + result = _raw_keccak_lib.keccak_digest(self._state.get(), + bfr, + c_size_t(self.digest_size), + c_ubyte(self._padding)) + if result: + raise ValueError("Error %d while instantiating SHA-3/256" + % result) + + self._digest_value = get_raw_buffer(bfr) + return self._digest_value + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :return: A hash object of the same type + """ + + clone = self.new() + result = _raw_keccak_lib.keccak_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying SHA3-256" % result) + return clone + + def new(self, data=None): + """Create a fresh SHA3-256 hash object.""" + + return type(self)(data, self._update_after_digest) + + +def new(*args, **kwargs): + """Create a new hash object. + + Args: + data (byte string/byte array/memoryview): + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. + update_after_digest (boolean): + Whether :meth:`digest` can be followed by another :meth:`update` + (default: ``False``). + + :Return: A :class:`SHA3_256_Hash` hash object + """ + + data = kwargs.pop("data", None) + update_after_digest = kwargs.pop("update_after_digest", False) + if len(args) == 1: + if data: + raise ValueError("Initial data for hash specified twice") + data = args[0] + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return SHA3_256_Hash(data, update_after_digest) + +# The size of the resulting hash in bytes. 
+digest_size = SHA3_256_Hash.digest_size + +# Input block size for HMAC +block_size = 136 diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_256.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_256.pyi new file mode 100644 index 000000000..88436bd8f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_256.pyi @@ -0,0 +1,19 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHA3_256_Hash(object): + digest_size: int + block_size: int + oid: str + def __init__(self, data: Optional[Buffer], update_after_digest: bool) -> None: ... + def update(self, data: Buffer) -> SHA3_256_Hash: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> SHA3_256_Hash: ... + def new(self, data: Optional[Buffer]) -> SHA3_256_Hash: ... + +def new(__data: Buffer = ..., update_after_digest: bool = ...) -> SHA3_256_Hash: ... + +digest_size: int +block_size: int diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_384.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_384.py new file mode 100644 index 000000000..12f61ce57 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_384.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr, c_ubyte) + +from Crypto.Hash.keccak import _raw_keccak_lib + +class SHA3_384_Hash(object): + """A SHA3-384 hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. 
+ digest_size = 48 + + # ASN.1 Object ID + oid = "2.16.840.1.101.3.4.2.9" + + # Input block size for HMAC + block_size = 104 + + def __init__(self, data, update_after_digest): + self._update_after_digest = update_after_digest + self._digest_done = False + self._padding = 0x06 + + state = VoidPointer() + result = _raw_keccak_lib.keccak_init(state.address_of(), + c_size_t(self.digest_size * 2), + c_ubyte(24)) + if result: + raise ValueError("Error %d while instantiating SHA-3/384" + % result) + self._state = SmartPointer(state.get(), + _raw_keccak_lib.keccak_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + if self._digest_done and not self._update_after_digest: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + result = _raw_keccak_lib.keccak_absorb(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while updating SHA-3/384" + % result) + return self + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + self._digest_done = True + + bfr = create_string_buffer(self.digest_size) + result = _raw_keccak_lib.keccak_digest(self._state.get(), + bfr, + c_size_t(self.digest_size), + c_ubyte(self._padding)) + if result: + raise ValueError("Error %d while instantiating SHA-3/384" + % result) + + self._digest_value = get_raw_buffer(bfr) + return self._digest_value + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :return: A hash object of the same type + """ + + clone = self.new() + result = _raw_keccak_lib.keccak_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying SHA3-384" % result) + return clone + + def new(self, data=None): + """Create a fresh SHA3-256 hash object.""" + + return type(self)(data, self._update_after_digest) + + + def new(self, data=None): + """Create a fresh SHA3-384 hash object.""" + + return type(self)(data, self._update_after_digest) + + +def new(*args, **kwargs): + """Create a new hash object. + + Args: + data (byte string/byte array/memoryview): + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. + update_after_digest (boolean): + Whether :meth:`digest` can be followed by another :meth:`update` + (default: ``False``). + + :Return: A :class:`SHA3_384_Hash` hash object + """ + + data = kwargs.pop("data", None) + update_after_digest = kwargs.pop("update_after_digest", False) + if len(args) == 1: + if data: + raise ValueError("Initial data for hash specified twice") + data = args[0] + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return SHA3_384_Hash(data, update_after_digest) + +# The size of the resulting hash in bytes. 
+digest_size = SHA3_384_Hash.digest_size + +# Input block size for HMAC +block_size = 104 diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_384.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_384.pyi new file mode 100644 index 000000000..98d00c6c3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_384.pyi @@ -0,0 +1,19 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHA3_384_Hash(object): + digest_size: int + block_size: int + oid: str + def __init__(self, data: Optional[Buffer], update_after_digest: bool) -> None: ... + def update(self, data: Buffer) -> SHA3_384_Hash: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> SHA3_384_Hash: ... + def new(self, data: Optional[Buffer]) -> SHA3_384_Hash: ... + +def new(__data: Buffer = ..., update_after_digest: bool = ...) -> SHA3_384_Hash: ... + +digest_size: int +block_size: int diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_512.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_512.py new file mode 100644 index 000000000..de8880c75 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_512.py @@ -0,0 +1,174 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr, c_ubyte) + +from Crypto.Hash.keccak import _raw_keccak_lib + +class SHA3_512_Hash(object): + """A SHA3-512 hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The size of the resulting hash in bytes. 
+ digest_size = 64 + + # ASN.1 Object ID + oid = "2.16.840.1.101.3.4.2.10" + + # Input block size for HMAC + block_size = 72 + + def __init__(self, data, update_after_digest): + self._update_after_digest = update_after_digest + self._digest_done = False + self._padding = 0x06 + + state = VoidPointer() + result = _raw_keccak_lib.keccak_init(state.address_of(), + c_size_t(self.digest_size * 2), + c_ubyte(24)) + if result: + raise ValueError("Error %d while instantiating SHA-3/512" + % result) + self._state = SmartPointer(state.get(), + _raw_keccak_lib.keccak_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + if self._digest_done and not self._update_after_digest: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + result = _raw_keccak_lib.keccak_absorb(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while updating SHA-3/512" + % result) + return self + + def digest(self): + + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + self._digest_done = True + + bfr = create_string_buffer(self.digest_size) + result = _raw_keccak_lib.keccak_digest(self._state.get(), + bfr, + c_size_t(self.digest_size), + c_ubyte(self._padding)) + if result: + raise ValueError("Error %d while instantiating SHA-3/512" + % result) + + self._digest_value = get_raw_buffer(bfr) + return self._digest_value + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :return: A hash object of the same type + """ + + clone = self.new() + result = _raw_keccak_lib.keccak_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying SHA3-512" % result) + return clone + + def new(self, data=None): + """Create a fresh SHA3-521 hash object.""" + + return type(self)(data, self._update_after_digest) + + +def new(*args, **kwargs): + """Create a new hash object. + + Args: + data (byte string/byte array/memoryview): + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. + update_after_digest (boolean): + Whether :meth:`digest` can be followed by another :meth:`update` + (default: ``False``). + + :Return: A :class:`SHA3_512_Hash` hash object + """ + + data = kwargs.pop("data", None) + update_after_digest = kwargs.pop("update_after_digest", False) + if len(args) == 1: + if data: + raise ValueError("Initial data for hash specified twice") + data = args[0] + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return SHA3_512_Hash(data, update_after_digest) + +# The size of the resulting hash in bytes. 
+digest_size = SHA3_512_Hash.digest_size + +# Input block size for HMAC +block_size = 72 diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_512.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_512.pyi new file mode 100644 index 000000000..cdeec1656 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA3_512.pyi @@ -0,0 +1,19 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHA3_512_Hash(object): + digest_size: int + block_size: int + oid: str + def __init__(self, data: Optional[Buffer], update_after_digest: bool) -> None: ... + def update(self, data: Buffer) -> SHA3_512_Hash: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> SHA3_512_Hash: ... + def new(self, data: Optional[Buffer]) -> SHA3_512_Hash: ... + +def new(__data: Buffer = ..., update_after_digest: bool = ...) -> SHA3_512_Hash: ... + +digest_size: int +block_size: int diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA512.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA512.py new file mode 100644 index 000000000..403fe45e3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA512.py @@ -0,0 +1,204 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr) + +_raw_sha512_lib = load_pycryptodome_raw_lib("Crypto.Hash._SHA512", + """ + int SHA512_init(void **shaState, + size_t digest_size); + int SHA512_destroy(void *shaState); + int SHA512_update(void *hs, + const uint8_t *buf, + size_t len); + int SHA512_digest(const void *shaState, + uint8_t *digest, + size_t digest_size); + int SHA512_copy(const void *src, void *dst); + + int SHA512_pbkdf2_hmac_assist(const void *inner, + const void *outer, + const uint8_t *first_digest, + uint8_t *final_digest, + size_t iterations, + size_t digest_size); + """) + +class SHA512Hash(object): + """A SHA-512 hash object (possibly in its truncated version SHA-512/224 or + SHA-512/256. + Do not instantiate directly. Use the :func:`new` function. 
+ + :ivar oid: ASN.1 Object ID + :vartype oid: string + + :ivar block_size: the size in bytes of the internal message block, + input to the compression function + :vartype block_size: integer + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + # The internal block size of the hash algorithm in bytes. + block_size = 128 + + def __init__(self, data, truncate): + self._truncate = truncate + + if truncate is None: + self.oid = "2.16.840.1.101.3.4.2.3" + self.digest_size = 64 + elif truncate == "224": + self.oid = "2.16.840.1.101.3.4.2.5" + self.digest_size = 28 + elif truncate == "256": + self.oid = "2.16.840.1.101.3.4.2.6" + self.digest_size = 32 + else: + raise ValueError("Incorrect truncation length. It must be '224' or '256'.") + + state = VoidPointer() + result = _raw_sha512_lib.SHA512_init(state.address_of(), + c_size_t(self.digest_size)) + if result: + raise ValueError("Error %d while instantiating SHA-512" + % result) + self._state = SmartPointer(state.get(), + _raw_sha512_lib.SHA512_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + result = _raw_sha512_lib.SHA512_update(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while hashing data with SHA512" + % result) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + bfr = create_string_buffer(self.digest_size) + result = _raw_sha512_lib.SHA512_digest(self._state.get(), + bfr, + c_size_t(self.digest_size)) + if result: + raise ValueError("Error %d while making SHA512 digest" + % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :return: A hash object of the same type + """ + + clone = SHA512Hash(None, self._truncate) + result = _raw_sha512_lib.SHA512_copy(self._state.get(), + clone._state.get()) + if result: + raise ValueError("Error %d while copying SHA512" % result) + return clone + + def new(self, data=None): + """Create a fresh SHA-512 hash object.""" + + return SHA512Hash(data, self._truncate) + + +def new(data=None, truncate=None): + """Create a new hash object. + + Args: + data (bytes/bytearray/memoryview): + Optional. The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`SHA512Hash.update`. + truncate (string): + Optional. The desired length of the digest. It can be either "224" or + "256". If not present, the digest is 512 bits long. + Passing this parameter is **not** equivalent to simply truncating + the output digest. + + :Return: A :class:`SHA512Hash` hash object + """ + + return SHA512Hash(data, truncate) + + +# The size of the full SHA-512 hash in bytes. 
+digest_size = 64 + +# The internal block size of the hash algorithm in bytes. +block_size = 128 + + +def _pbkdf2_hmac_assist(inner, outer, first_digest, iterations): + """Compute the expensive inner loop in PBKDF-HMAC.""" + + assert iterations > 0 + + bfr = create_string_buffer(len(first_digest)); + result = _raw_sha512_lib.SHA512_pbkdf2_hmac_assist( + inner._state.get(), + outer._state.get(), + first_digest, + bfr, + c_size_t(iterations), + c_size_t(len(first_digest))) + + if result: + raise ValueError("Error %d with PBKDF2-HMAC assist for SHA512" % result) + + return get_raw_buffer(bfr) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA512.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA512.pyi new file mode 100644 index 000000000..f219ee94d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHA512.pyi @@ -0,0 +1,22 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHA512Hash(object): + digest_size: int + block_size: int + oid: str + + def __init__(self, + data: Optional[Buffer], + truncate: Optional[str]) -> None: ... + def update(self, data: Buffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> SHA512Hash: ... + def new(self, data: Optional[Buffer] = ...) -> SHA512Hash: ... + +def new(data: Optional[Buffer] = ..., + truncate: Optional[str] = ...) -> SHA512Hash: ... +digest_size: int +block_size: int diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHAKE128.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHAKE128.py new file mode 100644 index 000000000..894a41b1f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHAKE128.py @@ -0,0 +1,129 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr, c_ubyte) + +from Crypto.Hash.keccak import _raw_keccak_lib + +class SHAKE128_XOF(object): + """A SHAKE128 hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + """ + + # ASN.1 Object ID + oid = "2.16.840.1.101.3.4.2.11" + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_keccak_lib.keccak_init(state.address_of(), + c_size_t(32), + c_ubyte(24)) + if result: + raise ValueError("Error %d while instantiating SHAKE128" + % result) + self._state = SmartPointer(state.get(), + _raw_keccak_lib.keccak_destroy) + self._is_squeezing = False + self._padding = 0x1F + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + if self._is_squeezing: + raise TypeError("You cannot call 'update' after the first 'read'") + + result = _raw_keccak_lib.keccak_absorb(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while updating SHAKE128 state" + % result) + return self + + def read(self, length): + """ + Compute the next piece of XOF output. + + .. note:: + You cannot use :meth:`update` anymore after the first call to + :meth:`read`. + + Args: + length (integer): the amount of bytes this method must return + + :return: the next piece of XOF output (of the given length) + :rtype: byte string + """ + + self._is_squeezing = True + bfr = create_string_buffer(length) + result = _raw_keccak_lib.keccak_squeeze(self._state.get(), + bfr, + c_size_t(length), + c_ubyte(self._padding)) + if result: + raise ValueError("Error %d while extracting from SHAKE128" + % result) + + return get_raw_buffer(bfr) + + def new(self, data=None): + return type(self)(data=data) + + +def new(data=None): + """Return a fresh instance of a SHAKE128 object. + + Args: + data (bytes/bytearray/memoryview): + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. + Optional. + + :Return: A :class:`SHAKE128_XOF` object + """ + + return SHAKE128_XOF(data=data) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHAKE128.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHAKE128.pyi new file mode 100644 index 000000000..f6188816a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHAKE128.pyi @@ -0,0 +1,13 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHAKE128_XOF(object): + oid: str + def __init__(self, + data: Optional[Buffer] = ...) -> None: ... + def update(self, data: Buffer) -> SHAKE128_XOF: ... + def read(self, length: int) -> bytes: ... + def new(self, data: Optional[Buffer] = ...) -> SHAKE128_XOF: ... + +def new(data: Optional[Buffer] = ...) -> SHAKE128_XOF: ... 
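For reference, a minimal usage sketch of the SHAKE128 XOF API defined in the vendored module above (only the new(), update() and read() calls shown in that source are used); the input bytes and output length are illustrative only, not part of the vendored files:

from Crypto.Hash import SHAKE128

# Absorb the message; update() is only allowed before the first read().
xof = SHAKE128.new(data=b"first chunk of the message")
xof.update(b"second chunk of the message")

# Squeeze as many output bytes as needed (an XOF has no fixed digest size).
stream = xof.read(32)
print(stream.hex())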
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHAKE256.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHAKE256.py new file mode 100644 index 000000000..f75b8221d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHAKE256.py @@ -0,0 +1,130 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr, c_ubyte) + +from Crypto.Hash.keccak import _raw_keccak_lib + +class SHAKE256_XOF(object): + """A SHAKE256 hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar oid: ASN.1 Object ID + :vartype oid: string + """ + + # ASN.1 Object ID + oid = "2.16.840.1.101.3.4.2.12" + + def __init__(self, data=None): + state = VoidPointer() + result = _raw_keccak_lib.keccak_init(state.address_of(), + c_size_t(64), + c_ubyte(24)) + if result: + raise ValueError("Error %d while instantiating SHAKE256" + % result) + self._state = SmartPointer(state.get(), + _raw_keccak_lib.keccak_destroy) + self._is_squeezing = False + self._padding = 0x1F + + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + if self._is_squeezing: + raise TypeError("You cannot call 'update' after the first 'read'") + + result = _raw_keccak_lib.keccak_absorb(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while updating SHAKE256 state" + % result) + return self + + def read(self, length): + """ + Compute the next piece of XOF output. + + .. note:: + You cannot use :meth:`update` anymore after the first call to + :meth:`read`. 
+ + Args: + length (integer): the amount of bytes this method must return + + :return: the next piece of XOF output (of the given length) + :rtype: byte string + """ + + self._is_squeezing = True + bfr = create_string_buffer(length) + result = _raw_keccak_lib.keccak_squeeze(self._state.get(), + bfr, + c_size_t(length), + c_ubyte(self._padding)) + if result: + raise ValueError("Error %d while extracting from SHAKE256" + % result) + + return get_raw_buffer(bfr) + + def new(self, data=None): + return type(self)(data=data) + + +def new(data=None): + """Return a fresh instance of a SHAKE256 object. + + Args: + data (bytes/bytearray/memoryview): + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. + Optional. + + :Return: A :class:`SHAKE256_XOF` object + """ + + return SHAKE256_XOF(data=data) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHAKE256.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHAKE256.pyi new file mode 100644 index 000000000..029347af3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/SHAKE256.pyi @@ -0,0 +1,13 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class SHAKE256_XOF(object): + oid: str + def __init__(self, + data: Optional[Buffer] = ...) -> None: ... + def update(self, data: Buffer) -> SHAKE256_XOF: ... + def read(self, length: int) -> bytes: ... + def new(self, data: Optional[Buffer] = ...) -> SHAKE256_XOF: ... + +def new(data: Optional[Buffer] = ...) -> SHAKE256_XOF: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TupleHash128.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TupleHash128.py new file mode 100644 index 000000000..a3fa96a13 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TupleHash128.py @@ -0,0 +1,136 @@ +# =================================================================== +# +# Copyright (c) 2021, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.py3compat import bord, is_bytes, tobytes + +from . 
import cSHAKE128 +from .cSHAKE128 import _encode_str, _right_encode + + +class TupleHash(object): + """A Tuple hash object. + Do not instantiate directly. + Use the :func:`new` function. + """ + + def __init__(self, custom, cshake, digest_size): + + self.digest_size = digest_size + + self._cshake = cshake._new(b'', custom, b'TupleHash') + self._digest = None + + def update(self, *data): + """Authenticate the next tuple of byte strings. + TupleHash guarantees the logical separation between each byte string. + + Args: + data (bytes/bytearray/memoryview): One or more items to hash. + """ + + if self._digest is not None: + raise TypeError("You cannot call 'update' after 'digest' or 'hexdigest'") + + for item in data: + if not is_bytes(item): + raise TypeError("You can only call 'update' on bytes" ) + self._cshake.update(_encode_str(item)) + + return self + + def digest(self): + """Return the **binary** (non-printable) digest of the tuple of byte strings. + + :return: The hash digest. Binary form. + :rtype: byte string + """ + + if self._digest is None: + self._cshake.update(_right_encode(self.digest_size * 8)) + self._digest = self._cshake.read(self.digest_size) + + return self._digest + + def hexdigest(self): + """Return the **printable** digest of the tuple of byte strings. + + :return: The hash digest. Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in tuple(self.digest())]) + + def new(self, **kwargs): + """Return a new instance of a TupleHash object. + See :func:`new`. + """ + + if "digest_bytes" not in kwargs and "digest_bits" not in kwargs: + kwargs["digest_bytes"] = self.digest_size + + return new(**kwargs) + + +def new(**kwargs): + """Create a new TupleHash128 object. + + Args: + digest_bytes (integer): + Optional. The size of the digest, in bytes. + Default is 64. Minimum is 8. + digest_bits (integer): + Optional and alternative to ``digest_bytes``. + The size of the digest, in bits (and in steps of 8). + Default is 512. Minimum is 64. + custom (bytes): + Optional. + A customization bytestring (``S`` in SP 800-185). + + :Return: A :class:`TupleHash` object + """ + + digest_bytes = kwargs.pop("digest_bytes", None) + digest_bits = kwargs.pop("digest_bits", None) + if None not in (digest_bytes, digest_bits): + raise TypeError("Only one digest parameter must be provided") + if (None, None) == (digest_bytes, digest_bits): + digest_bytes = 64 + if digest_bytes is not None: + if digest_bytes < 8: + raise ValueError("'digest_bytes' must be at least 8") + else: + if digest_bits < 64 or digest_bits % 8: + raise ValueError("'digest_bytes' must be at least 64 " + "in steps of 8") + digest_bytes = digest_bits // 8 + + custom = kwargs.pop("custom", b'') + + return TupleHash(custom, cSHAKE128, digest_bytes) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TupleHash128.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TupleHash128.pyi new file mode 100644 index 000000000..2e0ea833e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TupleHash128.pyi @@ -0,0 +1,22 @@ +from typing import Any, Union, List, Tuple +from types import ModuleType + +Buffer = Union[bytes, bytearray, memoryview] + +class TupleHash(object): + digest_size: int + def __init__(self, + custom: bytes, + cshake: ModuleType, + digest_size: int) -> None: ... + def update(self, *data: Buffer) -> TupleHash: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... 
+ def new(self, + digest_bytes: int = ..., + digest_bits: int = ..., + custom: int = ...) -> TupleHash: ... + +def new(digest_bytes: int = ..., + digest_bits: int = ..., + custom: int = ...) -> TupleHash: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TupleHash256.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TupleHash256.py new file mode 100644 index 000000000..40a824a9c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TupleHash256.py @@ -0,0 +1,70 @@ +# =================================================================== +# +# Copyright (c) 2021, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from . import cSHAKE256 +from .TupleHash128 import TupleHash + + +def new(**kwargs): + """Create a new TupleHash256 object. + + Args: + digest_bytes (integer): + Optional. The size of the digest, in bytes. + Default is 64. Minimum is 8. + digest_bits (integer): + Optional and alternative to ``digest_bytes``. + The size of the digest, in bits (and in steps of 8). + Default is 512. Minimum is 64. + custom (bytes): + Optional. + A customization bytestring (``S`` in SP 800-185). 
+ + :Return: A :class:`TupleHash` object + """ + + digest_bytes = kwargs.pop("digest_bytes", None) + digest_bits = kwargs.pop("digest_bits", None) + if None not in (digest_bytes, digest_bits): + raise TypeError("Only one digest parameter must be provided") + if (None, None) == (digest_bytes, digest_bits): + digest_bytes = 64 + if digest_bytes is not None: + if digest_bytes < 8: + raise ValueError("'digest_bytes' must be at least 8") + else: + if digest_bits < 64 or digest_bits % 8: + raise ValueError("'digest_bytes' must be at least 64 " + "in steps of 8") + digest_bytes = digest_bits // 8 + + custom = kwargs.pop("custom", b'') + + return TupleHash(custom, cSHAKE256, digest_bytes) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TupleHash256.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TupleHash256.pyi new file mode 100644 index 000000000..82d943f06 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TupleHash256.pyi @@ -0,0 +1,5 @@ +from .TupleHash128 import TupleHash + +def new(digest_bytes: int = ..., + digest_bits: int = ..., + custom: int = ...) -> TupleHash: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TurboSHAKE128.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TurboSHAKE128.py new file mode 100644 index 000000000..ab3e4e497 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TurboSHAKE128.py @@ -0,0 +1,112 @@ +from Crypto.Util._raw_api import (VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr, c_ubyte) + +from Crypto.Util.number import long_to_bytes +from Crypto.Util.py3compat import bchr + +from .keccak import _raw_keccak_lib + + +class TurboSHAKE(object): + """A TurboSHAKE hash object. + Do not instantiate directly. + Use the :func:`new` function. + """ + + def __init__(self, capacity, domain_separation, data): + + state = VoidPointer() + result = _raw_keccak_lib.keccak_init(state.address_of(), + c_size_t(capacity), + c_ubyte(12)) # Reduced number of rounds + if result: + raise ValueError("Error %d while instantiating TurboSHAKE" + % result) + self._state = SmartPointer(state.get(), _raw_keccak_lib.keccak_destroy) + + self._is_squeezing = False + self._capacity = capacity + self._domain = domain_separation + + if data: + self.update(data) + + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + if self._is_squeezing: + raise TypeError("You cannot call 'update' after the first 'read'") + + result = _raw_keccak_lib.keccak_absorb(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while updating TurboSHAKE state" + % result) + return self + + def read(self, length): + """ + Compute the next piece of XOF output. + + .. note:: + You cannot use :meth:`update` anymore after the first call to + :meth:`read`. 
+ + Args: + length (integer): the amount of bytes this method must return + + :return: the next piece of XOF output (of the given length) + :rtype: byte string + """ + + self._is_squeezing = True + bfr = create_string_buffer(length) + result = _raw_keccak_lib.keccak_squeeze(self._state.get(), + bfr, + c_size_t(length), + c_ubyte(self._domain)) + if result: + raise ValueError("Error %d while extracting from TurboSHAKE" + % result) + + return get_raw_buffer(bfr) + + def new(self, data=None): + return type(self)(self._capacity, self._domain, data) + + def _reset(self): + result = _raw_keccak_lib.keccak_reset(self._state.get()) + if result: + raise ValueError("Error %d while resetting TurboSHAKE state" + % result) + self._is_squeezing = False + + +def new(**kwargs): + """Create a new TurboSHAKE128 object. + + Args: + domain (integer): + Optional - A domain separation byte, between 0x01 and 0x7F. + The default value is 0x1F. + data (bytes/bytearray/memoryview): + Optional - The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. + + :Return: A :class:`TurboSHAKE` object + """ + + domain_separation = kwargs.get('domain', 0x1F) + if not (0x01 <= domain_separation <= 0x7F): + raise ValueError("Incorrect domain separation value (%d)" % + domain_separation) + data = kwargs.get('data') + return TurboSHAKE(32, domain_separation, data=data) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TurboSHAKE128.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TurboSHAKE128.pyi new file mode 100644 index 000000000..d74c9c08c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TurboSHAKE128.pyi @@ -0,0 +1,17 @@ +from typing import Union, Optional +from typing_extensions import TypedDict, Unpack, NotRequired + +Buffer = Union[bytes, bytearray, memoryview] + +class TurboSHAKE(object): + + def __init__(self, capacity: int, domain_separation: int, data: Union[Buffer, None]) -> None: ... + def update(self, data: Buffer) -> TurboSHAKE : ... + def read(self, length: int) -> bytes: ... + def new(self, data: Optional[Buffer]=None) -> TurboSHAKE: ... + +class Args(TypedDict): + domain: NotRequired[int] + data: NotRequired[Buffer] + +def new(**kwargs: Unpack[Args]) -> TurboSHAKE: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TurboSHAKE256.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TurboSHAKE256.py new file mode 100644 index 000000000..ce27a485b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TurboSHAKE256.py @@ -0,0 +1,22 @@ +from .TurboSHAKE128 import TurboSHAKE + +def new(**kwargs): + """Create a new TurboSHAKE256 object. + + Args: + domain (integer): + Optional - A domain separation byte, between 0x01 and 0x7F. + The default value is 0x1F. + data (bytes/bytearray/memoryview): + Optional - The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. 
+ + :Return: A :class:`TurboSHAKE` object + """ + + domain_separation = kwargs.get('domain', 0x1F) + if not (0x01 <= domain_separation <= 0x7F): + raise ValueError("Incorrect domain separation value (%d)" % + domain_separation) + data = kwargs.get('data') + return TurboSHAKE(64, domain_separation, data=data) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TurboSHAKE256.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TurboSHAKE256.pyi new file mode 100644 index 000000000..561e946cd --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/TurboSHAKE256.pyi @@ -0,0 +1,12 @@ +from typing import Union +from typing_extensions import TypedDict, Unpack, NotRequired + +from .TurboSHAKE128 import TurboSHAKE + +Buffer = Union[bytes, bytearray, memoryview] + +class Args(TypedDict): + domain: NotRequired[int] + data: NotRequired[Buffer] + +def new(**kwargs: Unpack[Args]) -> TurboSHAKE: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_BLAKE2b.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_BLAKE2b.abi3.so new file mode 100755 index 000000000..40cf664bb Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_BLAKE2b.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_BLAKE2s.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_BLAKE2s.abi3.so new file mode 100755 index 000000000..04a1ace0a Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_BLAKE2s.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_MD2.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_MD2.abi3.so new file mode 100755 index 000000000..1573ca3db Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_MD2.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_MD4.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_MD4.abi3.so new file mode 100755 index 000000000..8f41e31d7 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_MD4.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_MD5.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_MD5.abi3.so new file mode 100755 index 000000000..b22cf365e Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_MD5.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_RIPEMD160.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_RIPEMD160.abi3.so new file mode 100755 index 000000000..78faa00b2 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_RIPEMD160.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_SHA1.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_SHA1.abi3.so new file mode 100755 index 000000000..acd08adca Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_SHA1.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_SHA224.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_SHA224.abi3.so new file mode 100755 index 000000000..9cf3ef6de Binary files /dev/null and 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_SHA224.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_SHA256.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_SHA256.abi3.so new file mode 100755 index 000000000..6dffb1789 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_SHA256.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_SHA384.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_SHA384.abi3.so new file mode 100755 index 000000000..7c72fd09c Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_SHA384.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_SHA512.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_SHA512.abi3.so new file mode 100755 index 000000000..058653c46 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_SHA512.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/__init__.py new file mode 100644 index 000000000..80446e4f2 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/__init__.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__all__ = ['HMAC', 'MD2', 'MD4', 'MD5', 'RIPEMD160', 'SHA1', + 'SHA224', 'SHA256', 'SHA384', 'SHA512', + 'SHA3_224', 'SHA3_256', 'SHA3_384', 'SHA3_512', + 'CMAC', 'Poly1305', + 'cSHAKE128', 'cSHAKE256', 'KMAC128', 'KMAC256', + 'TupleHash128', 'TupleHash256', 'KangarooTwelve', + 'TurboSHAKE128', 'TurboSHAKE256'] + +def new(name): + """Return a new hash instance, based on its name or + on its ASN.1 Object ID""" + + name = name.upper() + if name in ("1.3.14.3.2.26", "SHA1", "SHA-1"): + from . import SHA1 + return SHA1.new() + if name in ("2.16.840.1.101.3.4.2.4", "SHA224", "SHA-224"): + from . import SHA224 + return SHA224.new() + if name in ("2.16.840.1.101.3.4.2.1", "SHA256", "SHA-256"): + from . import SHA256 + return SHA256.new() + if name in ("2.16.840.1.101.3.4.2.2", "SHA384", "SHA-384"): + from . import SHA384 + return SHA384.new() + if name in ("2.16.840.1.101.3.4.2.3", "SHA512", "SHA-512"): + from . import SHA512 + return SHA512.new() + if name in ("2.16.840.1.101.3.4.2.5", "SHA512-224", "SHA-512-224"): + from . 
import SHA512 + return SHA512.new(truncate='224') + if name in ("2.16.840.1.101.3.4.2.6", "SHA512-256", "SHA-512-256"): + from . import SHA512 + return SHA512.new(truncate='256') + if name in ("2.16.840.1.101.3.4.2.7", "SHA3-224", "SHA-3-224"): + from . import SHA3_224 + return SHA3_224.new() + if name in ("2.16.840.1.101.3.4.2.8", "SHA3-256", "SHA-3-256"): + from . import SHA3_256 + return SHA3_256.new() + if name in ("2.16.840.1.101.3.4.2.9", "SHA3-384", "SHA-3-384"): + from . import SHA3_384 + return SHA3_384.new() + if name in ("2.16.840.1.101.3.4.2.10", "SHA3-512", "SHA-3-512"): + from . import SHA3_512 + return SHA3_512.new() + else: + raise ValueError("Unknown hash %s" % str(name)) + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/__init__.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/__init__.pyi new file mode 100644 index 000000000..36ad48d15 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/__init__.pyi @@ -0,0 +1,57 @@ +from typing import overload +from typing_extensions import Literal + +from Crypto.Hash.SHA1 import SHA1Hash +from Crypto.Hash.SHA224 import SHA224Hash +from Crypto.Hash.SHA256 import SHA256Hash +from Crypto.Hash.SHA384 import SHA384Hash +from Crypto.Hash.SHA512 import SHA512Hash +from Crypto.Hash.SHA3_224 import SHA3_224_Hash +from Crypto.Hash.SHA3_256 import SHA3_256_Hash +from Crypto.Hash.SHA3_384 import SHA3_384_Hash +from Crypto.Hash.SHA3_512 import SHA3_512_Hash + +@overload +def new(name: Literal["1.3.14.3.2.26"]) -> SHA1Hash: ... +@overload +def new(name: Literal["SHA1"]) -> SHA1Hash: ... +@overload +def new(name: Literal["2.16.840.1.101.3.4.2.4"]) -> SHA224Hash: ... +@overload +def new(name: Literal["SHA224"]) -> SHA224Hash: ... +@overload +def new(name: Literal["2.16.840.1.101.3.4.2.1"]) -> SHA256Hash: ... +@overload +def new(name: Literal["SHA256"]) -> SHA256Hash: ... +@overload +def new(name: Literal["2.16.840.1.101.3.4.2.2"]) -> SHA384Hash: ... +@overload +def new(name: Literal["SHA384"]) -> SHA384Hash: ... +@overload +def new(name: Literal["2.16.840.1.101.3.4.2.3"]) -> SHA512Hash: ... +@overload +def new(name: Literal["SHA512"]) -> SHA512Hash: ... +@overload +def new(name: Literal["2.16.840.1.101.3.4.2.5"]) -> SHA512Hash: ... +@overload +def new(name: Literal["SHA512-224"]) -> SHA512Hash: ... +@overload +def new(name: Literal["2.16.840.1.101.3.4.2.6"]) -> SHA512Hash: ... +@overload +def new(name: Literal["SHA512-256"]) -> SHA512Hash: ... +@overload +def new(name: Literal["2.16.840.1.101.3.4.2.7"]) -> SHA3_224_Hash: ... +@overload +def new(name: Literal["SHA3-224"]) -> SHA3_224_Hash: ... +@overload +def new(name: Literal["2.16.840.1.101.3.4.2.8"]) -> SHA3_256_Hash: ... +@overload +def new(name: Literal["SHA3-256"]) -> SHA3_256_Hash: ... +@overload +def new(name: Literal["2.16.840.1.101.3.4.2.9"]) -> SHA3_384_Hash: ... +@overload +def new(name: Literal["SHA3-384"]) -> SHA3_384_Hash: ... +@overload +def new(name: Literal["2.16.840.1.101.3.4.2.10"]) -> SHA3_512_Hash: ... +@overload +def new(name: Literal["SHA3-512"]) -> SHA3_512_Hash: ... 
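For reference, a minimal usage sketch of the name/OID dispatcher implemented in Crypto/Hash/__init__.py above; the hash names and message bytes are illustrative only, not part of the vendored files:

from Crypto import Hash

# Look up an algorithm by name (or by its ASN.1 OID) and hash a message.
h = Hash.new("SHA3-256")
h.update(b"some message")
print(h.hexdigest())

# Truncated SHA-512 variants go through the same dispatcher, which
# forwards to SHA512.new(truncate='256') internally.
h2 = Hash.new("SHA512-256")
h2.update(b"some message")
print(h2.hexdigest())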
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_ghash_clmul.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_ghash_clmul.abi3.so new file mode 100755 index 000000000..d13832ca9 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_ghash_clmul.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_ghash_portable.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_ghash_portable.abi3.so new file mode 100755 index 000000000..555c6fccf Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_ghash_portable.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_keccak.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_keccak.abi3.so new file mode 100755 index 000000000..4e494b0f7 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_keccak.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_poly1305.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_poly1305.abi3.so new file mode 100755 index 000000000..901b8c2c5 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/_poly1305.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/cSHAKE128.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/cSHAKE128.py new file mode 100644 index 000000000..3907975fc --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/cSHAKE128.py @@ -0,0 +1,187 @@ +# =================================================================== +# +# Copyright (c) 2021, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +from Crypto.Util.py3compat import bchr, concat_buffers + +from Crypto.Util._raw_api import (VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr, c_ubyte) + +from Crypto.Util.number import long_to_bytes + +from Crypto.Hash.keccak import _raw_keccak_lib + + +def _left_encode(x): + """Left encode function as defined in NIST SP 800-185""" + + assert (x < (1 << 2040) and x >= 0) + + # Get number of bytes needed to represent this integer. + num = 1 if x == 0 else (x.bit_length() + 7) // 8 + + return bchr(num) + long_to_bytes(x) + + +def _right_encode(x): + """Right encode function as defined in NIST SP 800-185""" + + assert (x < (1 << 2040) and x >= 0) + + # Get number of bytes needed to represent this integer. + num = 1 if x == 0 else (x.bit_length() + 7) // 8 + + return long_to_bytes(x) + bchr(num) + + +def _encode_str(x): + """Encode string function as defined in NIST SP 800-185""" + + bitlen = len(x) * 8 + if bitlen >= (1 << 2040): + raise ValueError("String too large to encode in cSHAKE") + + return concat_buffers(_left_encode(bitlen), x) + + +def _bytepad(x, length): + """Zero pad byte string as defined in NIST SP 800-185""" + + to_pad = concat_buffers(_left_encode(length), x) + + # Note: this implementation works with byte aligned strings, + # hence no additional bit padding is needed at this point. + npad = (length - len(to_pad) % length) % length + + return to_pad + b'\x00' * npad + + +class cSHAKE_XOF(object): + """A cSHAKE hash object. + Do not instantiate directly. + Use the :func:`new` function. + """ + + def __init__(self, data, custom, capacity, function): + state = VoidPointer() + + if custom or function: + prefix_unpad = _encode_str(function) + _encode_str(custom) + prefix = _bytepad(prefix_unpad, (1600 - capacity)//8) + self._padding = 0x04 + else: + prefix = None + self._padding = 0x1F # for SHAKE + + result = _raw_keccak_lib.keccak_init(state.address_of(), + c_size_t(capacity//8), + c_ubyte(24)) + if result: + raise ValueError("Error %d while instantiating cSHAKE" + % result) + self._state = SmartPointer(state.get(), + _raw_keccak_lib.keccak_destroy) + self._is_squeezing = False + + if prefix: + self.update(prefix) + + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + if self._is_squeezing: + raise TypeError("You cannot call 'update' after the first 'read'") + + result = _raw_keccak_lib.keccak_absorb(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while updating %s state" + % (result, self.name)) + return self + + def read(self, length): + """ + Compute the next piece of XOF output. + + .. note:: + You cannot use :meth:`update` anymore after the first call to + :meth:`read`. 
+ + Args: + length (integer): the amount of bytes this method must return + + :return: the next piece of XOF output (of the given length) + :rtype: byte string + """ + + self._is_squeezing = True + bfr = create_string_buffer(length) + result = _raw_keccak_lib.keccak_squeeze(self._state.get(), + bfr, + c_size_t(length), + c_ubyte(self._padding)) + if result: + raise ValueError("Error %d while extracting from %s" + % (result, self.name)) + + return get_raw_buffer(bfr) + + +def _new(data, custom, function): + # Use Keccak[256] + return cSHAKE_XOF(data, custom, 256, function) + + +def new(data=None, custom=None): + """Return a fresh instance of a cSHAKE128 object. + + Args: + data (bytes/bytearray/memoryview): + Optional. + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. + custom (bytes): + Optional. + A customization bytestring (``S`` in SP 800-185). + + :Return: A :class:`cSHAKE_XOF` object + """ + + # Use Keccak[256] + return cSHAKE_XOF(data, custom, 256, b'') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/cSHAKE128.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/cSHAKE128.pyi new file mode 100644 index 000000000..1452feafb --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/cSHAKE128.pyi @@ -0,0 +1,14 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +class cSHAKE_XOF(object): + def __init__(self, + data: Optional[Buffer] = ..., + function: Optional[bytes] = ..., + custom: Optional[bytes] = ...) -> None: ... + def update(self, data: Buffer) -> cSHAKE_XOF: ... + def read(self, length: int) -> bytes: ... + +def new(data: Optional[Buffer] = ..., + custom: Optional[Buffer] = ...) -> cSHAKE_XOF: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/cSHAKE256.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/cSHAKE256.py new file mode 100644 index 000000000..b3b31d6de --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/cSHAKE256.py @@ -0,0 +1,56 @@ +# =================================================================== +# +# Copyright (c) 2021, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
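A short sketch of how the cSHAKE128 XOF defined above is meant to be driven: the customization string domain-separates otherwise identical inputs, and read() can be called repeatedly to squeeze arbitrary amounts of output (inputs and lengths below are illustrative).

from Crypto.Hash import cSHAKE128

xof_a = cSHAKE128.new(data=b"payload", custom=b"context A")
xof_b = cSHAKE128.new(custom=b"context B")
xof_b.update(b"payload")                 # update() may be used until the first read()

out_a = xof_a.read(32) + xof_a.read(16)  # 48 bytes, squeezed in two calls
out_b = xof_b.read(48)

# Same message, different customization strings: unrelated output streams.
assert out_a != out_b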
+# =================================================================== + +from Crypto.Util._raw_api import c_size_t +from Crypto.Hash.cSHAKE128 import cSHAKE_XOF + + +def _new(data, custom, function): + # Use Keccak[512] + return cSHAKE_XOF(data, custom, 512, function) + + +def new(data=None, custom=None): + """Return a fresh instance of a cSHAKE256 object. + + Args: + data (bytes/bytearray/memoryview): + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`update`. + Optional. + custom (bytes): + Optional. + A customization bytestring (``S`` in SP 800-185). + + :Return: A :class:`cSHAKE_XOF` object + """ + + # Use Keccak[512] + return cSHAKE_XOF(data, custom, 512, b'') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/cSHAKE256.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/cSHAKE256.pyi new file mode 100644 index 000000000..205b816a8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/cSHAKE256.pyi @@ -0,0 +1,8 @@ +from typing import Union, Optional + +from Crypto.Hash.cSHAKE128 import cSHAKE_XOF + +Buffer = Union[bytes, bytearray, memoryview] + +def new(data: Optional[Buffer] = ..., + custom: Optional[Buffer] = ...) -> cSHAKE_XOF: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/keccak.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/keccak.py new file mode 100644 index 000000000..f3f8bb5ab --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/keccak.py @@ -0,0 +1,181 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +from Crypto.Util.py3compat import bord + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + VoidPointer, SmartPointer, + create_string_buffer, + get_raw_buffer, c_size_t, + c_uint8_ptr, c_ubyte) + +_raw_keccak_lib = load_pycryptodome_raw_lib("Crypto.Hash._keccak", + """ + int keccak_init(void **state, + size_t capacity_bytes, + uint8_t rounds); + int keccak_destroy(void *state); + int keccak_absorb(void *state, + const uint8_t *in, + size_t len); + int keccak_squeeze(const void *state, + uint8_t *out, + size_t len, + uint8_t padding); + int keccak_digest(void *state, + uint8_t *digest, + size_t len, + uint8_t padding); + int keccak_copy(const void *src, void *dst); + int keccak_reset(void *state); + """) + +class Keccak_Hash(object): + """A Keccak hash object. + Do not instantiate directly. + Use the :func:`new` function. + + :ivar digest_size: the size in bytes of the resulting hash + :vartype digest_size: integer + """ + + def __init__(self, data, digest_bytes, update_after_digest): + # The size of the resulting hash in bytes. + self.digest_size = digest_bytes + + self._update_after_digest = update_after_digest + self._digest_done = False + self._padding = 0x01 + + state = VoidPointer() + result = _raw_keccak_lib.keccak_init(state.address_of(), + c_size_t(self.digest_size * 2), + c_ubyte(24)) + if result: + raise ValueError("Error %d while instantiating keccak" % result) + self._state = SmartPointer(state.get(), + _raw_keccak_lib.keccak_destroy) + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Args: + data (byte string/byte array/memoryview): The next chunk of the message being hashed. + """ + + if self._digest_done and not self._update_after_digest: + raise TypeError("You can only call 'digest' or 'hexdigest' on this object") + + result = _raw_keccak_lib.keccak_absorb(self._state.get(), + c_uint8_ptr(data), + c_size_t(len(data))) + if result: + raise ValueError("Error %d while updating keccak" % result) + return self + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Binary form. + :rtype: byte string + """ + + self._digest_done = True + bfr = create_string_buffer(self.digest_size) + result = _raw_keccak_lib.keccak_digest(self._state.get(), + bfr, + c_size_t(self.digest_size), + c_ubyte(self._padding)) + if result: + raise ValueError("Error %d while squeezing keccak" % result) + + return get_raw_buffer(bfr) + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + :return: The hash digest, computed over the data processed so far. + Hexadecimal encoded. + :rtype: string + """ + + return "".join(["%02x" % bord(x) for x in self.digest()]) + + def new(self, **kwargs): + """Create a fresh Keccak hash object.""" + + if "digest_bytes" not in kwargs and "digest_bits" not in kwargs: + kwargs["digest_bytes"] = self.digest_size + + return new(**kwargs) + + +def new(**kwargs): + """Create a new hash object. + + Args: + data (bytes/bytearray/memoryview): + The very first chunk of the message to hash. + It is equivalent to an early call to :meth:`Keccak_Hash.update`. + digest_bytes (integer): + The size of the digest, in bytes (28, 32, 48, 64). + digest_bits (integer): + The size of the digest, in bits (224, 256, 384, 512). 
+ update_after_digest (boolean): + Whether :meth:`Keccak.digest` can be followed by another + :meth:`Keccak.update` (default: ``False``). + + :Return: A :class:`Keccak_Hash` hash object + """ + + data = kwargs.pop("data", None) + update_after_digest = kwargs.pop("update_after_digest", False) + + digest_bytes = kwargs.pop("digest_bytes", None) + digest_bits = kwargs.pop("digest_bits", None) + if None not in (digest_bytes, digest_bits): + raise TypeError("Only one digest parameter must be provided") + if (None, None) == (digest_bytes, digest_bits): + raise TypeError("Digest size (bits, bytes) not provided") + if digest_bytes is not None: + if digest_bytes not in (28, 32, 48, 64): + raise ValueError("'digest_bytes' must be: 28, 32, 48 or 64") + else: + if digest_bits not in (224, 256, 384, 512): + raise ValueError("'digest_bytes' must be: 224, 256, 384 or 512") + digest_bytes = digest_bits // 8 + + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + return Keccak_Hash(data, digest_bytes, update_after_digest) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/keccak.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/keccak.pyi new file mode 100644 index 000000000..844d2565e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Hash/keccak.pyi @@ -0,0 +1,23 @@ +from typing import Union, Any + +Buffer = Union[bytes, bytearray, memoryview] + +class Keccak_Hash(object): + digest_size: int + def __init__(self, + data: Buffer, + digest_bytes: int, + update_after_digest: bool) -> None: ... + def update(self, data: Buffer) -> Keccak_Hash: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def new(self, + data: Buffer = ..., + digest_bytes: int = ..., + digest_bits: int = ..., + update_after_digest: bool = ...) -> Keccak_Hash: ... + +def new(data: Buffer = ..., + digest_bytes: int = ..., + digest_bits: int = ..., + update_after_digest: bool = ...) -> Keccak_Hash: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/PEM.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/PEM.py new file mode 100644 index 000000000..4c07b2574 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/PEM.py @@ -0,0 +1,189 @@ +# +# Util/PEM.py : Privacy Enhanced Mail utilities +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
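For orientation, a small sketch of the keccak module defined above (this is the original Keccak padding, not SHA-3); the digest size must be stated explicitly, in bytes or in bits.

from Crypto.Hash import keccak

k = keccak.new(digest_bits=256)
k.update(b"chunk one")
k.update(b"chunk two")                    # hashing may be done incrementally
print(k.hexdigest())                      # 64 hex characters for a 256-bit digest

# digest_bytes is the equivalent spelling: 32 bytes == 256 bits.
assert keccak.new(data=b"x", digest_bytes=32).digest_size == 32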
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +__all__ = ['encode', 'decode'] + +import re +from binascii import a2b_base64, b2a_base64, hexlify, unhexlify + +from Crypto.Hash import MD5 +from Crypto.Util.Padding import pad, unpad +from Crypto.Cipher import DES, DES3, AES +from Crypto.Protocol.KDF import PBKDF1 +from Crypto.Random import get_random_bytes +from Crypto.Util.py3compat import tobytes, tostr + + +def encode(data, marker, passphrase=None, randfunc=None): + """Encode a piece of binary data into PEM format. + + Args: + data (byte string): + The piece of binary data to encode. + marker (string): + The marker for the PEM block (e.g. "PUBLIC KEY"). + Note that there is no official master list for all allowed markers. + Still, you can refer to the OpenSSL_ source code. + passphrase (byte string): + If given, the PEM block will be encrypted. The key is derived from + the passphrase. + randfunc (callable): + Random number generation function; it accepts an integer N and returns + a byte string of random data, N bytes long. If not given, a new one is + instantiated. + + Returns: + The PEM block, as a string. + + .. _OpenSSL: https://github.com/openssl/openssl/blob/master/include/openssl/pem.h + """ + + if randfunc is None: + randfunc = get_random_bytes + + out = "-----BEGIN %s-----\n" % marker + if passphrase: + # We only support 3DES for encryption + salt = randfunc(8) + key = PBKDF1(passphrase, salt, 16, 1, MD5) + key += PBKDF1(key + passphrase, salt, 8, 1, MD5) + objenc = DES3.new(key, DES3.MODE_CBC, salt) + out += "Proc-Type: 4,ENCRYPTED\nDEK-Info: DES-EDE3-CBC,%s\n\n" %\ + tostr(hexlify(salt).upper()) + # Encrypt with PKCS#7 padding + data = objenc.encrypt(pad(data, objenc.block_size)) + elif passphrase is not None: + raise ValueError("Empty password") + + # Each BASE64 line can take up to 64 characters (=48 bytes of data) + # b2a_base64 adds a new line character! + chunks = [tostr(b2a_base64(data[i:i + 48])) + for i in range(0, len(data), 48)] + out += "".join(chunks) + out += "-----END %s-----" % marker + return out + + +def _EVP_BytesToKey(data, salt, key_len): + d = [ b'' ] + m = (key_len + 15 ) // 16 + for _ in range(m): + nd = MD5.new(d[-1] + data + salt).digest() + d.append(nd) + return b"".join(d)[:key_len] + + +def decode(pem_data, passphrase=None): + """Decode a PEM block into binary. + + Args: + pem_data (string): + The PEM block. + passphrase (byte string): + If given and the PEM block is encrypted, + the key will be derived from the passphrase. + + Returns: + A tuple with the binary data, the marker string, and a boolean to + indicate if decryption was performed. + + Raises: + ValueError: if decoding fails, if the PEM file is encrypted and no passphrase has + been provided or if the passphrase is incorrect. 
+ """ + + # Verify Pre-Encapsulation Boundary + r = re.compile(r"\s*-----BEGIN (.*)-----\s+") + m = r.match(pem_data) + if not m: + raise ValueError("Not a valid PEM pre boundary") + marker = m.group(1) + + # Verify Post-Encapsulation Boundary + r = re.compile(r"-----END (.*)-----\s*$") + m = r.search(pem_data) + if not m or m.group(1) != marker: + raise ValueError("Not a valid PEM post boundary") + + # Removes spaces and slit on lines + lines = pem_data.replace(" ", '').split() + + # Decrypts, if necessary + if lines[1].startswith('Proc-Type:4,ENCRYPTED'): + if not passphrase: + raise ValueError("PEM is encrypted, but no passphrase available") + DEK = lines[2].split(':') + if len(DEK) != 2 or DEK[0] != 'DEK-Info': + raise ValueError("PEM encryption format not supported.") + algo, salt = DEK[1].split(',') + salt = unhexlify(tobytes(salt)) + + padding = True + + if algo == "DES-CBC": + key = _EVP_BytesToKey(passphrase, salt, 8) + objdec = DES.new(key, DES.MODE_CBC, salt) + elif algo == "DES-EDE3-CBC": + key = _EVP_BytesToKey(passphrase, salt, 24) + objdec = DES3.new(key, DES3.MODE_CBC, salt) + elif algo == "AES-128-CBC": + key = _EVP_BytesToKey(passphrase, salt[:8], 16) + objdec = AES.new(key, AES.MODE_CBC, salt) + elif algo == "AES-192-CBC": + key = _EVP_BytesToKey(passphrase, salt[:8], 24) + objdec = AES.new(key, AES.MODE_CBC, salt) + elif algo == "AES-256-CBC": + key = _EVP_BytesToKey(passphrase, salt[:8], 32) + objdec = AES.new(key, AES.MODE_CBC, salt) + elif algo.lower() == "id-aes256-gcm": + key = _EVP_BytesToKey(passphrase, salt[:8], 32) + objdec = AES.new(key, AES.MODE_GCM, nonce=salt) + padding = False + else: + raise ValueError("Unsupport PEM encryption algorithm (%s)." % algo) + lines = lines[2:] + else: + objdec = None + + # Decode body + data = a2b_base64(''.join(lines[1:-1])) + enc_flag = False + if objdec: + if padding: + data = unpad(objdec.decrypt(data), objdec.block_size) + else: + # There is no tag, so we don't use decrypt_and_verify + data = objdec.decrypt(data) + enc_flag = True + + return (data, marker, enc_flag) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/PEM.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/PEM.pyi new file mode 100644 index 000000000..2e324c450 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/PEM.pyi @@ -0,0 +1,10 @@ +from typing import Tuple, Optional, Callable + +def encode(data: bytes, + marke: str, + passphrase: Optional[bytes] = ..., + randfunc: Optional[Callable[[int],bytes]] = ...) -> str: ... + + +def decode(pem_data: str, + passphrase: Optional[bytes] = ...) -> Tuple[bytes, str, bool]: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/PKCS8.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/PKCS8.py new file mode 100644 index 000000000..0747dfc1d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/PKCS8.py @@ -0,0 +1,226 @@ +# +# PublicKey/PKCS8.py : PKCS#8 functions +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. 
Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + + +from Crypto.Util.py3compat import * + +from Crypto.Util.asn1 import ( + DerNull, + DerSequence, + DerObjectId, + DerOctetString, + ) + +from Crypto.IO._PBES import PBES1, PBES2, PbesError + + +__all__ = ['wrap', 'unwrap'] + + +def wrap(private_key, key_oid, passphrase=None, protection=None, + prot_params=None, key_params=DerNull(), randfunc=None): + """Wrap a private key into a PKCS#8 blob (clear or encrypted). + + Args: + + private_key (bytes): + The private key encoded in binary form. The actual encoding is + algorithm specific. In most cases, it is DER. + + key_oid (string): + The object identifier (OID) of the private key to wrap. + It is a dotted string, like ``'1.2.840.113549.1.1.1'`` (for RSA keys) + or ``'1.2.840.10045.2.1'`` (for ECC keys). + + Keyword Args: + + passphrase (bytes or string): + The secret passphrase from which the wrapping key is derived. + Set it only if encryption is required. + + protection (string): + The identifier of the algorithm to use for securely wrapping the key. + Refer to :ref:`the encryption parameters` . + The default value is ``'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC'``. + + prot_params (dictionary): + Parameters for the key derivation function (KDF). + Refer to :ref:`the encryption parameters` . + + key_params (DER object or None): + The ``parameters`` field to use in the ``AlgorithmIdentifier`` + SEQUENCE. If ``None``, no ``parameters`` field will be added. + By default, the ASN.1 type ``NULL`` is used. + + randfunc (callable): + Random number generation function; it should accept a single integer + N and return a string of random data, N bytes long. + If not specified, a new RNG will be instantiated + from :mod:`Crypto.Random`. + + Returns: + bytes: The PKCS#8-wrapped private key (possibly encrypted). 
+ """ + + # + # PrivateKeyInfo ::= SEQUENCE { + # version Version, + # privateKeyAlgorithm PrivateKeyAlgorithmIdentifier, + # privateKey PrivateKey, + # attributes [0] IMPLICIT Attributes OPTIONAL + # } + # + if key_params is None: + algorithm = DerSequence([DerObjectId(key_oid)]) + else: + algorithm = DerSequence([DerObjectId(key_oid), key_params]) + + pk_info = DerSequence([ + 0, + algorithm, + DerOctetString(private_key) + ]) + pk_info_der = pk_info.encode() + + if passphrase is None: + return pk_info_der + + if not passphrase: + raise ValueError("Empty passphrase") + + # Encryption with PBES2 + passphrase = tobytes(passphrase) + if protection is None: + protection = 'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC' + return PBES2.encrypt(pk_info_der, passphrase, + protection, prot_params, randfunc) + + +def unwrap(p8_private_key, passphrase=None): + """Unwrap a private key from a PKCS#8 blob (clear or encrypted). + + Args: + p8_private_key (bytes): + The private key wrapped into a PKCS#8 container, DER encoded. + + Keyword Args: + passphrase (byte string or string): + The passphrase to use to decrypt the blob (if it is encrypted). + + Return: + A tuple containing + + #. the algorithm identifier of the wrapped key (OID, dotted string) + #. the private key (bytes, DER encoded) + #. the associated parameters (bytes, DER encoded) or ``None`` + + Raises: + ValueError : if decoding fails + """ + + if passphrase: + passphrase = tobytes(passphrase) + + found = False + try: + p8_private_key = PBES1.decrypt(p8_private_key, passphrase) + found = True + except PbesError as e: + error_str = "PBES1[%s]" % str(e) + except ValueError: + error_str = "PBES1[Invalid]" + + if not found: + try: + p8_private_key = PBES2.decrypt(p8_private_key, passphrase) + found = True + except PbesError as e: + error_str += ",PBES2[%s]" % str(e) + except ValueError: + error_str += ",PBES2[Invalid]" + + if not found: + raise ValueError("Error decoding PKCS#8 (%s)" % error_str) + + pk_info = DerSequence().decode(p8_private_key, nr_elements=(2, 3, 4, 5)) + if len(pk_info) == 2 and not passphrase: + raise ValueError("Not a valid clear PKCS#8 structure " + "(maybe it is encrypted?)") + + # RFC5208, PKCS#8, version is v1(0) + # + # PrivateKeyInfo ::= SEQUENCE { + # version Version, + # privateKeyAlgorithm PrivateKeyAlgorithmIdentifier, + # privateKey PrivateKey, + # attributes [0] IMPLICIT Attributes OPTIONAL + # } + # + # RFC5915, Asymmetric Key Package, version is v2(1) + # + # OneAsymmetricKey ::= SEQUENCE { + # version Version, + # privateKeyAlgorithm PrivateKeyAlgorithmIdentifier, + # privateKey PrivateKey, + # attributes [0] Attributes OPTIONAL, + # ..., + # [[2: publicKey [1] PublicKey OPTIONAL ]], + # ... 
+ # } + + if pk_info[0] == 0: + if len(pk_info) not in (3, 4): + raise ValueError("Not a valid PrivateKeyInfo SEQUENCE") + elif pk_info[0] == 1: + if len(pk_info) not in (3, 4, 5): + raise ValueError("Not a valid PrivateKeyInfo SEQUENCE") + else: + raise ValueError("Not a valid PrivateKeyInfo SEQUENCE") + + algo = DerSequence().decode(pk_info[1], nr_elements=(1, 2)) + algo_oid = DerObjectId().decode(algo[0]).value + if len(algo) == 1: + algo_params = None + else: + try: + DerNull().decode(algo[1]) + algo_params = None + except: + algo_params = algo[1] + + # PrivateKey ::= OCTET STRING + private_key = DerOctetString().decode(pk_info[2]).payload + + # We ignore attributes and (for v2 only) publickey + + return (algo_oid, private_key, algo_params) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/PKCS8.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/PKCS8.pyi new file mode 100644 index 000000000..cf6aa8ae3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/PKCS8.pyi @@ -0,0 +1,17 @@ +from typing import Tuple, Optional, Union, Callable +from typing_extensions import NotRequired + +from Crypto.Util.asn1 import DerObject +from Crypto.IO._PBES import ProtParams + + +def wrap(private_key: bytes, + key_oid: str, + passphrase: Union[bytes, str] = ..., + protection: str = ..., + prot_params: Optional[ProtParams] = ..., + key_params: Optional[DerObject] = ..., + randfunc: Optional[Callable[[int], str]] = ...) -> bytes: ... + + +def unwrap(p8_private_key: bytes, passphrase: Optional[Union[bytes, str]] = ...) -> Tuple[str, bytes, Optional[bytes]]: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/_PBES.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/_PBES.py new file mode 100644 index 000000000..93d39446f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/_PBES.py @@ -0,0 +1,546 @@ +# +# PublicKey/_PBES.py : Password-Based Encryption functions +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
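A round-trip sketch for the wrap()/unwrap() pair above. The key bytes are a placeholder blob rather than a real DER-encoded RSA key; wrap() only embeds them in the PrivateKeyInfo OCTET STRING, so the round trip still exercises the API, including the default PBES2 protection when a passphrase is given.

from Crypto.IO import PKCS8

rsa_oid = "1.2.840.113549.1.1.1"       # rsaEncryption
dummy_key = b"\x30\x03\x02\x01\x00"    # placeholder DER blob, not a usable key

# Clear (unencrypted) PKCS#8 wrapping.
blob = PKCS8.wrap(dummy_key, rsa_oid)
oid, key, params = PKCS8.unwrap(blob)
assert (oid, key) == (rsa_oid, dummy_key)

# Encrypted wrapping: the default protection is
# 'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC', as documented above.
enc_blob = PKCS8.wrap(dummy_key, rsa_oid, passphrase=b"correct horse")
assert PKCS8.unwrap(enc_blob, passphrase=b"correct horse")[1] == dummy_key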
+# =================================================================== + +import re + +from Crypto import Hash +from Crypto import Random +from Crypto.Util.asn1 import ( + DerSequence, DerOctetString, + DerObjectId, DerInteger, + ) + +from Crypto.Cipher import AES +from Crypto.Util.Padding import pad, unpad +from Crypto.Protocol.KDF import PBKDF1, PBKDF2, scrypt + +_OID_PBE_WITH_MD5_AND_DES_CBC = "1.2.840.113549.1.5.3" +_OID_PBE_WITH_MD5_AND_RC2_CBC = "1.2.840.113549.1.5.6" +_OID_PBE_WITH_SHA1_AND_DES_CBC = "1.2.840.113549.1.5.10" +_OID_PBE_WITH_SHA1_AND_RC2_CBC = "1.2.840.113549.1.5.11" + +_OID_PBES2 = "1.2.840.113549.1.5.13" + +_OID_PBKDF2 = "1.2.840.113549.1.5.12" +_OID_SCRYPT = "1.3.6.1.4.1.11591.4.11" + +_OID_HMAC_SHA1 = "1.2.840.113549.2.7" + +_OID_DES_EDE3_CBC = "1.2.840.113549.3.7" +_OID_AES128_CBC = "2.16.840.1.101.3.4.1.2" +_OID_AES192_CBC = "2.16.840.1.101.3.4.1.22" +_OID_AES256_CBC = "2.16.840.1.101.3.4.1.42" +_OID_AES128_GCM = "2.16.840.1.101.3.4.1.6" +_OID_AES192_GCM = "2.16.840.1.101.3.4.1.26" +_OID_AES256_GCM = "2.16.840.1.101.3.4.1.46" + +class PbesError(ValueError): + pass + +# These are the ASN.1 definitions used by the PBES1/2 logic: +# +# EncryptedPrivateKeyInfo ::= SEQUENCE { +# encryptionAlgorithm EncryptionAlgorithmIdentifier, +# encryptedData EncryptedData +# } +# +# EncryptionAlgorithmIdentifier ::= AlgorithmIdentifier +# +# EncryptedData ::= OCTET STRING +# +# AlgorithmIdentifier ::= SEQUENCE { +# algorithm OBJECT IDENTIFIER, +# parameters ANY DEFINED BY algorithm OPTIONAL +# } +# +# PBEParameter ::= SEQUENCE { +# salt OCTET STRING (SIZE(8)), +# iterationCount INTEGER +# } +# +# PBES2-params ::= SEQUENCE { +# keyDerivationFunc AlgorithmIdentifier {{PBES2-KDFs}}, +# encryptionScheme AlgorithmIdentifier {{PBES2-Encs}} +# } +# +# PBKDF2-params ::= SEQUENCE { +# salt CHOICE { +# specified OCTET STRING, +# otherSource AlgorithmIdentifier {{PBKDF2-SaltSources}} +# }, +# iterationCount INTEGER (1..MAX), +# keyLength INTEGER (1..MAX) OPTIONAL, +# prf AlgorithmIdentifier {{PBKDF2-PRFs}} DEFAULT algid-hmacWithSHA1 +# } +# +# PBKDF2-PRFs ALGORITHM-IDENTIFIER ::= { +# {NULL IDENTIFIED BY id-hmacWithSHA1}, +# {NULL IDENTIFIED BY id-hmacWithSHA224}, +# {NULL IDENTIFIED BY id-hmacWithSHA256}, +# {NULL IDENTIFIED BY id-hmacWithSHA384}, +# {NULL IDENTIFIED BY id-hmacWithSHA512}, +# {NULL IDENTIFIED BY id-hmacWithSHA512-224}, +# {NULL IDENTIFIED BY id-hmacWithSHA512-256}, +# ... +# } +# scrypt-params ::= SEQUENCE { +# salt OCTET STRING, +# costParameter INTEGER (1..MAX), +# blockSize INTEGER (1..MAX), +# parallelizationParameter INTEGER (1..MAX), +# keyLength INTEGER (1..MAX) OPTIONAL +# } + + +class PBES1(object): + """Deprecated encryption scheme with password-based key derivation + (originally defined in PKCS#5 v1.5, but still present in `v2.0`__). + + .. __: http://www.ietf.org/rfc/rfc2898.txt + """ + + @staticmethod + def decrypt(data, passphrase): + """Decrypt a piece of data using a passphrase and *PBES1*. + + The algorithm to use is automatically detected. + + :Parameters: + data : byte string + The piece of data to decrypt. + passphrase : byte string + The passphrase to use for decrypting the data. + :Returns: + The decrypted data, as a binary string. 
+ """ + + enc_private_key_info = DerSequence().decode(data) + encrypted_algorithm = DerSequence().decode(enc_private_key_info[0]) + encrypted_data = DerOctetString().decode(enc_private_key_info[1]).payload + + pbe_oid = DerObjectId().decode(encrypted_algorithm[0]).value + cipher_params = {} + if pbe_oid == _OID_PBE_WITH_MD5_AND_DES_CBC: + # PBE_MD5_DES_CBC + from Crypto.Hash import MD5 + from Crypto.Cipher import DES + hashmod = MD5 + module = DES + elif pbe_oid == _OID_PBE_WITH_MD5_AND_RC2_CBC: + # PBE_MD5_RC2_CBC + from Crypto.Hash import MD5 + from Crypto.Cipher import ARC2 + hashmod = MD5 + module = ARC2 + cipher_params['effective_keylen'] = 64 + elif pbe_oid == _OID_PBE_WITH_SHA1_AND_DES_CBC: + # PBE_SHA1_DES_CBC + from Crypto.Hash import SHA1 + from Crypto.Cipher import DES + hashmod = SHA1 + module = DES + elif pbe_oid == _OID_PBE_WITH_SHA1_AND_RC2_CBC: + # PBE_SHA1_RC2_CBC + from Crypto.Hash import SHA1 + from Crypto.Cipher import ARC2 + hashmod = SHA1 + module = ARC2 + cipher_params['effective_keylen'] = 64 + else: + raise PbesError("Unknown OID for PBES1") + + pbe_params = DerSequence().decode(encrypted_algorithm[1], nr_elements=2) + salt = DerOctetString().decode(pbe_params[0]).payload + iterations = pbe_params[1] + + key_iv = PBKDF1(passphrase, salt, 16, iterations, hashmod) + key, iv = key_iv[:8], key_iv[8:] + + cipher = module.new(key, module.MODE_CBC, iv, **cipher_params) + pt = cipher.decrypt(encrypted_data) + return unpad(pt, cipher.block_size) + + +class PBES2(object): + """Encryption scheme with password-based key derivation + (defined in `PKCS#5 v2.0`__). + + .. __: http://www.ietf.org/rfc/rfc2898.txt.""" + + @staticmethod + def encrypt(data, passphrase, protection, prot_params=None, randfunc=None): + """Encrypt a piece of data using a passphrase and *PBES2*. + + :Parameters: + data : byte string + The piece of data to encrypt. + passphrase : byte string + The passphrase to use for encrypting the data. + protection : string + The identifier of the encryption algorithm to use. + The default value is '``PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC``'. + prot_params : dictionary + Parameters of the protection algorithm. + + +------------------+-----------------------------------------------+ + | Key | Description | + +==================+===============================================+ + | iteration_count | The KDF algorithm is repeated several times to| + | | slow down brute force attacks on passwords | + | | (called *N* or CPU/memory cost in scrypt). | + | | | + | | The default value for PBKDF2 is 1 000. | + | | The default value for scrypt is 16 384. | + +------------------+-----------------------------------------------+ + | salt_size | Salt is used to thwart dictionary and rainbow | + | | attacks on passwords. The default value is 8 | + | | bytes. | + +------------------+-----------------------------------------------+ + | block_size | *(scrypt only)* Memory-cost (r). The default | + | | value is 8. | + +------------------+-----------------------------------------------+ + | parallelization | *(scrypt only)* CPU-cost (p). The default | + | | value is 1. | + +------------------+-----------------------------------------------+ + + + randfunc : callable + Random number generation function; it should accept + a single integer N and return a string of random data, + N bytes long. If not specified, a new RNG will be + instantiated from ``Crypto.Random``. + + :Returns: + The encrypted data, as a binary string. 
+ """ + + if prot_params is None: + prot_params = {} + + if randfunc is None: + randfunc = Random.new().read + + pattern = re.compile(r'^(PBKDF2WithHMAC-([0-9A-Z-]+)|scrypt)And([0-9A-Z-]+)$') + res = pattern.match(protection) + if res is None: + raise ValueError("Unknown protection %s" % protection) + + if protection.startswith("PBKDF"): + pbkdf = "pbkdf2" + pbkdf2_hmac_algo = res.group(2) + enc_algo = res.group(3) + else: + pbkdf = "scrypt" + enc_algo = res.group(3) + + aead = False + if enc_algo == 'DES-EDE3-CBC': + from Crypto.Cipher import DES3 + key_size = 24 + module = DES3 + cipher_mode = DES3.MODE_CBC + enc_oid = _OID_DES_EDE3_CBC + enc_param = {'iv': randfunc(8)} + elif enc_algo == 'AES128-CBC': + key_size = 16 + module = AES + cipher_mode = AES.MODE_CBC + enc_oid = _OID_AES128_CBC + enc_param = {'iv': randfunc(16)} + elif enc_algo == 'AES192-CBC': + key_size = 24 + module = AES + cipher_mode = AES.MODE_CBC + enc_oid = _OID_AES192_CBC + enc_param = {'iv': randfunc(16)} + elif enc_algo == 'AES256-CBC': + key_size = 32 + module = AES + cipher_mode = AES.MODE_CBC + enc_oid = _OID_AES256_CBC + enc_param = {'iv': randfunc(16)} + elif enc_algo == 'AES128-GCM': + key_size = 16 + module = AES + cipher_mode = AES.MODE_GCM + enc_oid = _OID_AES128_GCM + enc_param = {'nonce': randfunc(12)} + aead = True + elif enc_algo == 'AES192-GCM': + key_size = 24 + module = AES + cipher_mode = AES.MODE_GCM + enc_oid = _OID_AES192_GCM + enc_param = {'nonce': randfunc(12)} + aead = True + elif enc_algo == 'AES256-GCM': + key_size = 32 + module = AES + cipher_mode = AES.MODE_GCM + enc_oid = _OID_AES256_GCM + enc_param = {'nonce': randfunc(12)} + aead = True + else: + raise ValueError("Unknown encryption mode '%s'" % enc_algo) + + iv_nonce = list(enc_param.values())[0] + salt = randfunc(prot_params.get("salt_size", 8)) + + # Derive key from password + if pbkdf == 'pbkdf2': + + count = prot_params.get("iteration_count", 1000) + digestmod = Hash.new(pbkdf2_hmac_algo) + + key = PBKDF2(passphrase, + salt, + key_size, + count, + hmac_hash_module=digestmod) + + pbkdf2_params = DerSequence([ + DerOctetString(salt), + DerInteger(count) + ]) + + if pbkdf2_hmac_algo != 'SHA1': + try: + hmac_oid = Hash.HMAC.new(b'', digestmod=digestmod).oid + except KeyError: + raise ValueError("No OID for HMAC hash algorithm") + pbkdf2_params.append(DerSequence([DerObjectId(hmac_oid)])) + + kdf_info = DerSequence([ + DerObjectId(_OID_PBKDF2), # PBKDF2 + pbkdf2_params + ]) + + elif pbkdf == 'scrypt': + + count = prot_params.get("iteration_count", 16384) + scrypt_r = prot_params.get('block_size', 8) + scrypt_p = prot_params.get('parallelization', 1) + key = scrypt(passphrase, salt, key_size, + count, scrypt_r, scrypt_p) + kdf_info = DerSequence([ + DerObjectId(_OID_SCRYPT), # scrypt + DerSequence([ + DerOctetString(salt), + DerInteger(count), + DerInteger(scrypt_r), + DerInteger(scrypt_p) + ]) + ]) + + else: + raise ValueError("Unknown KDF " + res.group(1)) + + # Create cipher and use it + cipher = module.new(key, cipher_mode, **enc_param) + if aead: + ct, tag = cipher.encrypt_and_digest(data) + encrypted_data = ct + tag + else: + encrypted_data = cipher.encrypt(pad(data, cipher.block_size)) + enc_info = DerSequence([ + DerObjectId(enc_oid), + DerOctetString(iv_nonce) + ]) + + # Result + enc_private_key_info = DerSequence([ + # encryptionAlgorithm + DerSequence([ + DerObjectId(_OID_PBES2), + DerSequence([ + kdf_info, + enc_info + ]), + ]), + DerOctetString(encrypted_data) + ]) + return enc_private_key_info.encode() + + @staticmethod + 
def decrypt(data, passphrase): + """Decrypt a piece of data using a passphrase and *PBES2*. + + The algorithm to use is automatically detected. + + :Parameters: + data : byte string + The piece of data to decrypt. + passphrase : byte string + The passphrase to use for decrypting the data. + :Returns: + The decrypted data, as a binary string. + """ + + enc_private_key_info = DerSequence().decode(data, nr_elements=2) + enc_algo = DerSequence().decode(enc_private_key_info[0]) + encrypted_data = DerOctetString().decode(enc_private_key_info[1]).payload + + pbe_oid = DerObjectId().decode(enc_algo[0]).value + if pbe_oid != _OID_PBES2: + raise PbesError("Not a PBES2 object") + + pbes2_params = DerSequence().decode(enc_algo[1], nr_elements=2) + + # Key Derivation Function selection + kdf_info = DerSequence().decode(pbes2_params[0], nr_elements=2) + kdf_oid = DerObjectId().decode(kdf_info[0]).value + + kdf_key_length = None + + # We only support PBKDF2 or scrypt + if kdf_oid == _OID_PBKDF2: + + pbkdf2_params = DerSequence().decode(kdf_info[1], nr_elements=(2, 3, 4)) + salt = DerOctetString().decode(pbkdf2_params[0]).payload + iteration_count = pbkdf2_params[1] + + left = len(pbkdf2_params) - 2 + idx = 2 + + if left > 0: + try: + # Check if it's an INTEGER + kdf_key_length = pbkdf2_params[idx] - 0 + left -= 1 + idx += 1 + except TypeError: + # keyLength is not present + pass + + # Default is HMAC-SHA1 + pbkdf2_prf_oid = _OID_HMAC_SHA1 + if left > 0: + pbkdf2_prf_algo_id = DerSequence().decode(pbkdf2_params[idx]) + pbkdf2_prf_oid = DerObjectId().decode(pbkdf2_prf_algo_id[0]).value + + elif kdf_oid == _OID_SCRYPT: + + scrypt_params = DerSequence().decode(kdf_info[1], nr_elements=(4, 5)) + salt = DerOctetString().decode(scrypt_params[0]).payload + iteration_count, scrypt_r, scrypt_p = [scrypt_params[x] + for x in (1, 2, 3)] + if len(scrypt_params) > 4: + kdf_key_length = scrypt_params[4] + else: + kdf_key_length = None + else: + raise PbesError("Unsupported PBES2 KDF") + + # Cipher selection + enc_info = DerSequence().decode(pbes2_params[1]) + enc_oid = DerObjectId().decode(enc_info[0]).value + + aead = False + if enc_oid == _OID_DES_EDE3_CBC: + # DES_EDE3_CBC + from Crypto.Cipher import DES3 + module = DES3 + cipher_mode = DES3.MODE_CBC + key_size = 24 + cipher_param = 'iv' + elif enc_oid == _OID_AES128_CBC: + module = AES + cipher_mode = AES.MODE_CBC + key_size = 16 + cipher_param = 'iv' + elif enc_oid == _OID_AES192_CBC: + module = AES + cipher_mode = AES.MODE_CBC + key_size = 24 + cipher_param = 'iv' + elif enc_oid == _OID_AES256_CBC: + module = AES + cipher_mode = AES.MODE_CBC + key_size = 32 + cipher_param = 'iv' + elif enc_oid == _OID_AES128_GCM: + module = AES + cipher_mode = AES.MODE_GCM + key_size = 16 + cipher_param = 'nonce' + aead = True + elif enc_oid == _OID_AES192_GCM: + module = AES + cipher_mode = AES.MODE_GCM + key_size = 24 + cipher_param = 'nonce' + aead = True + elif enc_oid == _OID_AES256_GCM: + module = AES + cipher_mode = AES.MODE_GCM + key_size = 32 + cipher_param = 'nonce' + aead = True + else: + raise PbesError("Unsupported PBES2 cipher " + enc_algo) + + if kdf_key_length and kdf_key_length != key_size: + raise PbesError("Mismatch between PBES2 KDF parameters" + " and selected cipher") + + iv_nonce = DerOctetString().decode(enc_info[1]).payload + + # Create cipher + if kdf_oid == _OID_PBKDF2: + + try: + hmac_hash_module_oid = Hash.HMAC._hmac2hash_oid[pbkdf2_prf_oid] + except KeyError: + raise PbesError("Unsupported HMAC %s" % pbkdf2_prf_oid) + hmac_hash_module = 
Hash.new(hmac_hash_module_oid) + + key = PBKDF2(passphrase, salt, key_size, iteration_count, + hmac_hash_module=hmac_hash_module) + else: + key = scrypt(passphrase, salt, key_size, iteration_count, + scrypt_r, scrypt_p) + cipher = module.new(key, cipher_mode, **{cipher_param:iv_nonce}) + + # Decrypt data + if len(encrypted_data) < cipher.block_size: + raise ValueError("Too little data to decrypt") + + if aead: + tag_len = cipher.block_size + pt = cipher.decrypt_and_verify(encrypted_data[:-tag_len], + encrypted_data[-tag_len:]) + else: + pt_padded = cipher.decrypt(encrypted_data) + pt = unpad(pt_padded, cipher.block_size) + + return pt diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/_PBES.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/_PBES.pyi new file mode 100644 index 000000000..067336409 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/_PBES.pyi @@ -0,0 +1,26 @@ +from typing import Optional, Callable, TypedDict +from typing_extensions import NotRequired + +class PbesError(ValueError): + ... + +class PBES1(object): + @staticmethod + def decrypt(data: bytes, passphrase: bytes) -> bytes: ... + +class ProtParams(TypedDict): + iteration_count: NotRequired[int] + salt_size: NotRequired[int] + block_size: NotRequired[int] + parallelization: NotRequired[int] + +class PBES2(object): + @staticmethod + def encrypt(data: bytes, + passphrase: bytes, + protection: str, + prot_params: Optional[ProtParams] = ..., + randfunc: Optional[Callable[[int],bytes]] = ...) -> bytes: ... + + @staticmethod + def decrypt(data:bytes, passphrase: bytes) -> bytes: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/__init__.py new file mode 100644 index 000000000..85a0d0b1f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/IO/__init__.py @@ -0,0 +1,31 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
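A round-trip sketch for the PBES2 scheme above, driven directly through the private Crypto.IO._PBES module (applications normally reach it via PKCS8.wrap). The passphrase and scrypt parameters are illustrative, not recommendations.

from Crypto.IO._PBES import PBES2

secret = b"key material to protect"

# PBKDF2 + 3DES-CBC, the same combination PKCS8.wrap() uses by default.
ct = PBES2.encrypt(secret, b"passphrase",
                   "PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC")
assert PBES2.decrypt(ct, b"passphrase") == secret

# scrypt + AES-128-GCM takes the AEAD branch (the ciphertext carries the tag).
ct = PBES2.encrypt(secret, b"passphrase", "scryptAndAES128-GCM",
                   prot_params={"iteration_count": 2 ** 14,
                                "block_size": 8,
                                "parallelization": 1})
assert PBES2.decrypt(ct, b"passphrase") == secret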
+# =================================================================== + +__all__ = ['PEM', 'PKCS8'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/Numbers.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/Numbers.py new file mode 100644 index 000000000..c2c4483d6 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/Numbers.py @@ -0,0 +1,42 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +__all__ = ["Integer"] + +try: + from Crypto.Math._IntegerGMP import IntegerGMP as Integer + from Crypto.Math._IntegerGMP import implementation as _implementation +except (ImportError, OSError, AttributeError): + try: + from Crypto.Math._IntegerCustom import IntegerCustom as Integer + from Crypto.Math._IntegerCustom import implementation as _implementation + except (ImportError, OSError): + from Crypto.Math._IntegerNative import IntegerNative as Integer + _implementation = {} diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/Numbers.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/Numbers.pyi new file mode 100644 index 000000000..24e234b88 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/Numbers.pyi @@ -0,0 +1,2 @@ +from Crypto.Math._IntegerBase import IntegerBase as Integer +__all__ = ['Integer'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/Primality.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/Primality.py new file mode 100644 index 000000000..884c41855 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/Primality.py @@ -0,0 +1,369 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. 
Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +"""Functions to create and test prime numbers. + +:undocumented: __package__ +""" + +from Crypto import Random +from Crypto.Math.Numbers import Integer + +from Crypto.Util.py3compat import iter_range + +COMPOSITE = 0 +PROBABLY_PRIME = 1 + + +def miller_rabin_test(candidate, iterations, randfunc=None): + """Perform a Miller-Rabin primality test on an integer. + + The test is specified in Section C.3.1 of `FIPS PUB 186-4`__. + + :Parameters: + candidate : integer + The number to test for primality. + iterations : integer + The maximum number of iterations to perform before + declaring a candidate a probable prime. + randfunc : callable + An RNG function where bases are taken from. + + :Returns: + ``Primality.COMPOSITE`` or ``Primality.PROBABLY_PRIME``. + + .. __: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf + """ + + if not isinstance(candidate, Integer): + candidate = Integer(candidate) + + if candidate in (1, 2, 3, 5): + return PROBABLY_PRIME + + if candidate.is_even(): + return COMPOSITE + + one = Integer(1) + minus_one = Integer(candidate - 1) + + if randfunc is None: + randfunc = Random.new().read + + # Step 1 and 2 + m = Integer(minus_one) + a = 0 + while m.is_even(): + m >>= 1 + a += 1 + + # Skip step 3 + + # Step 4 + for i in iter_range(iterations): + + # Step 4.1-2 + base = 1 + while base in (one, minus_one): + base = Integer.random_range(min_inclusive=2, + max_inclusive=candidate - 2, + randfunc=randfunc) + assert(2 <= base <= candidate - 2) + + # Step 4.3-4.4 + z = pow(base, m, candidate) + if z in (one, minus_one): + continue + + # Step 4.5 + for j in iter_range(1, a): + z = pow(z, 2, candidate) + if z == minus_one: + break + if z == one: + return COMPOSITE + else: + return COMPOSITE + + # Step 5 + return PROBABLY_PRIME + + +def lucas_test(candidate): + """Perform a Lucas primality test on an integer. + + The test is specified in Section C.3.3 of `FIPS PUB 186-4`__. + + :Parameters: + candidate : integer + The number to test for primality. + + :Returns: + ``Primality.COMPOSITE`` or ``Primality.PROBABLY_PRIME``. + + .. 
__: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf + """ + + if not isinstance(candidate, Integer): + candidate = Integer(candidate) + + # Step 1 + if candidate in (1, 2, 3, 5): + return PROBABLY_PRIME + if candidate.is_even() or candidate.is_perfect_square(): + return COMPOSITE + + # Step 2 + def alternate(): + value = 5 + while True: + yield value + if value > 0: + value += 2 + else: + value -= 2 + value = -value + + for D in alternate(): + if candidate in (D, -D): + continue + js = Integer.jacobi_symbol(D, candidate) + if js == 0: + return COMPOSITE + if js == -1: + break + # Found D. P=1 and Q=(1-D)/4 (note that Q is guaranteed to be an integer) + + # Step 3 + # This is \delta(n) = n - jacobi(D/n) + K = candidate + 1 + # Step 4 + r = K.size_in_bits() - 1 + # Step 5 + # U_1=1 and V_1=P + U_i = Integer(1) + V_i = Integer(1) + U_temp = Integer(0) + V_temp = Integer(0) + # Step 6 + for i in iter_range(r - 1, -1, -1): + # Square + # U_temp = U_i * V_i % candidate + U_temp.set(U_i) + U_temp *= V_i + U_temp %= candidate + # V_temp = (((V_i ** 2 + (U_i ** 2 * D)) * K) >> 1) % candidate + V_temp.set(U_i) + V_temp *= U_i + V_temp *= D + V_temp.multiply_accumulate(V_i, V_i) + if V_temp.is_odd(): + V_temp += candidate + V_temp >>= 1 + V_temp %= candidate + # Multiply + if K.get_bit(i): + # U_i = (((U_temp + V_temp) * K) >> 1) % candidate + U_i.set(U_temp) + U_i += V_temp + if U_i.is_odd(): + U_i += candidate + U_i >>= 1 + U_i %= candidate + # V_i = (((V_temp + U_temp * D) * K) >> 1) % candidate + V_i.set(V_temp) + V_i.multiply_accumulate(U_temp, D) + if V_i.is_odd(): + V_i += candidate + V_i >>= 1 + V_i %= candidate + else: + U_i.set(U_temp) + V_i.set(V_temp) + # Step 7 + if U_i == 0: + return PROBABLY_PRIME + return COMPOSITE + + +from Crypto.Util.number import sieve_base as _sieve_base_large +## The optimal number of small primes to use for the sieve +## is probably dependent on the platform and the candidate size +_sieve_base = set(_sieve_base_large[:100]) + + +def test_probable_prime(candidate, randfunc=None): + """Test if a number is prime. + + A number is qualified as prime if it passes a certain + number of Miller-Rabin tests (dependent on the size + of the number, but such that the probability of a false + positive is less than 10^-30) and a single Lucas test. + + For instance, a 1024-bit candidate will need to pass + 4 Miller-Rabin tests. + + :Parameters: + candidate : integer + The number to test for primality. + randfunc : callable + The routine to draw random bytes from to select Miller-Rabin bases. + :Returns: + ``PROBABLY_PRIME`` if the number is prime with very high probability. + ``COMPOSITE`` if the number is a composite. + For efficiency reasons, ``COMPOSITE`` is also returned for small primes. + """ + + if randfunc is None: + randfunc = Random.new().read + + if not isinstance(candidate, Integer): + candidate = Integer(candidate) + + # First, check trial division by the smallest primes + if int(candidate) in _sieve_base: + return PROBABLY_PRIME + try: + for small_prime in _sieve_base: + candidate.fail_if_divisible_by(small_prime) + except ValueError: + return COMPOSITE + + # These are the number of Miller-Rabin iterations s.t. p(k, t) < 1E-30, + # with p(k, t) being the probability that a randomly chosen k-bit number + # is composite but still survives t MR iterations.
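+ # Each pair in mr_ranges below is (bit-size bound, MR iterations): the first bound strictly greater than the candidate's bit size selects the iteration count, e.g. a 1024-bit candidate falls under the 1200 bound and gets 4 iterations.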
+ mr_ranges = ((220, 30), (280, 20), (390, 15), (512, 10), + (620, 7), (740, 6), (890, 5), (1200, 4), + (1700, 3), (3700, 2)) + + bit_size = candidate.size_in_bits() + try: + mr_iterations = list(filter(lambda x: bit_size < x[0], + mr_ranges))[0][1] + except IndexError: + mr_iterations = 1 + + if miller_rabin_test(candidate, mr_iterations, + randfunc=randfunc) == COMPOSITE: + return COMPOSITE + if lucas_test(candidate) == COMPOSITE: + return COMPOSITE + return PROBABLY_PRIME + + +def generate_probable_prime(**kwargs): + """Generate a random probable prime. + + The prime will not have any specific properties + (e.g. it will not be a *strong* prime). + + Random numbers are evaluated for primality until one + passes all tests, consisting of a certain number of + Miller-Rabin tests with random bases followed by + a single Lucas test. + + The number of Miller-Rabin iterations is chosen such that + the probability that the output number is a non-prime is + less than 1E-30 (roughly 2^{-100}). + + This approach is compliant to `FIPS PUB 186-4`__. + + :Keywords: + exact_bits : integer + The desired size in bits of the probable prime. + It must be at least 160. + randfunc : callable + An RNG function where candidate primes are taken from. + prime_filter : callable + A function that takes an Integer as parameter and returns + True if the number can be passed to further primality tests, + False if it should be immediately discarded. + + :Return: + A probable prime in the range 2^exact_bits > p > 2^(exact_bits-1). + + .. __: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf + """ + + exact_bits = kwargs.pop("exact_bits", None) + randfunc = kwargs.pop("randfunc", None) + prime_filter = kwargs.pop("prime_filter", lambda x: True) + if kwargs: + raise ValueError("Unknown parameters: " + kwargs.keys()) + + if exact_bits is None: + raise ValueError("Missing exact_bits parameter") + if exact_bits < 160: + raise ValueError("Prime number is not big enough.") + + if randfunc is None: + randfunc = Random.new().read + + result = COMPOSITE + while result == COMPOSITE: + candidate = Integer.random(exact_bits=exact_bits, + randfunc=randfunc) | 1 + if not prime_filter(candidate): + continue + result = test_probable_prime(candidate, randfunc) + return candidate + + +def generate_probable_safe_prime(**kwargs): + """Generate a random, probable safe prime. + + Note this operation is much slower than generating a simple prime. + + :Keywords: + exact_bits : integer + The desired size in bits of the probable safe prime. + randfunc : callable + An RNG function where candidate primes are taken from. + + :Return: + A probable safe prime in the range + 2^exact_bits > p > 2^(exact_bits-1). 
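+ + Example (an illustrative sketch; the function lives in ``Crypto.Math.Primality``): + + p = generate_probable_safe_prime(exact_bits=2048) + # (p - 1) // 2 is then itself a probable prime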
+ """ + + exact_bits = kwargs.pop("exact_bits", None) + randfunc = kwargs.pop("randfunc", None) + if kwargs: + raise ValueError("Unknown parameters: " + kwargs.keys()) + + if randfunc is None: + randfunc = Random.new().read + + result = COMPOSITE + while result == COMPOSITE: + q = generate_probable_prime(exact_bits=exact_bits - 1, randfunc=randfunc) + candidate = q * 2 + 1 + if candidate.size_in_bits() != exact_bits: + continue + result = test_probable_prime(candidate, randfunc=randfunc) + return candidate diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/Primality.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/Primality.pyi new file mode 100644 index 000000000..781348307 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/Primality.pyi @@ -0,0 +1,18 @@ +from typing import Callable, Optional, Union, Set + +PrimeResult = int + +COMPOSITE: PrimeResult +PROBABLY_PRIME: PrimeResult + +def miller_rabin_test(candidate: int, iterations: int, randfunc: Optional[Callable[[int],bytes]]=None) -> PrimeResult: ... +def lucas_test(candidate: int) -> PrimeResult: ... +_sieve_base: Set[int] +def test_probable_prime(candidate: int, randfunc: Optional[Callable[[int],bytes]]=None) -> PrimeResult: ... +def generate_probable_prime(*, + exact_bits: int = ..., + randfunc: Callable[[int],bytes] = ..., + prime_filter: Callable[[int],bool] = ...) -> int: ... +def generate_probable_safe_prime(*, + exact_bits: int = ..., + randfunc: Callable[[int],bytes] = ...) -> int: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerBase.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerBase.py new file mode 100644 index 000000000..931743aa1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerBase.py @@ -0,0 +1,412 @@ +# =================================================================== +# +# Copyright (c) 2018, Helder Eijs +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import abc + +from Crypto.Util.py3compat import iter_range, bord, bchr, ABC + +from Crypto import Random + + +class IntegerBase(ABC): + + # Conversions + @abc.abstractmethod + def __int__(self): + pass + + @abc.abstractmethod + def __str__(self): + pass + + @abc.abstractmethod + def __repr__(self): + pass + + @abc.abstractmethod + def to_bytes(self, block_size=0, byteorder='big'): + pass + + @staticmethod + @abc.abstractmethod + def from_bytes(byte_string, byteorder='big'): + pass + + # Relations + @abc.abstractmethod + def __eq__(self, term): + pass + + @abc.abstractmethod + def __ne__(self, term): + pass + + @abc.abstractmethod + def __lt__(self, term): + pass + + @abc.abstractmethod + def __le__(self, term): + pass + + @abc.abstractmethod + def __gt__(self, term): + pass + + @abc.abstractmethod + def __ge__(self, term): + pass + + @abc.abstractmethod + def __nonzero__(self): + pass + __bool__ = __nonzero__ + + @abc.abstractmethod + def is_negative(self): + pass + + # Arithmetic operations + @abc.abstractmethod + def __add__(self, term): + pass + + @abc.abstractmethod + def __sub__(self, term): + pass + + @abc.abstractmethod + def __mul__(self, factor): + pass + + @abc.abstractmethod + def __floordiv__(self, divisor): + pass + + @abc.abstractmethod + def __mod__(self, divisor): + pass + + @abc.abstractmethod + def inplace_pow(self, exponent, modulus=None): + pass + + @abc.abstractmethod + def __pow__(self, exponent, modulus=None): + pass + + @abc.abstractmethod + def __abs__(self): + pass + + @abc.abstractmethod + def sqrt(self, modulus=None): + pass + + @abc.abstractmethod + def __iadd__(self, term): + pass + + @abc.abstractmethod + def __isub__(self, term): + pass + + @abc.abstractmethod + def __imul__(self, term): + pass + + @abc.abstractmethod + def __imod__(self, term): + pass + + # Boolean/bit operations + @abc.abstractmethod + def __and__(self, term): + pass + + @abc.abstractmethod + def __or__(self, term): + pass + + @abc.abstractmethod + def __rshift__(self, pos): + pass + + @abc.abstractmethod + def __irshift__(self, pos): + pass + + @abc.abstractmethod + def __lshift__(self, pos): + pass + + @abc.abstractmethod + def __ilshift__(self, pos): + pass + + @abc.abstractmethod + def get_bit(self, n): + pass + + # Extra + @abc.abstractmethod + def is_odd(self): + pass + + @abc.abstractmethod + def is_even(self): + pass + + @abc.abstractmethod + def size_in_bits(self): + pass + + @abc.abstractmethod + def size_in_bytes(self): + pass + + @abc.abstractmethod + def is_perfect_square(self): + pass + + @abc.abstractmethod + def fail_if_divisible_by(self, small_prime): + pass + + @abc.abstractmethod + def multiply_accumulate(self, a, b): + pass + + @abc.abstractmethod + def set(self, source): + pass + + @abc.abstractmethod + def inplace_inverse(self, modulus): + pass + + @abc.abstractmethod + def inverse(self, modulus): + pass + + @abc.abstractmethod + def gcd(self, term): + pass + + @abc.abstractmethod + def lcm(self, term): + pass + + @staticmethod + @abc.abstractmethod + def jacobi_symbol(a, n): + pass + + @staticmethod + def _tonelli_shanks(n, p): + """Tonelli-shanks algorithm for computing the square root + of n modulo a prime p. + + n must be in the range [0..p-1]. + p must be at least even. + + The return value r is the square root of modulo p. If non-zero, + another solution will also exist (p-r). 
+ + Note we cannot assume that p is really a prime: if it's not, + we can either raise an exception or return the correct value. + """ + + # See https://rosettacode.org/wiki/Tonelli-Shanks_algorithm + + if n in (0, 1): + return n + + if p % 4 == 3: + root = pow(n, (p + 1) // 4, p) + if pow(root, 2, p) != n: + raise ValueError("Cannot compute square root") + return root + + s = 1 + q = (p - 1) // 2 + while not (q & 1): + s += 1 + q >>= 1 + + z = n.__class__(2) + while True: + euler = pow(z, (p - 1) // 2, p) + if euler == 1: + z += 1 + continue + if euler == p - 1: + break + # Most probably p is not a prime + raise ValueError("Cannot compute square root") + + m = s + c = pow(z, q, p) + t = pow(n, q, p) + r = pow(n, (q + 1) // 2, p) + + while t != 1: + for i in iter_range(0, m): + if pow(t, 2**i, p) == 1: + break + if i == m: + raise ValueError("Cannot compute square root of %d mod %d" % (n, p)) + b = pow(c, 2**(m - i - 1), p) + m = i + c = b**2 % p + t = (t * b**2) % p + r = (r * b) % p + + if pow(r, 2, p) != n: + raise ValueError("Cannot compute square root") + + return r + + @classmethod + def random(cls, **kwargs): + """Generate a random natural integer of a certain size. + + :Keywords: + exact_bits : positive integer + The length in bits of the resulting random Integer number. + The number is guaranteed to fulfil the relation: + + 2^bits > result >= 2^(bits - 1) + + max_bits : positive integer + The maximum length in bits of the resulting random Integer number. + The number is guaranteed to fulfil the relation: + + 2^bits > result >= 0 + + randfunc : callable + A function that returns a random byte string. The length of the + byte string is passed as parameter. Optional. + If not provided (or ``None``), randomness is read from the system RNG. + + :Return: an Integer object + """ + + exact_bits = kwargs.pop("exact_bits", None) + max_bits = kwargs.pop("max_bits", None) + randfunc = kwargs.pop("randfunc", None) + + if randfunc is None: + randfunc = Random.new().read + + if exact_bits is None and max_bits is None: + raise ValueError("Either 'exact_bits' or 'max_bits' must be specified") + + if exact_bits is not None and max_bits is not None: + raise ValueError("'exact_bits' and 'max_bits' are mutually exclusive") + + bits = exact_bits or max_bits + bytes_needed = ((bits - 1) // 8) + 1 + significant_bits_msb = 8 - (bytes_needed * 8 - bits) + msb = bord(randfunc(1)[0]) + if exact_bits is not None: + msb |= 1 << (significant_bits_msb - 1) + msb &= (1 << significant_bits_msb) - 1 + + return cls.from_bytes(bchr(msb) + randfunc(bytes_needed - 1)) + + @classmethod + def random_range(cls, **kwargs): + """Generate a random integer within a given interval. + + :Keywords: + min_inclusive : integer + The lower end of the interval (inclusive). + max_inclusive : integer + The higher end of the interval (inclusive). + max_exclusive : integer + The higher end of the interval (exclusive). + randfunc : callable + A function that returns a random byte string. The length of the + byte string is passed as parameter. Optional. + If not provided (or ``None``), randomness is read from the system RNG. + :Returns: + An Integer randomly taken in the given interval.
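+ + Example (illustrative): + + Integer.random_range(min_inclusive=1, max_inclusive=6) # an Integer between 1 and 6, inclusive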
+ """ + + min_inclusive = kwargs.pop("min_inclusive", None) + max_inclusive = kwargs.pop("max_inclusive", None) + max_exclusive = kwargs.pop("max_exclusive", None) + randfunc = kwargs.pop("randfunc", None) + + if kwargs: + raise ValueError("Unknown keywords: " + str(kwargs.keys)) + if None not in (max_inclusive, max_exclusive): + raise ValueError("max_inclusive and max_exclusive cannot be both" + " specified") + if max_exclusive is not None: + max_inclusive = max_exclusive - 1 + if None in (min_inclusive, max_inclusive): + raise ValueError("Missing keyword to identify the interval") + + if randfunc is None: + randfunc = Random.new().read + + norm_maximum = max_inclusive - min_inclusive + bits_needed = cls(norm_maximum).size_in_bits() + + norm_candidate = -1 + while not 0 <= norm_candidate <= norm_maximum: + norm_candidate = cls.random( + max_bits=bits_needed, + randfunc=randfunc + ) + return norm_candidate + min_inclusive + + @staticmethod + @abc.abstractmethod + def _mult_modulo_bytes(term1, term2, modulus): + """Multiply two integers, take the modulo, and encode as big endian. + This specialized method is used for RSA decryption. + + Args: + term1 : integer + The first term of the multiplication, non-negative. + term2 : integer + The second term of the multiplication, non-negative. + modulus: integer + The modulus, a positive odd number. + :Returns: + A byte string, with the result of the modular multiplication + encoded in big endian mode. + It is as long as the modulus would be, with zero padding + on the left if needed. + """ + pass diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerBase.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerBase.pyi new file mode 100644 index 000000000..ea235326f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerBase.pyi @@ -0,0 +1,67 @@ +from typing import Optional, Union, Callable + +RandFunc = Callable[[int],int] + +class IntegerBase: + + def __init__(self, value: Union[IntegerBase, int]): ... + + def __int__(self) -> int: ... + def __str__(self) -> str: ... + def __repr__(self) -> str: ... + def to_bytes(self, block_size: Optional[int]=0, byteorder: str= ...) -> bytes: ... + @staticmethod + def from_bytes(byte_string: bytes, byteorder: Optional[str] = ...) -> IntegerBase: ... + def __eq__(self, term: object) -> bool: ... + def __ne__(self, term: object) -> bool: ... + def __lt__(self, term: Union[IntegerBase, int]) -> bool: ... + def __le__(self, term: Union[IntegerBase, int]) -> bool: ... + def __gt__(self, term: Union[IntegerBase, int]) -> bool: ... + def __ge__(self, term: Union[IntegerBase, int]) -> bool: ... + def __nonzero__(self) -> bool: ... + def is_negative(self) -> bool: ... + def __add__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def __sub__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def __mul__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def __floordiv__(self, divisor: Union[IntegerBase, int]) -> IntegerBase: ... + def __mod__(self, divisor: Union[IntegerBase, int]) -> IntegerBase: ... + def inplace_pow(self, exponent: int, modulus: Optional[Union[IntegerBase, int]]=None) -> IntegerBase: ... + def __pow__(self, exponent: int, modulus: Optional[int]) -> IntegerBase: ... + def __abs__(self) -> IntegerBase: ... + def sqrt(self, modulus: Optional[int]) -> IntegerBase: ... + def __iadd__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... 
+ def __isub__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def __imul__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def __imod__(self, divisor: Union[IntegerBase, int]) -> IntegerBase: ... + def __and__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def __or__(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def __rshift__(self, pos: Union[IntegerBase, int]) -> IntegerBase: ... + def __irshift__(self, pos: Union[IntegerBase, int]) -> IntegerBase: ... + def __lshift__(self, pos: Union[IntegerBase, int]) -> IntegerBase: ... + def __ilshift__(self, pos: Union[IntegerBase, int]) -> IntegerBase: ... + def get_bit(self, n: int) -> bool: ... + def is_odd(self) -> bool: ... + def is_even(self) -> bool: ... + def size_in_bits(self) -> int: ... + def size_in_bytes(self) -> int: ... + def is_perfect_square(self) -> bool: ... + def fail_if_divisible_by(self, small_prime: Union[IntegerBase, int]) -> None: ... + def multiply_accumulate(self, a: Union[IntegerBase, int], b: Union[IntegerBase, int]) -> IntegerBase: ... + def set(self, source: Union[IntegerBase, int]) -> IntegerBase: ... + def inplace_inverse(self, modulus: Union[IntegerBase, int]) -> IntegerBase: ... + def inverse(self, modulus: Union[IntegerBase, int]) -> IntegerBase: ... + def gcd(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + def lcm(self, term: Union[IntegerBase, int]) -> IntegerBase: ... + @staticmethod + def jacobi_symbol(a: Union[IntegerBase, int], n: Union[IntegerBase, int]) -> IntegerBase: ... + @staticmethod + def _tonelli_shanks(n: Union[IntegerBase, int], p: Union[IntegerBase, int]) -> IntegerBase : ... + @classmethod + def random(cls, **kwargs: Union[int,RandFunc]) -> IntegerBase : ... + @classmethod + def random_range(cls, **kwargs: Union[int,RandFunc]) -> IntegerBase : ... + @staticmethod + def _mult_modulo_bytes(term1: Union[IntegerBase, int], + term2: Union[IntegerBase, int], + modulus: Union[IntegerBase, int]) -> bytes: ... + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerCustom.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerCustom.py new file mode 100644 index 000000000..7dfc2350d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerCustom.py @@ -0,0 +1,162 @@ +# =================================================================== +# +# Copyright (c) 2018, Helder Eijs +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from ._IntegerNative import IntegerNative + +from Crypto.Util.number import long_to_bytes, bytes_to_long + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + create_string_buffer, + get_raw_buffer, backend, + c_size_t, c_ulonglong) + + +from Crypto.Random.random import getrandbits + +c_defs = """ +int monty_pow(uint8_t *out, + const uint8_t *base, + const uint8_t *exp, + const uint8_t *modulus, + size_t len, + uint64_t seed); + +int monty_multiply(uint8_t *out, + const uint8_t *term1, + const uint8_t *term2, + const uint8_t *modulus, + size_t len); +""" + + +_raw_montgomery = load_pycryptodome_raw_lib("Crypto.Math._modexp", c_defs) +implementation = {"library": "custom", "api": backend} + + +class IntegerCustom(IntegerNative): + + @staticmethod + def from_bytes(byte_string, byteorder='big'): + if byteorder == 'big': + pass + elif byteorder == 'little': + byte_string = bytearray(byte_string) + byte_string.reverse() + else: + raise ValueError("Incorrect byteorder") + return IntegerCustom(bytes_to_long(byte_string)) + + def inplace_pow(self, exponent, modulus=None): + exp_value = int(exponent) + if exp_value < 0: + raise ValueError("Exponent must not be negative") + + # No modular reduction + if modulus is None: + self._value = pow(self._value, exp_value) + return self + + # With modular reduction + mod_value = int(modulus) + if mod_value < 0: + raise ValueError("Modulus must be positive") + if mod_value == 0: + raise ZeroDivisionError("Modulus cannot be zero") + + # C extension only works with odd moduli + if (mod_value & 1) == 0: + self._value = pow(self._value, exp_value, mod_value) + return self + + # C extension only works with bases smaller than modulus + if self._value >= mod_value: + self._value %= mod_value + + max_len = len(long_to_bytes(max(self._value, exp_value, mod_value))) + + base_b = long_to_bytes(self._value, max_len) + exp_b = long_to_bytes(exp_value, max_len) + modulus_b = long_to_bytes(mod_value, max_len) + + out = create_string_buffer(max_len) + + error = _raw_montgomery.monty_pow( + out, + base_b, + exp_b, + modulus_b, + c_size_t(max_len), + c_ulonglong(getrandbits(64)) + ) + + if error: + raise ValueError("monty_pow failed with error: %d" % error) + + result = bytes_to_long(get_raw_buffer(out)) + self._value = result + return self + + @staticmethod + def _mult_modulo_bytes(term1, term2, modulus): + + # With modular reduction + mod_value = int(modulus) + if mod_value < 0: + raise ValueError("Modulus must be positive") + if mod_value == 0: + raise ZeroDivisionError("Modulus cannot be zero") + + # C extension only works with odd moduli + if (mod_value & 1) == 0: + raise ValueError("Odd modulus is required") + + # C extension only works with non-negative terms smaller than modulus + if term1 >= mod_value or term1 < 0: + term1 %= mod_value + if term2 >= mod_value or term2 < 0: + term2 %= mod_value + + modulus_b = long_to_bytes(mod_value) + numbers_len = 
len(modulus_b) + term1_b = long_to_bytes(term1, numbers_len) + term2_b = long_to_bytes(term2, numbers_len) + out = create_string_buffer(numbers_len) + + error = _raw_montgomery.monty_multiply( + out, + term1_b, + term2_b, + modulus_b, + c_size_t(numbers_len) + ) + if error: + raise ValueError("monty_multiply failed with error: %d" % error) + + return get_raw_buffer(out) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerCustom.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerCustom.pyi new file mode 100644 index 000000000..2dd75c742 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerCustom.pyi @@ -0,0 +1,8 @@ +from typing import Any + +from ._IntegerNative import IntegerNative + +_raw_montgomery = Any + +class IntegerCustom(IntegerNative): + pass diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerGMP.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerGMP.py new file mode 100644 index 000000000..e1b6d66cb --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerGMP.py @@ -0,0 +1,782 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import sys + +from Crypto.Util.py3compat import tobytes, is_native_int + +from Crypto.Util._raw_api import (backend, load_lib, + get_raw_buffer, get_c_string, + null_pointer, create_string_buffer, + c_ulong, c_size_t, c_uint8_ptr) + +from ._IntegerBase import IntegerBase + +gmp_defs = """typedef unsigned long UNIX_ULONG; + typedef struct { int a; int b; void *c; } MPZ; + typedef MPZ mpz_t[1]; + typedef UNIX_ULONG mp_bitcnt_t; + + void __gmpz_init (mpz_t x); + void __gmpz_init_set (mpz_t rop, const mpz_t op); + void __gmpz_init_set_ui (mpz_t rop, UNIX_ULONG op); + + UNIX_ULONG __gmpz_get_ui (const mpz_t op); + void __gmpz_set (mpz_t rop, const mpz_t op); + void __gmpz_set_ui (mpz_t rop, UNIX_ULONG op); + void __gmpz_add (mpz_t rop, const mpz_t op1, const mpz_t op2); + void __gmpz_add_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2); + void __gmpz_sub_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2); + void __gmpz_addmul (mpz_t rop, const mpz_t op1, const mpz_t op2); + void __gmpz_addmul_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2); + void __gmpz_submul_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2); + void __gmpz_import (mpz_t rop, size_t count, int order, size_t size, + int endian, size_t nails, const void *op); + void * __gmpz_export (void *rop, size_t *countp, int order, + size_t size, + int endian, size_t nails, const mpz_t op); + size_t __gmpz_sizeinbase (const mpz_t op, int base); + void __gmpz_sub (mpz_t rop, const mpz_t op1, const mpz_t op2); + void __gmpz_mul (mpz_t rop, const mpz_t op1, const mpz_t op2); + void __gmpz_mul_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2); + int __gmpz_cmp (const mpz_t op1, const mpz_t op2); + void __gmpz_powm (mpz_t rop, const mpz_t base, const mpz_t exp, const + mpz_t mod); + void __gmpz_powm_ui (mpz_t rop, const mpz_t base, UNIX_ULONG exp, + const mpz_t mod); + void __gmpz_pow_ui (mpz_t rop, const mpz_t base, UNIX_ULONG exp); + void __gmpz_sqrt(mpz_t rop, const mpz_t op); + void __gmpz_mod (mpz_t r, const mpz_t n, const mpz_t d); + void __gmpz_neg (mpz_t rop, const mpz_t op); + void __gmpz_abs (mpz_t rop, const mpz_t op); + void __gmpz_and (mpz_t rop, const mpz_t op1, const mpz_t op2); + void __gmpz_ior (mpz_t rop, const mpz_t op1, const mpz_t op2); + void __gmpz_clear (mpz_t x); + void __gmpz_tdiv_q_2exp (mpz_t q, const mpz_t n, mp_bitcnt_t b); + void __gmpz_fdiv_q (mpz_t q, const mpz_t n, const mpz_t d); + void __gmpz_mul_2exp (mpz_t rop, const mpz_t op1, mp_bitcnt_t op2); + int __gmpz_tstbit (const mpz_t op, mp_bitcnt_t bit_index); + int __gmpz_perfect_square_p (const mpz_t op); + int __gmpz_jacobi (const mpz_t a, const mpz_t b); + void __gmpz_gcd (mpz_t rop, const mpz_t op1, const mpz_t op2); + UNIX_ULONG __gmpz_gcd_ui (mpz_t rop, const mpz_t op1, + UNIX_ULONG op2); + void __gmpz_lcm (mpz_t rop, const mpz_t op1, const mpz_t op2); + int __gmpz_invert (mpz_t rop, const mpz_t op1, const mpz_t op2); + int __gmpz_divisible_p (const mpz_t n, const mpz_t d); + int __gmpz_divisible_ui_p (const mpz_t n, UNIX_ULONG d); + """ + +if sys.platform == "win32": + raise ImportError("Not using GMP on Windows") + +lib = load_lib("gmp", gmp_defs) +implementation = {"library": "gmp", "api": backend} + +if hasattr(lib, "__mpir_version"): + raise ImportError("MPIR library detected") + +# In order to create a function that returns a pointer to +# a new MPZ structure, we need to break the abstraction +# and know exactly what ffi backend we have +if implementation["api"] == "ctypes": + from 
ctypes import Structure, c_int, c_void_p, byref + + class _MPZ(Structure): + _fields_ = [('_mp_alloc', c_int), + ('_mp_size', c_int), + ('_mp_d', c_void_p)] + + def new_mpz(): + return byref(_MPZ()) + +else: + # We are using CFFI + from Crypto.Util._raw_api import ffi + + def new_mpz(): + return ffi.new("MPZ*") + + +# Lazy creation of GMP methods +class _GMP(object): + + def __getattr__(self, name): + if name.startswith("mpz_"): + func_name = "__gmpz_" + name[4:] + elif name.startswith("gmp_"): + func_name = "__gmp_" + name[4:] + else: + raise AttributeError("Attribute %s is invalid" % name) + func = getattr(lib, func_name) + setattr(self, name, func) + return func + + +_gmp = _GMP() + + +class IntegerGMP(IntegerBase): + """A fast, arbitrary precision integer""" + + _zero_mpz_p = new_mpz() + _gmp.mpz_init_set_ui(_zero_mpz_p, c_ulong(0)) + + def __init__(self, value): + """Initialize the integer to the given value.""" + + self._mpz_p = new_mpz() + self._initialized = False + + if isinstance(value, float): + raise ValueError("A floating point type is not a natural number") + + if is_native_int(value): + _gmp.mpz_init(self._mpz_p) + self._initialized = True + if value == 0: + return + + tmp = new_mpz() + _gmp.mpz_init(tmp) + + try: + positive = value >= 0 + reduce = abs(value) + slots = (reduce.bit_length() - 1) // 32 + 1 + + while slots > 0: + slots = slots - 1 + _gmp.mpz_set_ui(tmp, + c_ulong(0xFFFFFFFF & (reduce >> (slots * 32)))) + _gmp.mpz_mul_2exp(tmp, tmp, c_ulong(slots * 32)) + _gmp.mpz_add(self._mpz_p, self._mpz_p, tmp) + finally: + _gmp.mpz_clear(tmp) + + if not positive: + _gmp.mpz_neg(self._mpz_p, self._mpz_p) + + elif isinstance(value, IntegerGMP): + _gmp.mpz_init_set(self._mpz_p, value._mpz_p) + self._initialized = True + else: + raise NotImplementedError + + + # Conversions + def __int__(self): + tmp = new_mpz() + _gmp.mpz_init_set(tmp, self._mpz_p) + + try: + value = 0 + slot = 0 + while _gmp.mpz_cmp(tmp, self._zero_mpz_p) != 0: + lsb = _gmp.mpz_get_ui(tmp) & 0xFFFFFFFF + value |= lsb << (slot * 32) + _gmp.mpz_tdiv_q_2exp(tmp, tmp, c_ulong(32)) + slot = slot + 1 + finally: + _gmp.mpz_clear(tmp) + + if self < 0: + value = -value + return int(value) + + def __str__(self): + return str(int(self)) + + def __repr__(self): + return "Integer(%s)" % str(self) + + # Only Python 2.x + def __hex__(self): + return hex(int(self)) + + # Only Python 3.x + def __index__(self): + return int(self) + + def to_bytes(self, block_size=0, byteorder='big'): + """Convert the number into a byte string. + + This method encodes the number in network order and prepends + as many zero bytes as required. It only works for non-negative + values. + + :Parameters: + block_size : integer + The exact size the output byte string must have. + If zero, the string has the minimal length. + byteorder : string + 'big' for big-endian integers (default), 'little' for litte-endian. + :Returns: + A byte string. + :Raise ValueError: + If the value is negative or if ``block_size`` is + provided and the length of the byte string would exceed it. 
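+ + Example (illustrative, using the public ``Integer`` alias from ``Crypto.Math.Numbers``): + + Integer(258).to_bytes(4) # b'\x00\x00\x01\x02' + Integer(258).to_bytes(4, byteorder='little') # b'\x02\x01\x00\x00'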
+ """ + + if self < 0: + raise ValueError("Conversion only valid for non-negative numbers") + + buf_len = (_gmp.mpz_sizeinbase(self._mpz_p, 2) + 7) // 8 + if buf_len > block_size > 0: + raise ValueError("Number is too big to convert to byte string" + " of prescribed length") + buf = create_string_buffer(buf_len) + + + _gmp.mpz_export( + buf, + null_pointer, # Ignore countp + 1, # Big endian + c_size_t(1), # Each word is 1 byte long + 0, # Endianess within a word - not relevant + c_size_t(0), # No nails + self._mpz_p) + + result = b'\x00' * max(0, block_size - buf_len) + get_raw_buffer(buf) + if byteorder == 'big': + pass + elif byteorder == 'little': + result = bytearray(result) + result.reverse() + result = bytes(result) + else: + raise ValueError("Incorrect byteorder") + return result + + @staticmethod + def from_bytes(byte_string, byteorder='big'): + """Convert a byte string into a number. + + :Parameters: + byte_string : byte string + The input number, encoded in network order. + It can only be non-negative. + byteorder : string + 'big' for big-endian integers (default), 'little' for litte-endian. + + :Return: + The ``Integer`` object carrying the same value as the input. + """ + result = IntegerGMP(0) + if byteorder == 'big': + pass + elif byteorder == 'little': + byte_string = bytearray(byte_string) + byte_string.reverse() + else: + raise ValueError("Incorrect byteorder") + _gmp.mpz_import( + result._mpz_p, + c_size_t(len(byte_string)), # Amount of words to read + 1, # Big endian + c_size_t(1), # Each word is 1 byte long + 0, # Endianess within a word - not relevant + c_size_t(0), # No nails + c_uint8_ptr(byte_string)) + return result + + # Relations + def _apply_and_return(self, func, term): + if not isinstance(term, IntegerGMP): + term = IntegerGMP(term) + return func(self._mpz_p, term._mpz_p) + + def __eq__(self, term): + if not (isinstance(term, IntegerGMP) or is_native_int(term)): + return False + return self._apply_and_return(_gmp.mpz_cmp, term) == 0 + + def __ne__(self, term): + if not (isinstance(term, IntegerGMP) or is_native_int(term)): + return True + return self._apply_and_return(_gmp.mpz_cmp, term) != 0 + + def __lt__(self, term): + return self._apply_and_return(_gmp.mpz_cmp, term) < 0 + + def __le__(self, term): + return self._apply_and_return(_gmp.mpz_cmp, term) <= 0 + + def __gt__(self, term): + return self._apply_and_return(_gmp.mpz_cmp, term) > 0 + + def __ge__(self, term): + return self._apply_and_return(_gmp.mpz_cmp, term) >= 0 + + def __nonzero__(self): + return _gmp.mpz_cmp(self._mpz_p, self._zero_mpz_p) != 0 + __bool__ = __nonzero__ + + def is_negative(self): + return _gmp.mpz_cmp(self._mpz_p, self._zero_mpz_p) < 0 + + # Arithmetic operations + def __add__(self, term): + result = IntegerGMP(0) + if not isinstance(term, IntegerGMP): + try: + term = IntegerGMP(term) + except NotImplementedError: + return NotImplemented + _gmp.mpz_add(result._mpz_p, + self._mpz_p, + term._mpz_p) + return result + + def __sub__(self, term): + result = IntegerGMP(0) + if not isinstance(term, IntegerGMP): + try: + term = IntegerGMP(term) + except NotImplementedError: + return NotImplemented + _gmp.mpz_sub(result._mpz_p, + self._mpz_p, + term._mpz_p) + return result + + def __mul__(self, term): + result = IntegerGMP(0) + if not isinstance(term, IntegerGMP): + try: + term = IntegerGMP(term) + except NotImplementedError: + return NotImplemented + _gmp.mpz_mul(result._mpz_p, + self._mpz_p, + term._mpz_p) + return result + + def __floordiv__(self, divisor): + if not isinstance(divisor, 
IntegerGMP): + divisor = IntegerGMP(divisor) + if _gmp.mpz_cmp(divisor._mpz_p, + self._zero_mpz_p) == 0: + raise ZeroDivisionError("Division by zero") + result = IntegerGMP(0) + _gmp.mpz_fdiv_q(result._mpz_p, + self._mpz_p, + divisor._mpz_p) + return result + + def __mod__(self, divisor): + if not isinstance(divisor, IntegerGMP): + divisor = IntegerGMP(divisor) + comp = _gmp.mpz_cmp(divisor._mpz_p, + self._zero_mpz_p) + if comp == 0: + raise ZeroDivisionError("Division by zero") + if comp < 0: + raise ValueError("Modulus must be positive") + result = IntegerGMP(0) + _gmp.mpz_mod(result._mpz_p, + self._mpz_p, + divisor._mpz_p) + return result + + def inplace_pow(self, exponent, modulus=None): + + if modulus is None: + if exponent < 0: + raise ValueError("Exponent must not be negative") + + # Normal exponentiation + if exponent > 256: + raise ValueError("Exponent is too big") + _gmp.mpz_pow_ui(self._mpz_p, + self._mpz_p, # Base + c_ulong(int(exponent)) + ) + else: + # Modular exponentiation + if not isinstance(modulus, IntegerGMP): + modulus = IntegerGMP(modulus) + if not modulus: + raise ZeroDivisionError("Division by zero") + if modulus.is_negative(): + raise ValueError("Modulus must be positive") + if is_native_int(exponent): + if exponent < 0: + raise ValueError("Exponent must not be negative") + if exponent < 65536: + _gmp.mpz_powm_ui(self._mpz_p, + self._mpz_p, + c_ulong(exponent), + modulus._mpz_p) + return self + exponent = IntegerGMP(exponent) + elif exponent.is_negative(): + raise ValueError("Exponent must not be negative") + _gmp.mpz_powm(self._mpz_p, + self._mpz_p, + exponent._mpz_p, + modulus._mpz_p) + return self + + def __pow__(self, exponent, modulus=None): + result = IntegerGMP(self) + return result.inplace_pow(exponent, modulus) + + def __abs__(self): + result = IntegerGMP(0) + _gmp.mpz_abs(result._mpz_p, self._mpz_p) + return result + + def sqrt(self, modulus=None): + """Return the largest Integer that does not + exceed the square root""" + + if modulus is None: + if self < 0: + raise ValueError("Square root of negative value") + result = IntegerGMP(0) + _gmp.mpz_sqrt(result._mpz_p, + self._mpz_p) + else: + if modulus <= 0: + raise ValueError("Modulus must be positive") + modulus = int(modulus) + result = IntegerGMP(self._tonelli_shanks(int(self) % modulus, modulus)) + + return result + + def __iadd__(self, term): + if is_native_int(term): + if 0 <= term < 65536: + _gmp.mpz_add_ui(self._mpz_p, + self._mpz_p, + c_ulong(term)) + return self + if -65535 < term < 0: + _gmp.mpz_sub_ui(self._mpz_p, + self._mpz_p, + c_ulong(-term)) + return self + term = IntegerGMP(term) + _gmp.mpz_add(self._mpz_p, + self._mpz_p, + term._mpz_p) + return self + + def __isub__(self, term): + if is_native_int(term): + if 0 <= term < 65536: + _gmp.mpz_sub_ui(self._mpz_p, + self._mpz_p, + c_ulong(term)) + return self + if -65535 < term < 0: + _gmp.mpz_add_ui(self._mpz_p, + self._mpz_p, + c_ulong(-term)) + return self + term = IntegerGMP(term) + _gmp.mpz_sub(self._mpz_p, + self._mpz_p, + term._mpz_p) + return self + + def __imul__(self, term): + if is_native_int(term): + if 0 <= term < 65536: + _gmp.mpz_mul_ui(self._mpz_p, + self._mpz_p, + c_ulong(term)) + return self + if -65535 < term < 0: + _gmp.mpz_mul_ui(self._mpz_p, + self._mpz_p, + c_ulong(-term)) + _gmp.mpz_neg(self._mpz_p, self._mpz_p) + return self + term = IntegerGMP(term) + _gmp.mpz_mul(self._mpz_p, + self._mpz_p, + term._mpz_p) + return self + + def __imod__(self, divisor): + if not isinstance(divisor, IntegerGMP): + divisor = 
IntegerGMP(divisor) + comp = _gmp.mpz_cmp(divisor._mpz_p, + divisor._zero_mpz_p) + if comp == 0: + raise ZeroDivisionError("Division by zero") + if comp < 0: + raise ValueError("Modulus must be positive") + _gmp.mpz_mod(self._mpz_p, + self._mpz_p, + divisor._mpz_p) + return self + + # Boolean/bit operations + def __and__(self, term): + result = IntegerGMP(0) + if not isinstance(term, IntegerGMP): + term = IntegerGMP(term) + _gmp.mpz_and(result._mpz_p, + self._mpz_p, + term._mpz_p) + return result + + def __or__(self, term): + result = IntegerGMP(0) + if not isinstance(term, IntegerGMP): + term = IntegerGMP(term) + _gmp.mpz_ior(result._mpz_p, + self._mpz_p, + term._mpz_p) + return result + + def __rshift__(self, pos): + result = IntegerGMP(0) + if pos < 0: + raise ValueError("negative shift count") + if pos > 65536: + if self < 0: + return -1 + else: + return 0 + _gmp.mpz_tdiv_q_2exp(result._mpz_p, + self._mpz_p, + c_ulong(int(pos))) + return result + + def __irshift__(self, pos): + if pos < 0: + raise ValueError("negative shift count") + if pos > 65536: + if self < 0: + return -1 + else: + return 0 + _gmp.mpz_tdiv_q_2exp(self._mpz_p, + self._mpz_p, + c_ulong(int(pos))) + return self + + def __lshift__(self, pos): + result = IntegerGMP(0) + if not 0 <= pos < 65536: + raise ValueError("Incorrect shift count") + _gmp.mpz_mul_2exp(result._mpz_p, + self._mpz_p, + c_ulong(int(pos))) + return result + + def __ilshift__(self, pos): + if not 0 <= pos < 65536: + raise ValueError("Incorrect shift count") + _gmp.mpz_mul_2exp(self._mpz_p, + self._mpz_p, + c_ulong(int(pos))) + return self + + def get_bit(self, n): + """Return True if the n-th bit is set to 1. + Bit 0 is the least significant.""" + + if self < 0: + raise ValueError("no bit representation for negative values") + if n < 0: + raise ValueError("negative bit count") + if n > 65536: + return 0 + return bool(_gmp.mpz_tstbit(self._mpz_p, + c_ulong(int(n)))) + + # Extra + def is_odd(self): + return _gmp.mpz_tstbit(self._mpz_p, 0) == 1 + + def is_even(self): + return _gmp.mpz_tstbit(self._mpz_p, 0) == 0 + + def size_in_bits(self): + """Return the minimum number of bits that can encode the number.""" + + if self < 0: + raise ValueError("Conversion only valid for non-negative numbers") + return _gmp.mpz_sizeinbase(self._mpz_p, 2) + + def size_in_bytes(self): + """Return the minimum number of bytes that can encode the number.""" + return (self.size_in_bits() - 1) // 8 + 1 + + def is_perfect_square(self): + return _gmp.mpz_perfect_square_p(self._mpz_p) != 0 + + def fail_if_divisible_by(self, small_prime): + """Raise an exception if the small prime is a divisor.""" + + if is_native_int(small_prime): + if 0 < small_prime < 65536: + if _gmp.mpz_divisible_ui_p(self._mpz_p, + c_ulong(small_prime)): + raise ValueError("The value is composite") + return + small_prime = IntegerGMP(small_prime) + if _gmp.mpz_divisible_p(self._mpz_p, + small_prime._mpz_p): + raise ValueError("The value is composite") + + def multiply_accumulate(self, a, b): + """Increment the number by the product of a and b.""" + + if not isinstance(a, IntegerGMP): + a = IntegerGMP(a) + if is_native_int(b): + if 0 < b < 65536: + _gmp.mpz_addmul_ui(self._mpz_p, + a._mpz_p, + c_ulong(b)) + return self + if -65535 < b < 0: + _gmp.mpz_submul_ui(self._mpz_p, + a._mpz_p, + c_ulong(-b)) + return self + b = IntegerGMP(b) + _gmp.mpz_addmul(self._mpz_p, + a._mpz_p, + b._mpz_p) + return self + + def set(self, source): + """Set the Integer to have the given value""" + + if not isinstance(source, 
IntegerGMP): + source = IntegerGMP(source) + _gmp.mpz_set(self._mpz_p, + source._mpz_p) + return self + + def inplace_inverse(self, modulus): + """Compute the inverse of this number in the ring of + modulo integers. + + Raise an exception if no inverse exists. + """ + + if not isinstance(modulus, IntegerGMP): + modulus = IntegerGMP(modulus) + + comp = _gmp.mpz_cmp(modulus._mpz_p, + self._zero_mpz_p) + if comp == 0: + raise ZeroDivisionError("Modulus cannot be zero") + if comp < 0: + raise ValueError("Modulus must be positive") + + result = _gmp.mpz_invert(self._mpz_p, + self._mpz_p, + modulus._mpz_p) + if not result: + raise ValueError("No inverse value can be computed") + return self + + def inverse(self, modulus): + result = IntegerGMP(self) + result.inplace_inverse(modulus) + return result + + def gcd(self, term): + """Compute the greatest common denominator between this + number and another term.""" + + result = IntegerGMP(0) + if is_native_int(term): + if 0 < term < 65535: + _gmp.mpz_gcd_ui(result._mpz_p, + self._mpz_p, + c_ulong(term)) + return result + term = IntegerGMP(term) + _gmp.mpz_gcd(result._mpz_p, self._mpz_p, term._mpz_p) + return result + + def lcm(self, term): + """Compute the least common multiplier between this + number and another term.""" + + result = IntegerGMP(0) + if not isinstance(term, IntegerGMP): + term = IntegerGMP(term) + _gmp.mpz_lcm(result._mpz_p, self._mpz_p, term._mpz_p) + return result + + @staticmethod + def jacobi_symbol(a, n): + """Compute the Jacobi symbol""" + + if not isinstance(a, IntegerGMP): + a = IntegerGMP(a) + if not isinstance(n, IntegerGMP): + n = IntegerGMP(n) + if n <= 0 or n.is_even(): + raise ValueError("n must be positive odd for the Jacobi symbol") + return _gmp.mpz_jacobi(a._mpz_p, n._mpz_p) + + @staticmethod + def _mult_modulo_bytes(term1, term2, modulus): + if not isinstance(term1, IntegerGMP): + term1 = IntegerGMP(term1) + if not isinstance(term2, IntegerGMP): + term2 = IntegerGMP(term2) + if not isinstance(modulus, IntegerGMP): + modulus = IntegerGMP(modulus) + + if modulus < 0: + raise ValueError("Modulus must be positive") + if modulus == 0: + raise ZeroDivisionError("Modulus cannot be zero") + if (modulus & 1) == 0: + raise ValueError("Odd modulus is required") + + numbers_len = len(modulus.to_bytes()) + result = ((term1 * term2) % modulus).to_bytes(numbers_len) + return result + + # Clean-up + def __del__(self): + + try: + if self._mpz_p is not None: + if self._initialized: + _gmp.mpz_clear(self._mpz_p) + + self._mpz_p = None + except AttributeError: + pass diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerGMP.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerGMP.pyi new file mode 100644 index 000000000..2181b470c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerGMP.pyi @@ -0,0 +1,3 @@ +from ._IntegerBase import IntegerBase +class IntegerGMP(IntegerBase): + pass diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerNative.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerNative.py new file mode 100644 index 000000000..e9937d09b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerNative.py @@ -0,0 +1,382 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from ._IntegerBase import IntegerBase + +from Crypto.Util.number import long_to_bytes, bytes_to_long, inverse, GCD + + +class IntegerNative(IntegerBase): + """A class to model a natural integer (including zero)""" + + def __init__(self, value): + if isinstance(value, float): + raise ValueError("A floating point type is not a natural number") + try: + self._value = value._value + except AttributeError: + self._value = value + + # Conversions + def __int__(self): + return self._value + + def __str__(self): + return str(int(self)) + + def __repr__(self): + return "Integer(%s)" % str(self) + + # Only Python 2.x + def __hex__(self): + return hex(self._value) + + # Only Python 3.x + def __index__(self): + return int(self._value) + + def to_bytes(self, block_size=0, byteorder='big'): + if self._value < 0: + raise ValueError("Conversion only valid for non-negative numbers") + result = long_to_bytes(self._value, block_size) + if len(result) > block_size > 0: + raise ValueError("Value too large to encode") + if byteorder == 'big': + pass + elif byteorder == 'little': + result = bytearray(result) + result.reverse() + result = bytes(result) + else: + raise ValueError("Incorrect byteorder") + return result + + @classmethod + def from_bytes(cls, byte_string, byteorder='big'): + if byteorder == 'big': + pass + elif byteorder == 'little': + byte_string = bytearray(byte_string) + byte_string.reverse() + else: + raise ValueError("Incorrect byteorder") + return cls(bytes_to_long(byte_string)) + + # Relations + def __eq__(self, term): + if term is None: + return False + return self._value == int(term) + + def __ne__(self, term): + return not self.__eq__(term) + + def __lt__(self, term): + return self._value < int(term) + + def __le__(self, term): + return self.__lt__(term) or self.__eq__(term) + + def __gt__(self, term): + return not self.__le__(term) + + def __ge__(self, term): + return not self.__lt__(term) + + def __nonzero__(self): + return self._value != 0 + __bool__ = __nonzero__ + + def is_negative(self): + return self._value < 0 + + # Arithmetic operations + def __add__(self, term): + try: + return self.__class__(self._value + int(term)) + except 
(ValueError, AttributeError, TypeError): + return NotImplemented + + def __sub__(self, term): + try: + return self.__class__(self._value - int(term)) + except (ValueError, AttributeError, TypeError): + return NotImplemented + + def __mul__(self, factor): + try: + return self.__class__(self._value * int(factor)) + except (ValueError, AttributeError, TypeError): + return NotImplemented + + def __floordiv__(self, divisor): + return self.__class__(self._value // int(divisor)) + + def __mod__(self, divisor): + divisor_value = int(divisor) + if divisor_value < 0: + raise ValueError("Modulus must be positive") + return self.__class__(self._value % divisor_value) + + def inplace_pow(self, exponent, modulus=None): + exp_value = int(exponent) + if exp_value < 0: + raise ValueError("Exponent must not be negative") + + if modulus is not None: + mod_value = int(modulus) + if mod_value < 0: + raise ValueError("Modulus must be positive") + if mod_value == 0: + raise ZeroDivisionError("Modulus cannot be zero") + else: + mod_value = None + self._value = pow(self._value, exp_value, mod_value) + return self + + def __pow__(self, exponent, modulus=None): + result = self.__class__(self) + return result.inplace_pow(exponent, modulus) + + def __abs__(self): + return abs(self._value) + + def sqrt(self, modulus=None): + + value = self._value + if modulus is None: + if value < 0: + raise ValueError("Square root of negative value") + # http://stackoverflow.com/questions/15390807/integer-square-root-in-python + + x = value + y = (x + 1) // 2 + while y < x: + x = y + y = (x + value // x) // 2 + result = x + else: + if modulus <= 0: + raise ValueError("Modulus must be positive") + result = self._tonelli_shanks(self % modulus, modulus) + + return self.__class__(result) + + def __iadd__(self, term): + self._value += int(term) + return self + + def __isub__(self, term): + self._value -= int(term) + return self + + def __imul__(self, term): + self._value *= int(term) + return self + + def __imod__(self, term): + modulus = int(term) + if modulus == 0: + raise ZeroDivisionError("Division by zero") + if modulus < 0: + raise ValueError("Modulus must be positive") + self._value %= modulus + return self + + # Boolean/bit operations + def __and__(self, term): + return self.__class__(self._value & int(term)) + + def __or__(self, term): + return self.__class__(self._value | int(term)) + + def __rshift__(self, pos): + try: + return self.__class__(self._value >> int(pos)) + except OverflowError: + if self._value >= 0: + return 0 + else: + return -1 + + def __irshift__(self, pos): + try: + self._value >>= int(pos) + except OverflowError: + if self._value >= 0: + return 0 + else: + return -1 + return self + + def __lshift__(self, pos): + try: + return self.__class__(self._value << int(pos)) + except OverflowError: + raise ValueError("Incorrect shift count") + + def __ilshift__(self, pos): + try: + self._value <<= int(pos) + except OverflowError: + raise ValueError("Incorrect shift count") + return self + + def get_bit(self, n): + if self._value < 0: + raise ValueError("no bit representation for negative values") + try: + try: + result = (self._value >> n._value) & 1 + if n._value < 0: + raise ValueError("negative bit count") + except AttributeError: + result = (self._value >> n) & 1 + if n < 0: + raise ValueError("negative bit count") + except OverflowError: + result = 0 + return result + + # Extra + def is_odd(self): + return (self._value & 1) == 1 + + def is_even(self): + return (self._value & 1) == 0 + + def size_in_bits(self): + + 
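+ # Minimum number of bits needed to encode the value; zero is reported as taking 1 bit.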
if self._value < 0: + raise ValueError("Conversion only valid for non-negative numbers") + + if self._value == 0: + return 1 + + return self._value.bit_length() + + def size_in_bytes(self): + return (self.size_in_bits() - 1) // 8 + 1 + + def is_perfect_square(self): + if self._value < 0: + return False + if self._value in (0, 1): + return True + + x = self._value // 2 + square_x = x ** 2 + + while square_x > self._value: + x = (square_x + self._value) // (2 * x) + square_x = x ** 2 + + return self._value == x ** 2 + + def fail_if_divisible_by(self, small_prime): + if (self._value % int(small_prime)) == 0: + raise ValueError("Value is composite") + + def multiply_accumulate(self, a, b): + self._value += int(a) * int(b) + return self + + def set(self, source): + self._value = int(source) + + def inplace_inverse(self, modulus): + self._value = inverse(self._value, int(modulus)) + return self + + def inverse(self, modulus): + result = self.__class__(self) + result.inplace_inverse(modulus) + return result + + def gcd(self, term): + return self.__class__(GCD(abs(self._value), abs(int(term)))) + + def lcm(self, term): + term = int(term) + if self._value == 0 or term == 0: + return self.__class__(0) + return self.__class__(abs((self._value * term) // self.gcd(term)._value)) + + @staticmethod + def jacobi_symbol(a, n): + a = int(a) + n = int(n) + + if n <= 0: + raise ValueError("n must be a positive integer") + + if (n & 1) == 0: + raise ValueError("n must be odd for the Jacobi symbol") + + # Step 1 + a = a % n + # Step 2 + if a == 1 or n == 1: + return 1 + # Step 3 + if a == 0: + return 0 + # Step 4 + e = 0 + a1 = a + while (a1 & 1) == 0: + a1 >>= 1 + e += 1 + # Step 5 + if (e & 1) == 0: + s = 1 + elif n % 8 in (1, 7): + s = 1 + else: + s = -1 + # Step 6 + if n % 4 == 3 and a1 % 4 == 3: + s = -s + # Step 7 + n1 = n % a1 + # Step 8 + return s * IntegerNative.jacobi_symbol(n1, a1) + + @staticmethod + def _mult_modulo_bytes(term1, term2, modulus): + if modulus < 0: + raise ValueError("Modulus must be positive") + if modulus == 0: + raise ZeroDivisionError("Modulus cannot be zero") + if (modulus & 1) == 0: + raise ValueError("Odd modulus is required") + + number_len = len(long_to_bytes(modulus)) + return long_to_bytes((term1 * term2) % modulus, number_len) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerNative.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerNative.pyi new file mode 100644 index 000000000..3f65a39eb --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_IntegerNative.pyi @@ -0,0 +1,3 @@ +from ._IntegerBase import IntegerBase +class IntegerNative(IntegerBase): + pass diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_modexp.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_modexp.abi3.so new file mode 100755 index 000000000..d11de72ab Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Math/_modexp.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/DH.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/DH.py new file mode 100644 index 000000000..fb21daabc --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/DH.py @@ 
-0,0 +1,101 @@ +from Crypto.Util.number import long_to_bytes +from Crypto.PublicKey.ECC import EccKey + + +def _compute_ecdh(key_priv, key_pub): + # See Section 5.7.1.2 in NIST SP 800-56Ar3 + pointP = key_pub.pointQ * key_priv.d + if pointP.is_point_at_infinity(): + raise ValueError("Invalid ECDH point") + z = long_to_bytes(pointP.x, pointP.size_in_bytes()) + return z + + +def key_agreement(**kwargs): + """Perform a Diffie-Hellman key agreement. + + Keywords: + kdf (callable): + A key derivation function that accepts ``bytes`` as input and returns + ``bytes``. + static_priv (EccKey): + The local static private key. Optional. + static_pub (EccKey): + The static public key that belongs to the peer. Optional. + eph_priv (EccKey): + The local ephemeral private key, generated for this session. Optional. + eph_pub (EccKey): + The ephemeral public key, received from the peer for this session. Optional. + + At least two keys must be passed, of which one is a private key and one + a public key. + + Returns (bytes): + The derived secret key material. + """ + + static_priv = kwargs.get('static_priv', None) + static_pub = kwargs.get('static_pub', None) + eph_priv = kwargs.get('eph_priv', None) + eph_pub = kwargs.get('eph_pub', None) + kdf = kwargs.get('kdf', None) + + if kdf is None: + raise ValueError("'kdf' is mandatory") + + count_priv = 0 + count_pub = 0 + curve = None + + def check_curve(curve, key, name, private): + if not isinstance(key, EccKey): + raise TypeError("'%s' must be an ECC key" % name) + if private and not key.has_private(): + raise TypeError("'%s' must be a private ECC key" % name) + if curve is None: + curve = key.curve + elif curve != key.curve: + raise TypeError("'%s' is defined on an incompatible curve" % name) + return curve + + if static_priv is not None: + curve = check_curve(curve, static_priv, 'static_priv', True) + count_priv += 1 + + if static_pub is not None: + curve = check_curve(curve, static_pub, 'static_pub', False) + count_pub += 1 + + if eph_priv is not None: + curve = check_curve(curve, eph_priv, 'eph_priv', True) + count_priv += 1 + + if eph_pub is not None: + curve = check_curve(curve, eph_pub, 'eph_pub', False) + count_pub += 1 + + if (count_priv + count_pub) < 2 or count_priv == 0 or count_pub == 0: + raise ValueError("Too few keys for the ECDH key agreement") + + Zs = b'' + Ze = b'' + + if static_priv and static_pub: + # C(*, 2s) + Zs = _compute_ecdh(static_priv, static_pub) + + if eph_priv and eph_pub: + # C(2e, 0s) or C(2e, 2s) + if bool(static_priv) != bool(static_pub): + raise ValueError("DH mode C(2e, 1s) is not supported") + Ze = _compute_ecdh(eph_priv, eph_pub) + elif eph_priv and static_pub: + # C(1e, 2s) or C(1e, 1s) + Ze = _compute_ecdh(eph_priv, static_pub) + elif eph_pub and static_priv: + # C(1e, 2s) or C(1e, 1s) + Ze = _compute_ecdh(static_priv, eph_pub) + + Z = Ze + Zs + + return kdf(Z) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/DH.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/DH.pyi new file mode 100644 index 000000000..8f8875963 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/DH.pyi @@ -0,0 +1,15 @@ +from typing import TypedDict, Callable, TypeVar, Generic +from typing_extensions import Unpack, NotRequired + +from Crypto.PublicKey.ECC import EccKey + +T = TypeVar('T') + +class RequestParams(TypedDict, Generic[T]): + kdf: Callable[[bytes|bytearray|memoryview], T] + static_priv: NotRequired[EccKey] + static_pub: NotRequired[EccKey] + eph_priv: 
NotRequired[EccKey] + eph_pub: NotRequired[EccKey] + +def key_agreement(**kwargs: Unpack[RequestParams[T]]) -> T: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/KDF.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/KDF.py new file mode 100644 index 000000000..afc581eb9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/KDF.py @@ -0,0 +1,642 @@ +# coding=utf-8 +# +# KDF.py : a collection of Key Derivation Functions +# +# Part of the Python Cryptography Toolkit +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +import re +import struct +from functools import reduce + +from Crypto.Util.py3compat import (tobytes, bord, _copy_bytes, iter_range, + tostr, bchr, bstr) + +from Crypto.Hash import SHA1, SHA256, HMAC, CMAC, BLAKE2s +from Crypto.Util.strxor import strxor +from Crypto.Random import get_random_bytes +from Crypto.Util.number import size as bit_size, long_to_bytes, bytes_to_long + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + create_string_buffer, + get_raw_buffer, c_size_t) + +_raw_salsa20_lib = load_pycryptodome_raw_lib("Crypto.Cipher._Salsa20", + """ + int Salsa20_8_core(const uint8_t *x, const uint8_t *y, + uint8_t *out); + """) + +_raw_scrypt_lib = load_pycryptodome_raw_lib("Crypto.Protocol._scrypt", + """ + typedef int (core_t)(const uint8_t [64], const uint8_t [64], uint8_t [64]); + int scryptROMix(const uint8_t *data_in, uint8_t *data_out, + size_t data_len, unsigned N, core_t *core); + """) + + +def PBKDF1(password, salt, dkLen, count=1000, hashAlgo=None): + """Derive one key from a password (or passphrase). + + This function performs key derivation according to an old version of + the PKCS#5 standard (v1.5) or `RFC2898 + `_. + + Args: + password (string): + The secret password to generate the key from. + salt (byte string): + An 8 byte string to use for better protection from dictionary attacks. + This value does not need to be kept secret, but it should be randomly + chosen for each derivation. + dkLen (integer): + The length of the desired key. The default is 16 bytes, suitable for + instance for :mod:`Crypto.Cipher.AES`. + count (integer): + The number of iterations to carry out. The recommendation is 1000 or + more. + hashAlgo (module): + The hash algorithm to use, as a module or an object from the :mod:`Crypto.Hash` package. + The digest length must be no shorter than ``dkLen``. + The default algorithm is :mod:`Crypto.Hash.SHA1`. + + Return: + A byte string of length ``dkLen`` that can be used as key. 
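A minimal usage sketch for key_agreement() above, assuming both parties hold keys on the same NIST P-256 curve (the static/static case); the SHA-256 kdf is an illustrative choice, not something the module mandates:

    from Crypto.PublicKey import ECC
    from Crypto.Hash import SHA256
    from Crypto.Protocol.DH import key_agreement

    kdf = lambda z: SHA256.new(z).digest()

    priv_a = ECC.generate(curve='p256')   # local static key pair
    priv_b = ECC.generate(curve='p256')   # peer's static key pair (normally not in our hands)

    # Each side combines its own private key with the peer's public key.
    secret_a = key_agreement(static_priv=priv_a, static_pub=priv_b.public_key(), kdf=kdf)
    secret_b = key_agreement(static_priv=priv_b, static_pub=priv_a.public_key(), kdf=kdf)
    assert secret_a == secret_b           # both sides derive the same 32-byte secret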
+ """ + + if not hashAlgo: + hashAlgo = SHA1 + password = tobytes(password) + pHash = hashAlgo.new(password+salt) + digest = pHash.digest_size + if dkLen > digest: + raise TypeError("Selected hash algorithm has a too short digest (%d bytes)." % digest) + if len(salt) != 8: + raise ValueError("Salt is not 8 bytes long (%d bytes instead)." % len(salt)) + for i in iter_range(count-1): + pHash = pHash.new(pHash.digest()) + return pHash.digest()[:dkLen] + + +def PBKDF2(password, salt, dkLen=16, count=1000, prf=None, hmac_hash_module=None): + """Derive one or more keys from a password (or passphrase). + + This function performs key derivation according to the PKCS#5 standard (v2.0). + + Args: + password (string or byte string): + The secret password to generate the key from. + + Strings will be encoded as ISO 8859-1 (also known as Latin-1), + which does not allow any characters with codepoints > 255. + salt (string or byte string): + A (byte) string to use for better protection from dictionary attacks. + This value does not need to be kept secret, but it should be randomly + chosen for each derivation. It is recommended to use at least 16 bytes. + + Strings will be encoded as ISO 8859-1 (also known as Latin-1), + which does not allow any characters with codepoints > 255. + dkLen (integer): + The cumulative length of the keys to produce. + + Due to a flaw in the PBKDF2 design, you should not request more bytes + than the ``prf`` can output. For instance, ``dkLen`` should not exceed + 20 bytes in combination with ``HMAC-SHA1``. + count (integer): + The number of iterations to carry out. The higher the value, the slower + and the more secure the function becomes. + + You should find the maximum number of iterations that keeps the + key derivation still acceptable on the slowest hardware you must support. + + Although the default value is 1000, **it is recommended to use at least + 1000000 (1 million) iterations**. + prf (callable): + A pseudorandom function. It must be a function that returns a + pseudorandom byte string from two parameters: a secret and a salt. + The slower the algorithm, the more secure the derivation function. + If not specified, **HMAC-SHA1** is used. + hmac_hash_module (module): + A module from ``Crypto.Hash`` implementing a Merkle-Damgard cryptographic + hash, which PBKDF2 must use in combination with HMAC. + This parameter is mutually exclusive with ``prf``. + + Return: + A byte string of length ``dkLen`` that can be used as key material. + If you want multiple keys, just break up this string into segments of the desired length. + """ + + password = tobytes(password) + salt = tobytes(salt) + + if prf and hmac_hash_module: + raise ValueError("'prf' and 'hmac_hash_module' are mutually exlusive") + + if prf is None and hmac_hash_module is None: + hmac_hash_module = SHA1 + + if prf or not hasattr(hmac_hash_module, "_pbkdf2_hmac_assist"): + # Generic (and slow) implementation + + if prf is None: + prf = lambda p,s: HMAC.new(p, s, hmac_hash_module).digest() + + def link(s): + s[0], s[1] = s[1], prf(password, s[1]) + return s[0] + + key = b'' + i = 1 + while len(key) < dkLen: + s = [ prf(password, salt + struct.pack(">I", i)) ] * 2 + key += reduce(strxor, (link(s) for j in range(count)) ) + i += 1 + + else: + # Optimized implementation + key = b'' + i = 1 + while len(key) < dkLen: + base = HMAC.new(password, b"", hmac_hash_module) + first_digest = base.copy().update(salt + struct.pack(">I", i)).digest() + key += base._pbkdf2_hmac_assist(first_digest, count) + i += 1 + + return key[:dkLen] + + +class _S2V(object): + """String-to-vector PRF as defined in `RFC5297`_. 
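A short PBKDF2 usage sketch, assuming SHA-512 as the HMAC hash and the iteration count recommended in the docstring above; the random salt would be stored alongside the derived key:

    from Crypto.Protocol.KDF import PBKDF2
    from Crypto.Hash import SHA512
    from Crypto.Random import get_random_bytes

    salt = get_random_bytes(16)
    # 32 bytes of key material, e.g. for AES-256.
    key = PBKDF2("correct horse battery staple", salt, dkLen=32,
                 count=1000000, hmac_hash_module=SHA512)
    assert len(key) == 32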
+ + This class implements a pseudorandom function family + based on CMAC that takes as input a vector of strings. + + .. _RFC5297: http://tools.ietf.org/html/rfc5297 + """ + + def __init__(self, key, ciphermod, cipher_params=None): + """Initialize the S2V PRF. + + :Parameters: + key : byte string + A secret that can be used as key for CMACs + based on ciphers from ``ciphermod``. + ciphermod : module + A block cipher module from `Crypto.Cipher`. + cipher_params : dictionary + A set of extra parameters to use to create a cipher instance. + """ + + self._key = _copy_bytes(None, None, key) + self._ciphermod = ciphermod + self._last_string = self._cache = b'\x00' * ciphermod.block_size + + # Max number of update() call we can process + self._n_updates = ciphermod.block_size * 8 - 1 + + if cipher_params is None: + self._cipher_params = {} + else: + self._cipher_params = dict(cipher_params) + + @staticmethod + def new(key, ciphermod): + """Create a new S2V PRF. + + :Parameters: + key : byte string + A secret that can be used as key for CMACs + based on ciphers from ``ciphermod``. + ciphermod : module + A block cipher module from `Crypto.Cipher`. + """ + return _S2V(key, ciphermod) + + def _double(self, bs): + doubled = bytes_to_long(bs)<<1 + if bord(bs[0]) & 0x80: + doubled ^= 0x87 + return long_to_bytes(doubled, len(bs))[-len(bs):] + + def update(self, item): + """Pass the next component of the vector. + + The maximum number of components you can pass is equal to the block + length of the cipher (in bits) minus 1. + + :Parameters: + item : byte string + The next component of the vector. + :Raise TypeError: when the limit on the number of components has been reached. + """ + + if self._n_updates == 0: + raise TypeError("Too many components passed to S2V") + self._n_updates -= 1 + + mac = CMAC.new(self._key, + msg=self._last_string, + ciphermod=self._ciphermod, + cipher_params=self._cipher_params) + self._cache = strxor(self._double(self._cache), mac.digest()) + self._last_string = _copy_bytes(None, None, item) + + def derive(self): + """"Derive a secret from the vector of components. + + :Return: a byte string, as long as the block length of the cipher. + """ + + if len(self._last_string) >= 16: + # xorend + final = self._last_string[:-16] + strxor(self._last_string[-16:], self._cache) + else: + # zero-pad & xor + padded = (self._last_string + b'\x80' + b'\x00' * 15)[:16] + final = strxor(padded, self._double(self._cache)) + mac = CMAC.new(self._key, + msg=final, + ciphermod=self._ciphermod, + cipher_params=self._cipher_params) + return mac.digest() + + +def HKDF(master, key_len, salt, hashmod, num_keys=1, context=None): + """Derive one or more keys from a master secret using + the HMAC-based KDF defined in RFC5869_. + + Args: + master (byte string): + The unguessable value used by the KDF to generate the other keys. + It must be a high-entropy secret, though not necessarily uniform. + It must not be a password. + key_len (integer): + The length in bytes of every derived key. + salt (byte string): + A non-secret, reusable value that strengthens the randomness + extraction step. + Ideally, it is as long as the digest size of the chosen hash. + If empty, a string of zeroes in used. + hashmod (module): + A cryptographic hash algorithm from :mod:`Crypto.Hash`. + :mod:`Crypto.Hash.SHA512` is a good choice. + num_keys (integer): + The number of keys to derive. Every key is :data:`key_len` bytes long. + The maximum cumulative length of all keys is + 255 times the digest size. 
+ context (byte string): + Optional identifier describing what the keys are used for. + + Return: + A byte string or a tuple of byte strings. + + .. _RFC5869: http://tools.ietf.org/html/rfc5869 + """ + + output_len = key_len * num_keys + if output_len > (255 * hashmod.digest_size): + raise ValueError("Too much secret data to derive") + if not salt: + salt = b'\x00' * hashmod.digest_size + if context is None: + context = b"" + + # Step 1: extract + hmac = HMAC.new(salt, master, digestmod=hashmod) + prk = hmac.digest() + + # Step 2: expand + t = [ b"" ] + n = 1 + tlen = 0 + while tlen < output_len: + hmac = HMAC.new(prk, t[-1] + context + struct.pack('B', n), digestmod=hashmod) + t.append(hmac.digest()) + tlen += hashmod.digest_size + n += 1 + derived_output = b"".join(t) + if num_keys == 1: + return derived_output[:key_len] + kol = [derived_output[idx:idx + key_len] + for idx in iter_range(0, output_len, key_len)] + return list(kol[:num_keys]) + + + +def scrypt(password, salt, key_len, N, r, p, num_keys=1): + """Derive one or more keys from a passphrase. + + Args: + password (string): + The secret pass phrase to generate the keys from. + salt (string): + A string to use for better protection from dictionary attacks. + This value does not need to be kept secret, + but it should be randomly chosen for each derivation. + It is recommended to be at least 16 bytes long. + key_len (integer): + The length in bytes of each derived key. + N (integer): + CPU/Memory cost parameter. It must be a power of 2 and less + than :math:`2^{32}`. + r (integer): + Block size parameter. + p (integer): + Parallelization parameter. + It must be no greater than :math:`(2^{32}-1)/(4r)`. + num_keys (integer): + The number of keys to derive. Every key is :data:`key_len` bytes long. + By default, only 1 key is generated. + The maximum cumulative length of all keys is :math:`(2^{32}-1)*32` + (that is, 128TB). + + A good choice of parameters *(N, r , p)* was suggested + by Colin Percival in his `presentation in 2009`__: + + - *( 2¹⁴, 8, 1 )* for interactive logins (≤100ms) + - *( 2²⁰, 8, 1 )* for file encryption (≤5s) + + Return: + A byte string or a tuple of byte strings. + + .. 
__: http://www.tarsnap.com/scrypt/scrypt-slides.pdf + """ + + if 2 ** (bit_size(N) - 1) != N: + raise ValueError("N must be a power of 2") + if N >= 2 ** 32: + raise ValueError("N is too big") + if p > ((2 ** 32 - 1) * 32) // (128 * r): + raise ValueError("p or r are too big") + + prf_hmac_sha256 = lambda p, s: HMAC.new(p, s, SHA256).digest() + + stage_1 = PBKDF2(password, salt, p * 128 * r, 1, prf=prf_hmac_sha256) + + scryptROMix = _raw_scrypt_lib.scryptROMix + core = _raw_salsa20_lib.Salsa20_8_core + + # Parallelize into p flows + data_out = [] + for flow in iter_range(p): + idx = flow * 128 * r + buffer_out = create_string_buffer(128 * r) + result = scryptROMix(stage_1[idx : idx + 128 * r], + buffer_out, + c_size_t(128 * r), + N, + core) + if result: + raise ValueError("Error %X while running scrypt" % result) + data_out += [ get_raw_buffer(buffer_out) ] + + dk = PBKDF2(password, + b"".join(data_out), + key_len * num_keys, 1, + prf=prf_hmac_sha256) + + if num_keys == 1: + return dk + + kol = [dk[idx:idx + key_len] + for idx in iter_range(0, key_len * num_keys, key_len)] + return kol + + +def _bcrypt_encode(data): + s = "./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" + + bits = [] + for c in data: + bits_c = bin(bord(c))[2:].zfill(8) + bits.append(bstr(bits_c)) + bits = b"".join(bits) + + bits6 = [ bits[idx:idx+6] for idx in range(0, len(bits), 6) ] + + result = [] + for g in bits6[:-1]: + idx = int(g, 2) + result.append(s[idx]) + + g = bits6[-1] + idx = int(g, 2) << (6 - len(g)) + result.append(s[idx]) + result = "".join(result) + + return tobytes(result) + + +def _bcrypt_decode(data): + s = "./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" + + bits = [] + for c in tostr(data): + idx = s.find(c) + bits6 = bin(idx)[2:].zfill(6) + bits.append(bits6) + bits = "".join(bits) + + modulo4 = len(data) % 4 + if modulo4 == 1: + raise ValueError("Incorrect length") + elif modulo4 == 2: + bits = bits[:-4] + elif modulo4 == 3: + bits = bits[:-2] + + bits8 = [ bits[idx:idx+8] for idx in range(0, len(bits), 8) ] + + result = [] + for g in bits8: + result.append(bchr(int(g, 2))) + result = b"".join(result) + + return result + + +def _bcrypt_hash(password, cost, salt, constant, invert): + from Crypto.Cipher import _EKSBlowfish + + if len(password) > 72: + raise ValueError("The password is too long. It must be 72 bytes at most.") + + if not (4 <= cost <= 31): + raise ValueError("bcrypt cost factor must be in the range 4..31") + + cipher = _EKSBlowfish.new(password, _EKSBlowfish.MODE_ECB, salt, cost, invert) + ctext = constant + for _ in range(64): + ctext = cipher.encrypt(ctext) + return ctext + + +def bcrypt(password, cost, salt=None): + """Hash a password into a key, using the OpenBSD bcrypt protocol. + + Args: + password (byte string or string): + The secret password or pass phrase. + It must be at most 72 bytes long. + It must not contain the zero byte. + Unicode strings will be encoded as UTF-8. + cost (integer): + The exponential factor that makes it slower to compute the hash. + It must be in the range 4 to 31. + A value of at least 12 is recommended. + salt (byte string): + Optional. Random byte string to thwarts dictionary and rainbow table + attacks. It must be 16 bytes long. + If not passed, a random value is generated. 
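An illustrative scrypt call using the interactive-login parameters (2^14, 8, 1) suggested in the docstring above; the salt is generated per derivation:

    from Crypto.Protocol.KDF import scrypt
    from Crypto.Random import get_random_bytes

    salt = get_random_bytes(16)
    key = scrypt("my passphrase", salt, key_len=32, N=2**14, r=8, p=1)
    assert len(key) == 32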
+ + Return (byte string): + The bcrypt hash + + Raises: + ValueError: if password is longer than 72 bytes or if it contains the zero byte + + """ + + password = tobytes(password, "utf-8") + + if password.find(bchr(0)[0]) != -1: + raise ValueError("The password contains the zero byte") + + if len(password) < 72: + password += b"\x00" + + if salt is None: + salt = get_random_bytes(16) + if len(salt) != 16: + raise ValueError("bcrypt salt must be 16 bytes long") + + ctext = _bcrypt_hash(password, cost, salt, b"OrpheanBeholderScryDoubt", True) + + cost_enc = b"$" + bstr(str(cost).zfill(2)) + salt_enc = b"$" + _bcrypt_encode(salt) + hash_enc = _bcrypt_encode(ctext[:-1]) # only use 23 bytes, not 24 + return b"$2a" + cost_enc + salt_enc + hash_enc + + +def bcrypt_check(password, bcrypt_hash): + """Verify if the provided password matches the given bcrypt hash. + + Args: + password (byte string or string): + The secret password or pass phrase to test. + It must be at most 72 bytes long. + It must not contain the zero byte. + Unicode strings will be encoded as UTF-8. + bcrypt_hash (byte string, bytearray): + The reference bcrypt hash the password needs to be checked against. + + Raises: + ValueError: if the password does not match + """ + + bcrypt_hash = tobytes(bcrypt_hash) + + if len(bcrypt_hash) != 60: + raise ValueError("Incorrect length of the bcrypt hash: %d bytes instead of 60" % len(bcrypt_hash)) + + if bcrypt_hash[:4] != b'$2a$': + raise ValueError("Unsupported prefix") + + p = re.compile(br'\$2a\$([0-9][0-9])\$([A-Za-z0-9./]{22,22})([A-Za-z0-9./]{31,31})') + r = p.match(bcrypt_hash) + if not r: + raise ValueError("Incorrect bcrypt hash format") + + cost = int(r.group(1)) + if not (4 <= cost <= 31): + raise ValueError("Incorrect cost") + + salt = _bcrypt_decode(r.group(2)) + + bcrypt_hash2 = bcrypt(password, cost, salt) + + secret = get_random_bytes(16) + + mac1 = BLAKE2s.new(digest_bits=160, key=secret, data=bcrypt_hash).digest() + mac2 = BLAKE2s.new(digest_bits=160, key=secret, data=bcrypt_hash2).digest() + if mac1 != mac2: + raise ValueError("Incorrect bcrypt hash") + + +def SP800_108_Counter(master, key_len, prf, num_keys=None, label=b'', context=b''): + """Derive one or more keys from a master secret using + a pseudorandom function in Counter Mode, as specified in + `NIST SP 800-108r1 `_. + + Args: + master (byte string): + The secret value used by the KDF to derive the other keys. + It must not be a password. + The length on the secret must be consistent with the input expected by + the :data:`prf` function. + key_len (integer): + The length in bytes of each derived key. + prf (function): + A pseudorandom function that takes two byte strings as parameters: + the secret and an input. It returns another byte string. + num_keys (integer): + The number of keys to derive. Every key is :data:`key_len` bytes long. + By default, only 1 key is derived. + label (byte string): + Optional description of the purpose of the derived keys. + It must not contain zero bytes. + context (byte string): + Optional information pertaining to + the protocol that uses the keys, such as the identity of the + participants, nonces, session IDs, etc. + It must not contain zero bytes. + + Return: + - a byte string (if ``num_keys`` is not specified), or + - a tuple of byte strings (if ``num_key`` is specified). 
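A sketch of the bcrypt / bcrypt_check round trip with the recommended cost factor of 12; note that a mismatch is reported by raising ValueError rather than by a return value:

    from Crypto.Protocol.KDF import bcrypt, bcrypt_check

    stored = bcrypt("hunter2", 12)       # 16-byte salt generated internally
    bcrypt_check("hunter2", stored)      # returns silently on success
    try:
        bcrypt_check("wrong guess", stored)
    except ValueError:
        pass                             # wrong password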
+ """ + + if num_keys is None: + num_keys = 1 + + if label.find(b'\x00') != -1: + raise ValueError("Null byte found in label") + + if context.find(b'\x00') != -1: + raise ValueError("Null byte found in context") + + key_len_enc = long_to_bytes(key_len * num_keys * 8, 4) + output_len = key_len * num_keys + + i = 1 + dk = b"" + while len(dk) < output_len: + info = long_to_bytes(i, 4) + label + b'\x00' + context + key_len_enc + dk += prf(master, info) + i += 1 + if i > 0xFFFFFFFF: + raise ValueError("Overflow in SP800 108 counter") + + if num_keys == 1: + return dk[:key_len] + else: + kol = [dk[idx:idx + key_len] + for idx in iter_range(0, output_len, key_len)] + return kol diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/KDF.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/KDF.pyi new file mode 100644 index 000000000..745f01974 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/KDF.pyi @@ -0,0 +1,42 @@ +from types import ModuleType +from typing import Optional, Callable, Tuple, Union, Dict, Any, overload +from typing_extensions import Literal + +Buffer=bytes|bytearray|memoryview + +RNG = Callable[[int], bytes] +PRF = Callable[[bytes, bytes], bytes] + +def PBKDF1(password: str, salt: bytes, dkLen: int, count: Optional[int]=1000, hashAlgo: Optional[ModuleType]=None) -> bytes: ... +def PBKDF2(password: str, salt: bytes, dkLen: Optional[int]=16, count: Optional[int]=1000, prf: Optional[RNG]=None, hmac_hash_module: Optional[ModuleType]=None) -> bytes: ... + +class _S2V(object): + def __init__(self, key: bytes, ciphermod: ModuleType, cipher_params: Optional[Dict[Any, Any]]=None) -> None: ... + + @staticmethod + def new(key: bytes, ciphermod: ModuleType) -> None: ... + def update(self, item: bytes) -> None: ... + def derive(self) -> bytes: ... + +def HKDF(master: bytes, key_len: int, salt: bytes, hashmod: ModuleType, num_keys: Optional[int]=1, context: Optional[bytes]=None) -> Union[bytes, Tuple[bytes, ...]]: ... + +def scrypt(password: str, salt: str, key_len: int, N: int, r: int, p: int, num_keys: Optional[int]=1) -> Union[bytes, Tuple[bytes, ...]]: ... + +def _bcrypt_decode(data: bytes) -> bytes: ... +def _bcrypt_hash(password:bytes , cost: int, salt: bytes, constant:bytes, invert:bool) -> bytes: ... +def bcrypt(password: Union[bytes, str], cost: int, salt: Optional[bytes]=None) -> bytes: ... +def bcrypt_check(password: Union[bytes, str], bcrypt_hash: Union[bytes, bytearray, str]) -> None: ... + +@overload +def SP800_108_Counter(master: Buffer, + key_len: int, + prf: PRF, + num_keys: Literal[None] = None, + label: Buffer = b'', context: Buffer = b'') -> bytes: ... + +@overload +def SP800_108_Counter(master: Buffer, + key_len: int, + prf: PRF, + num_keys: int, + label: Buffer = b'', context: Buffer = b'') -> Tuple[bytes]: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/SecretSharing.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/SecretSharing.py new file mode 100644 index 000000000..a757e7cb9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/SecretSharing.py @@ -0,0 +1,278 @@ +# +# SecretSharing.py : distribute a secret amongst a group of participants +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. 
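A usage sketch for SP800_108_Counter, assuming an HMAC-SHA-256 PRF; any callable with the two-argument signature described above would work equally well:

    from Crypto.Protocol.KDF import SP800_108_Counter
    from Crypto.Hash import HMAC, SHA256
    from Crypto.Random import get_random_bytes

    master = get_random_bytes(32)
    prf = lambda secret, data: HMAC.new(secret, data, SHA256).digest()
    k_enc, k_mac = SP800_108_Counter(master, 16, prf, num_keys=2,
                                     label=b'enc+mac', context=b'session 42')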
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.py3compat import is_native_int +from Crypto.Util import number +from Crypto.Util.number import long_to_bytes, bytes_to_long +from Crypto.Random import get_random_bytes as rng + + +def _mult_gf2(f1, f2): + """Multiply two polynomials in GF(2)""" + + # Ensure f2 is the smallest + if f2 > f1: + f1, f2 = f2, f1 + z = 0 + while f2: + if f2 & 1: + z ^= f1 + f1 <<= 1 + f2 >>= 1 + return z + + +def _div_gf2(a, b): + """ + Compute division of polynomials over GF(2). + Given a and b, it finds two polynomials q and r such that: + + a = b*q + r with deg(r)<deg(b) + """ + + if (a < b): + return (0, a) + + deg = number.size + q = 0 + r = a + d = deg(b) + while deg(r) >= d: + s = 1 << (deg(r) - d) + q ^= s + r ^= _mult_gf2(b, s) + return (q, r) + + +class _Element(object): + """Element of GF(2^128) field""" + + # The irreducible polynomial defining this field is 1+x+x^2+x^7+x^128 + irr_poly = 1 + 2 + 4 + 128 + 2 ** 128 + + def __init__(self, encoded_value): + """Initialize the element to a certain value. + + The value passed as parameter is internally encoded as + a 128-bit integer, where each bit represents a polynomial + coefficient. The LSB is the constant coefficient. 
+ """ + + if is_native_int(encoded_value): + self._value = encoded_value + elif len(encoded_value) == 16: + self._value = bytes_to_long(encoded_value) + else: + raise ValueError("The encoded value must be an integer or a 16 byte string") + + def __eq__(self, other): + return self._value == other._value + + def __int__(self): + """Return the field element, encoded as a 128-bit integer.""" + return self._value + + def encode(self): + """Return the field element, encoded as a 16 byte string.""" + return long_to_bytes(self._value, 16) + + def __mul__(self, factor): + + f1 = self._value + f2 = factor._value + + # Make sure that f2 is the smallest, to speed up the loop + if f2 > f1: + f1, f2 = f2, f1 + + if self.irr_poly in (f1, f2): + return _Element(0) + + mask1 = 2 ** 128 + v, z = f1, 0 + while f2: + # if f2 ^ 1: z ^= v + mask2 = int(bin(f2 & 1)[2:] * 128, base=2) + z = (mask2 & (z ^ v)) | ((mask1 - mask2 - 1) & z) + v <<= 1 + # if v & mask1: v ^= self.irr_poly + mask3 = int(bin((v >> 128) & 1)[2:] * 128, base=2) + v = (mask3 & (v ^ self.irr_poly)) | ((mask1 - mask3 - 1) & v) + f2 >>= 1 + return _Element(z) + + def __add__(self, term): + return _Element(self._value ^ term._value) + + def inverse(self): + """Return the inverse of this element in GF(2^128).""" + + # We use the Extended GCD algorithm + # http://en.wikipedia.org/wiki/Polynomial_greatest_common_divisor + + if self._value == 0: + raise ValueError("Inversion of zero") + + r0, r1 = self._value, self.irr_poly + s0, s1 = 1, 0 + while r1 > 0: + q = _div_gf2(r0, r1)[0] + r0, r1 = r1, r0 ^ _mult_gf2(q, r1) + s0, s1 = s1, s0 ^ _mult_gf2(q, s1) + return _Element(s0) + + def __pow__(self, exponent): + result = _Element(self._value) + for _ in range(exponent - 1): + result = result * self + return result + + +class Shamir(object): + """Shamir's secret sharing scheme. + + A secret is split into ``n`` shares, and it is sufficient to collect + ``k`` of them to reconstruct the secret. + """ + + @staticmethod + def split(k, n, secret, ssss=False): + """Split a secret into ``n`` shares. + + The secret can be reconstructed later using just ``k`` shares + out of the original ``n``. + Each share must be kept confidential to the person it was + assigned to. + + Each share is associated to an index (starting from 1). + + Args: + k (integer): + The sufficient number of shares to reconstruct the secret (``k < n``). + n (integer): + The number of shares that this method will create. + secret (byte string): + A byte string of 16 bytes (e.g. the AES 128 key). + ssss (bool): + If ``True``, the shares can be used with the ``ssss`` utility. + Default: ``False``. + + Return (tuples): + ``n`` tuples. A tuple is meant for each participant and it contains two items: + + 1. the unique index (an integer) + 2. the share (a byte string, 16 bytes) + """ + + # + # We create a polynomial with random coefficients in GF(2^128): + # + # p(x) = \sum_{i=0}^{k-1} c_i * x^i + # + # c_0 is the encoded secret + # + + coeffs = [_Element(rng(16)) for i in range(k - 1)] + coeffs.append(_Element(secret)) + + # Each share is y_i = p(x_i) where x_i is the public index + # associated to each of the n users. 
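A usage sketch for the Shamir class (combine() is defined further below in this file): a 16-byte secret is split into 5 shares, any 3 of which are enough to reconstruct it:

    from Crypto.Protocol.SecretSharing import Shamir
    from Crypto.Random import get_random_bytes

    secret = get_random_bytes(16)
    shares = Shamir.split(3, 5, secret)      # five (index, 16-byte share) tuples
    recovered = Shamir.combine(shares[:3])   # any three shares suffice
    assert recovered == secret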
+ + def make_share(user, coeffs, ssss): + idx = _Element(user) + share = _Element(0) + for coeff in coeffs: + share = idx * share + coeff + if ssss: + share += _Element(user) ** len(coeffs) + return share.encode() + + return [(i, make_share(i, coeffs, ssss)) for i in range(1, n + 1)] + + @staticmethod + def combine(shares, ssss=False): + """Recombine a secret, if enough shares are presented. + + Args: + shares (tuples): + The *k* tuples, each containin the index (an integer) and + the share (a byte string, 16 bytes long) that were assigned to + a participant. + ssss (bool): + If ``True``, the shares were produced by the ``ssss`` utility. + Default: ``False``. + + Return: + The original secret, as a byte string (16 bytes long). + """ + + # + # Given k points (x,y), the interpolation polynomial of degree k-1 is: + # + # L(x) = \sum_{j=0}^{k-1} y_i * l_j(x) + # + # where: + # + # l_j(x) = \prod_{ \overset{0 \le m \le k-1}{m \ne j} } + # \frac{x - x_m}{x_j - x_m} + # + # However, in this case we are purely interested in the constant + # coefficient of L(x). + # + + k = len(shares) + + gf_shares = [] + for x in shares: + idx = _Element(x[0]) + value = _Element(x[1]) + if any(y[0] == idx for y in gf_shares): + raise ValueError("Duplicate share") + if ssss: + value += idx ** k + gf_shares.append((idx, value)) + + result = _Element(0) + for j in range(k): + x_j, y_j = gf_shares[j] + + numerator = _Element(1) + denominator = _Element(1) + + for m in range(k): + x_m = gf_shares[m][0] + if m != j: + numerator *= x_m + denominator *= x_j + x_m + result += y_j * numerator * denominator.inverse() + return result.encode() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/SecretSharing.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/SecretSharing.pyi new file mode 100644 index 000000000..5952c9938 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/SecretSharing.pyi @@ -0,0 +1,22 @@ +from typing import Union, List, Tuple, Optional + +def _mult_gf2(f1: int, f2: int) -> int : ... +def _div_gf2(a: int, b: int) -> int : ... + +class _Element(object): + irr_poly: int + def __init__(self, encoded_value: Union[int, bytes]) -> None: ... + def __eq__(self, other) -> bool: ... + def __int__(self) -> int: ... + def encode(self) -> bytes: ... + def __mul__(self, factor: int) -> _Element: ... + def __add__(self, term: _Element) -> _Element: ... + def inverse(self) -> _Element: ... + def __pow__(self, exponent) -> _Element: ... + +class Shamir(object): + @staticmethod + def split(k: int, n: int, secret: bytes, ssss: Optional[bool]) -> List[Tuple[int, bytes]]: ... + @staticmethod + def combine(shares: List[Tuple[int, bytes]], ssss: Optional[bool]) -> bytes: ... + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/__init__.py new file mode 100644 index 000000000..76e22bf64 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/__init__.py @@ -0,0 +1,31 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. 
Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +__all__ = ['KDF', 'SecretSharing', 'DH'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/__init__.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/__init__.pyi new file mode 100644 index 000000000..377ed901e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/__init__.pyi @@ -0,0 +1 @@ +__all__ = ['KDF.pyi', 'SecretSharing.pyi'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/_scrypt.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/_scrypt.abi3.so new file mode 100755 index 000000000..baf83a847 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Protocol/_scrypt.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/DSA.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/DSA.py new file mode 100644 index 000000000..4c7f47b68 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/DSA.py @@ -0,0 +1,682 @@ +# -*- coding: utf-8 -*- +# +# PublicKey/DSA.py : DSA signature primitive +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +__all__ = ['generate', 'construct', 'DsaKey', 'import_key' ] + +import binascii +import struct +import itertools + +from Crypto.Util.py3compat import bchr, bord, tobytes, tostr, iter_range + +from Crypto import Random +from Crypto.IO import PKCS8, PEM +from Crypto.Hash import SHA256 +from Crypto.Util.asn1 import ( + DerObject, DerSequence, + DerInteger, DerObjectId, + DerBitString, + ) + +from Crypto.Math.Numbers import Integer +from Crypto.Math.Primality import (test_probable_prime, COMPOSITE, + PROBABLY_PRIME) + +from Crypto.PublicKey import (_expand_subject_public_key_info, + _create_subject_public_key_info, + _extract_subject_public_key_info) + +# ; The following ASN.1 types are relevant for DSA +# +# SubjectPublicKeyInfo ::= SEQUENCE { +# algorithm AlgorithmIdentifier, +# subjectPublicKey BIT STRING +# } +# +# id-dsa ID ::= { iso(1) member-body(2) us(840) x9-57(10040) x9cm(4) 1 } +# +# ; See RFC3279 +# Dss-Parms ::= SEQUENCE { +# p INTEGER, +# q INTEGER, +# g INTEGER +# } +# +# DSAPublicKey ::= INTEGER +# +# DSSPrivatKey_OpenSSL ::= SEQUENCE +# version INTEGER, +# p INTEGER, +# q INTEGER, +# g INTEGER, +# y INTEGER, +# x INTEGER +# } +# + +class DsaKey(object): + r"""Class defining an actual DSA key. + Do not instantiate directly. + Use :func:`generate`, :func:`construct` or :func:`import_key` instead. + + :ivar p: DSA modulus + :vartype p: integer + + :ivar q: Order of the subgroup + :vartype q: integer + + :ivar g: Generator + :vartype g: integer + + :ivar y: Public key + :vartype y: integer + + :ivar x: Private key + :vartype x: integer + + :undocumented: exportKey, publickey + """ + + _keydata = ['y', 'g', 'p', 'q', 'x'] + + def __init__(self, key_dict): + input_set = set(key_dict.keys()) + public_set = set(('y' , 'g', 'p', 'q')) + if not public_set.issubset(input_set): + raise ValueError("Some DSA components are missing = %s" % + str(public_set - input_set)) + extra_set = input_set - public_set + if extra_set and extra_set != set(('x',)): + raise ValueError("Unknown DSA components = %s" % + str(extra_set - set(('x',)))) + self._key = dict(key_dict) + + def _sign(self, m, k): + if not self.has_private(): + raise TypeError("DSA public key cannot be used for signing") + if not (1 < k < self.q): + raise ValueError("k is not between 2 and q-1") + + x, q, p, g = [self._key[comp] for comp in ['x', 'q', 'p', 'g']] + + blind_factor = Integer.random_range(min_inclusive=1, + max_exclusive=q) + inv_blind_k = (blind_factor * k).inverse(q) + blind_x = x * blind_factor + + r = pow(g, k, p) % q # r = (g**k mod p) mod q + s = (inv_blind_k * (blind_factor * m + blind_x * r)) % q + return map(int, (r, s)) + + def _verify(self, m, sig): + r, s = sig + y, q, p, g = [self._key[comp] for comp in ['y', 'q', 'p', 'g']] + if not (0 < r < q) or not (0 < s < q): + return False + w = Integer(s).inverse(q) + u1 = (w * m) % q + u2 = (w * r) % q + v = (pow(g, u1, p) * pow(y, u2, p) % p) % q + return v == r + + def has_private(self): + """Whether this is a DSA private key""" + + return 'x' in self._key + + def can_encrypt(self): # legacy + return False + + def can_sign(self): # legacy + return True + + def public_key(self): + """A matching DSA public key. 
+ + Returns: + a new :class:`DsaKey` object + """ + + public_components = dict((k, self._key[k]) for k in ('y', 'g', 'p', 'q')) + return DsaKey(public_components) + + def __eq__(self, other): + if bool(self.has_private()) != bool(other.has_private()): + return False + + result = True + for comp in self._keydata: + result = result and (getattr(self._key, comp, None) == + getattr(other._key, comp, None)) + return result + + def __ne__(self, other): + return not self.__eq__(other) + + def __getstate__(self): + # DSA key is not pickable + from pickle import PicklingError + raise PicklingError + + def domain(self): + """The DSA domain parameters. + + Returns + tuple : (p,q,g) + """ + + return [int(self._key[comp]) for comp in ('p', 'q', 'g')] + + def __repr__(self): + attrs = [] + for k in self._keydata: + if k == 'p': + bits = Integer(self.p).size_in_bits() + attrs.append("p(%d)" % (bits,)) + elif hasattr(self, k): + attrs.append(k) + if self.has_private(): + attrs.append("private") + # PY3K: This is meant to be text, do not change to bytes (data) + return "<%s @0x%x %s>" % (self.__class__.__name__, id(self), ",".join(attrs)) + + def __getattr__(self, item): + try: + return int(self._key[item]) + except KeyError: + raise AttributeError(item) + + def export_key(self, format='PEM', pkcs8=None, passphrase=None, + protection=None, randfunc=None): + """Export this DSA key. + + Args: + format (string): + The encoding for the output: + + - *'PEM'* (default). ASCII as per `RFC1421`_/ `RFC1423`_. + - *'DER'*. Binary ASN.1 encoding. + - *'OpenSSH'*. ASCII one-liner as per `RFC4253`_. + Only suitable for public keys, not for private keys. + + passphrase (string): + *Private keys only*. The pass phrase to protect the output. + + pkcs8 (boolean): + *Private keys only*. If ``True`` (default), the key is encoded + with `PKCS#8`_. If ``False``, it is encoded in the custom + OpenSSL/OpenSSH container. + + protection (string): + *Only in combination with a pass phrase*. + The encryption scheme to use to protect the output. + + If :data:`pkcs8` takes value ``True``, this is the PKCS#8 + algorithm to use for deriving the secret and encrypting + the private DSA key. + For a complete list of algorithms, see :mod:`Crypto.IO.PKCS8`. + The default is *PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC*. + + If :data:`pkcs8` is ``False``, the obsolete PEM encryption scheme is + used. It is based on MD5 for key derivation, and Triple DES for + encryption. Parameter :data:`protection` is then ignored. + + The combination ``format='DER'`` and ``pkcs8=False`` is not allowed + if a passphrase is present. + + randfunc (callable): + A function that returns random bytes. + By default it is :func:`Crypto.Random.get_random_bytes`. + + Returns: + byte string : the encoded key + + Raises: + ValueError : when the format is unknown or when you try to encrypt a private + key with *DER* format and OpenSSL/OpenSSH. + + .. warning:: + If you don't provide a pass phrase, the private key will be + exported in the clear! + + .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt + .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt + .. _RFC4253: http://www.ietf.org/rfc/rfc4253.txt + .. 
_`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt + """ + + if passphrase is not None: + passphrase = tobytes(passphrase) + + if randfunc is None: + randfunc = Random.get_random_bytes + + if format == 'OpenSSH': + tup1 = [self._key[x].to_bytes() for x in ('p', 'q', 'g', 'y')] + + def func(x): + if (bord(x[0]) & 0x80): + return bchr(0) + x + else: + return x + + tup2 = [func(x) for x in tup1] + keyparts = [b'ssh-dss'] + tup2 + keystring = b''.join( + [struct.pack(">I", len(kp)) + kp for kp in keyparts] + ) + return b'ssh-dss ' + binascii.b2a_base64(keystring)[:-1] + + # DER format is always used, even in case of PEM, which simply + # encodes it into BASE64. + params = DerSequence([self.p, self.q, self.g]) + if self.has_private(): + if pkcs8 is None: + pkcs8 = True + if pkcs8: + if not protection: + protection = 'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC' + private_key = DerInteger(self.x).encode() + binary_key = PKCS8.wrap( + private_key, oid, passphrase, + protection, key_params=params, + randfunc=randfunc + ) + if passphrase: + key_type = 'ENCRYPTED PRIVATE' + else: + key_type = 'PRIVATE' + passphrase = None + else: + if format != 'PEM' and passphrase: + raise ValueError("DSA private key cannot be encrypted") + ints = [0, self.p, self.q, self.g, self.y, self.x] + binary_key = DerSequence(ints).encode() + key_type = "DSA PRIVATE" + else: + if pkcs8: + raise ValueError("PKCS#8 is only meaningful for private keys") + + binary_key = _create_subject_public_key_info(oid, + DerInteger(self.y), params) + key_type = "PUBLIC" + + if format == 'DER': + return binary_key + if format == 'PEM': + pem_str = PEM.encode( + binary_key, key_type + " KEY", + passphrase, randfunc + ) + return tobytes(pem_str) + raise ValueError("Unknown key format '%s'. Cannot export the DSA key." 
% format) + + # Backward-compatibility + exportKey = export_key + publickey = public_key + + # Methods defined in PyCrypto that we don't support anymore + + def sign(self, M, K): + raise NotImplementedError("Use module Crypto.Signature.DSS instead") + + def verify(self, M, signature): + raise NotImplementedError("Use module Crypto.Signature.DSS instead") + + def encrypt(self, plaintext, K): + raise NotImplementedError + + def decrypt(self, ciphertext): + raise NotImplementedError + + def blind(self, M, B): + raise NotImplementedError + + def unblind(self, M, B): + raise NotImplementedError + + def size(self): + raise NotImplementedError + + +def _generate_domain(L, randfunc): + """Generate a new set of DSA domain parameters""" + + N = { 1024:160, 2048:224, 3072:256 }.get(L) + if N is None: + raise ValueError("Invalid modulus length (%d)" % L) + + outlen = SHA256.digest_size * 8 + n = (L + outlen - 1) // outlen - 1 # ceil(L/outlen) -1 + b_ = L - 1 - (n * outlen) + + # Generate q (A.1.1.2) + q = Integer(4) + upper_bit = 1 << (N - 1) + while test_probable_prime(q, randfunc) != PROBABLY_PRIME: + seed = randfunc(64) + U = Integer.from_bytes(SHA256.new(seed).digest()) & (upper_bit - 1) + q = U | upper_bit | 1 + + assert(q.size_in_bits() == N) + + # Generate p (A.1.1.2) + offset = 1 + upper_bit = 1 << (L - 1) + while True: + V = [ SHA256.new(seed + Integer(offset + j).to_bytes()).digest() + for j in iter_range(n + 1) ] + V = [ Integer.from_bytes(v) for v in V ] + W = sum([V[i] * (1 << (i * outlen)) for i in iter_range(n)], + (V[n] & ((1 << b_) - 1)) * (1 << (n * outlen))) + + X = Integer(W + upper_bit) # 2^{L-1} < X < 2^{L} + assert(X.size_in_bits() == L) + + c = X % (q * 2) + p = X - (c - 1) # 2q divides (p-1) + if p.size_in_bits() == L and \ + test_probable_prime(p, randfunc) == PROBABLY_PRIME: + break + offset += n + 1 + + # Generate g (A.2.3, index=1) + e = (p - 1) // q + for count in itertools.count(1): + U = seed + b"ggen" + bchr(1) + Integer(count).to_bytes() + W = Integer.from_bytes(SHA256.new(U).digest()) + g = pow(W, e, p) + if g != 1: + break + + return (p, q, g, seed) + + +def generate(bits, randfunc=None, domain=None): + """Generate a new DSA key pair. + + The algorithm follows Appendix A.1/A.2 and B.1 of `FIPS 186-4`_, + respectively for domain generation and key pair generation. + + Args: + bits (integer): + Key length, or size (in bits) of the DSA modulus *p*. + It must be 1024, 2048 or 3072. + + randfunc (callable): + Random number generation function; it accepts a single integer N + and return a string of random data N bytes long. + If not specified, :func:`Crypto.Random.get_random_bytes` is used. + + domain (tuple): + The DSA domain parameters *p*, *q* and *g* as a list of 3 + integers. Size of *p* and *q* must comply to `FIPS 186-4`_. + If not specified, the parameters are created anew. + + Returns: + :class:`DsaKey` : a new DSA key object + + Raises: + ValueError : when **bits** is too little, too big, or not a multiple of 64. + + .. 
_FIPS 186-4: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf + """ + + if randfunc is None: + randfunc = Random.get_random_bytes + + if domain: + p, q, g = map(Integer, domain) + + ## Perform consistency check on domain parameters + # P and Q must be prime + fmt_error = test_probable_prime(p) == COMPOSITE + fmt_error |= test_probable_prime(q) == COMPOSITE + # Verify Lagrange's theorem for sub-group + fmt_error |= ((p - 1) % q) != 0 + fmt_error |= g <= 1 or g >= p + fmt_error |= pow(g, q, p) != 1 + if fmt_error: + raise ValueError("Invalid DSA domain parameters") + else: + p, q, g, _ = _generate_domain(bits, randfunc) + + L = p.size_in_bits() + N = q.size_in_bits() + + if L != bits: + raise ValueError("Mismatch between size of modulus (%d)" + " and 'bits' parameter (%d)" % (L, bits)) + + if (L, N) not in [(1024, 160), (2048, 224), + (2048, 256), (3072, 256)]: + raise ValueError("Lengths of p and q (%d, %d) are not compatible" + "to FIPS 186-3" % (L, N)) + + if not 1 < g < p: + raise ValueError("Incorrent DSA generator") + + # B.1.1 + c = Integer.random(exact_bits=N + 64, randfunc=randfunc) + x = c % (q - 1) + 1 # 1 <= x <= q-1 + y = pow(g, x, p) + + key_dict = { 'y':y, 'g':g, 'p':p, 'q':q, 'x':x } + return DsaKey(key_dict) + + +def construct(tup, consistency_check=True): + """Construct a DSA key from a tuple of valid DSA components. + + Args: + tup (tuple): + A tuple of long integers, with 4 or 5 items + in the following order: + + 1. Public key (*y*). + 2. Sub-group generator (*g*). + 3. Modulus, finite field order (*p*). + 4. Sub-group order (*q*). + 5. Private key (*x*). Optional. + + consistency_check (boolean): + If ``True``, the library will verify that the provided components + fulfil the main DSA properties. + + Raises: + ValueError: when the key being imported fails the most basic DSA validity checks. 
+ + Returns: + :class:`DsaKey` : a DSA key object + """ + + key_dict = dict(zip(('y', 'g', 'p', 'q', 'x'), map(Integer, tup))) + key = DsaKey(key_dict) + + fmt_error = False + if consistency_check: + # P and Q must be prime + fmt_error = test_probable_prime(key.p) == COMPOSITE + fmt_error |= test_probable_prime(key.q) == COMPOSITE + # Verify Lagrange's theorem for sub-group + fmt_error |= ((key.p - 1) % key.q) != 0 + fmt_error |= key.g <= 1 or key.g >= key.p + fmt_error |= pow(key.g, key.q, key.p) != 1 + # Public key + fmt_error |= key.y <= 0 or key.y >= key.p + if hasattr(key, 'x'): + fmt_error |= key.x <= 0 or key.x >= key.q + fmt_error |= pow(key.g, key.x, key.p) != key.y + + if fmt_error: + raise ValueError("Invalid DSA key components") + + return key + + +# Dss-Parms ::= SEQUENCE { +# p OCTET STRING, +# q OCTET STRING, +# g OCTET STRING +# } +# DSAPublicKey ::= INTEGER -- public key, y + +def _import_openssl_private(encoded, passphrase, params): + if params: + raise ValueError("DSA private key already comes with parameters") + der = DerSequence().decode(encoded, nr_elements=6, only_ints_expected=True) + if der[0] != 0: + raise ValueError("No version found") + tup = [der[comp] for comp in (4, 3, 1, 2, 5)] + return construct(tup) + + +def _import_subjectPublicKeyInfo(encoded, passphrase, params): + + algoid, encoded_key, emb_params = _expand_subject_public_key_info(encoded) + if algoid != oid: + raise ValueError("No DSA subjectPublicKeyInfo") + if params and emb_params: + raise ValueError("Too many DSA parameters") + + y = DerInteger().decode(encoded_key).value + p, q, g = list(DerSequence().decode(params or emb_params)) + tup = (y, g, p, q) + return construct(tup) + + +def _import_x509_cert(encoded, passphrase, params): + + sp_info = _extract_subject_public_key_info(encoded) + return _import_subjectPublicKeyInfo(sp_info, None, params) + + +def _import_pkcs8(encoded, passphrase, params): + if params: + raise ValueError("PKCS#8 already includes parameters") + k = PKCS8.unwrap(encoded, passphrase) + if k[0] != oid: + raise ValueError("No PKCS#8 encoded DSA key") + x = DerInteger().decode(k[1]).value + p, q, g = list(DerSequence().decode(k[2])) + tup = (pow(g, x, p), g, p, q, x) + return construct(tup) + + +def _import_key_der(key_data, passphrase, params): + """Import a DSA key (public or private half), encoded in DER form.""" + + decodings = (_import_openssl_private, + _import_subjectPublicKeyInfo, + _import_x509_cert, + _import_pkcs8) + + for decoding in decodings: + try: + return decoding(key_data, passphrase, params) + except ValueError: + pass + + raise ValueError("DSA key format is not supported") + + +def import_key(extern_key, passphrase=None): + """Import a DSA key. + + Args: + extern_key (string or byte string): + The DSA key to import. + + The following formats are supported for a DSA **public** key: + + - X.509 certificate (binary DER or PEM) + - X.509 ``subjectPublicKeyInfo`` (binary DER or PEM) + - OpenSSH (ASCII one-liner, see `RFC4253`_) + + The following formats are supported for a DSA **private** key: + + - `PKCS#8`_ ``PrivateKeyInfo`` or ``EncryptedPrivateKeyInfo`` + DER SEQUENCE (binary or PEM) + - OpenSSL/OpenSSH custom format (binary or PEM) + + For details about the PEM encoding, see `RFC1421`_/`RFC1423`_. + + passphrase (string): + In case of an encrypted private key, this is the pass phrase + from which the decryption key is derived. + + Encryption may be applied either at the `PKCS#8`_ or at the PEM level. 
+ + Returns: + :class:`DsaKey` : a DSA key object + + Raises: + ValueError : when the given key cannot be parsed (possibly because + the pass phrase is wrong). + + .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt + .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt + .. _RFC4253: http://www.ietf.org/rfc/rfc4253.txt + .. _PKCS#8: http://www.ietf.org/rfc/rfc5208.txt + """ + + extern_key = tobytes(extern_key) + if passphrase is not None: + passphrase = tobytes(passphrase) + + if extern_key.startswith(b'-----'): + # This is probably a PEM encoded key + (der, marker, enc_flag) = PEM.decode(tostr(extern_key), passphrase) + if enc_flag: + passphrase = None + return _import_key_der(der, passphrase, None) + + if extern_key.startswith(b'ssh-dss '): + # This is probably a public OpenSSH key + keystring = binascii.a2b_base64(extern_key.split(b' ')[1]) + keyparts = [] + while len(keystring) > 4: + length = struct.unpack(">I", keystring[:4])[0] + keyparts.append(keystring[4:4 + length]) + keystring = keystring[4 + length:] + if keyparts[0] == b"ssh-dss": + tup = [Integer.from_bytes(keyparts[x]) for x in (4, 3, 1, 2)] + return construct(tup) + + if len(extern_key) > 0 and bord(extern_key[0]) == 0x30: + # This is probably a DER encoded key + return _import_key_der(extern_key, passphrase, None) + + raise ValueError("DSA key format is not supported") + + +# Backward compatibility +importKey = import_key + +#: `Object ID`_ for a DSA key. +#: +#: id-dsa ID ::= { iso(1) member-body(2) us(840) x9-57(10040) x9cm(4) 1 } +#: +#: .. _`Object ID`: http://www.alvestrand.no/objectid/1.2.840.10040.4.1.html +oid = "1.2.840.10040.4.1" diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/DSA.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/DSA.pyi new file mode 100644 index 000000000..354ac1f02 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/DSA.pyi @@ -0,0 +1,31 @@ +from typing import Dict, Tuple, Callable, Union, Optional + +__all__ = ['generate', 'construct', 'DsaKey', 'import_key' ] + +RNG = Callable[[int], bytes] + +class DsaKey(object): + def __init__(self, key_dict: Dict[str, int]) -> None: ... + def has_private(self) -> bool: ... + def can_encrypt(self) -> bool: ... # legacy + def can_sign(self) -> bool: ... # legacy + def public_key(self) -> DsaKey: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __getstate__(self) -> None: ... + def domain(self) -> Tuple[int, int, int]: ... + def __repr__(self) -> str: ... + def __getattr__(self, item: str) -> int: ... + def export_key(self, format: Optional[str]="PEM", pkcs8: Optional[bool]=None, passphrase: Optional[str]=None, + protection: Optional[str]=None, randfunc: Optional[RNG]=None) -> bytes: ... + # Backward-compatibility + exportKey = export_key + publickey = public_key + +def generate(bits: int, randfunc: Optional[RNG]=None, domain: Optional[Tuple[int, int, int]]=None) -> DsaKey: ... +def construct(tup: Union[Tuple[int, int, int, int], Tuple[int, int, int, int, int]], consistency_check: Optional[bool]=True) -> DsaKey: ... +def import_key(extern_key: Union[str, bytes], passphrase: Optional[str]=None) -> DsaKey: ... 
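A usage sketch that ties the DSA pieces together; as DsaKey.sign()/verify() state above, actual signing goes through Crypto.Signature.DSS:

    from Crypto.PublicKey import DSA
    from Crypto.Signature import DSS
    from Crypto.Hash import SHA256

    key = DSA.generate(2048)                              # fresh FIPS 186-4 key pair
    pub = DSA.import_key(key.publickey().export_key())    # PEM round trip of the public half

    h = SHA256.new(b"message to sign")
    signature = DSS.new(key, 'fips-186-3').sign(h)
    DSS.new(pub, 'fips-186-3').verify(h, signature)       # raises ValueError if invalid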
+# Backward compatibility +importKey = import_key + +oid: str diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/ECC.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/ECC.py new file mode 100644 index 000000000..daddc4718 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/ECC.py @@ -0,0 +1,1818 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +from __future__ import print_function + +import re +import struct +import binascii +from collections import namedtuple + +from Crypto.Util.py3compat import bord, tobytes, tostr, bchr, is_string +from Crypto.Util.number import bytes_to_long, long_to_bytes + +from Crypto.Math.Numbers import Integer +from Crypto.Util.asn1 import (DerObjectId, DerOctetString, DerSequence, + DerBitString) + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, + SmartPointer, c_size_t, c_uint8_ptr, + c_ulonglong, null_pointer) + +from Crypto.PublicKey import (_expand_subject_public_key_info, + _create_subject_public_key_info, + _extract_subject_public_key_info) + +from Crypto.Hash import SHA512, SHAKE256 + +from Crypto.Random import get_random_bytes +from Crypto.Random.random import getrandbits + + +_ec_lib = load_pycryptodome_raw_lib("Crypto.PublicKey._ec_ws", """ +typedef void EcContext; +typedef void EcPoint; +int ec_ws_new_context(EcContext **pec_ctx, + const uint8_t *modulus, + const uint8_t *b, + const uint8_t *order, + size_t len, + uint64_t seed); +void ec_free_context(EcContext *ec_ctx); +int ec_ws_new_point(EcPoint **pecp, + const uint8_t *x, + const uint8_t *y, + size_t len, + const EcContext *ec_ctx); +void ec_ws_free_point(EcPoint *ecp); +int ec_ws_get_xy(uint8_t *x, + uint8_t *y, + size_t len, + const EcPoint *ecp); +int ec_ws_double(EcPoint *p); +int ec_ws_add(EcPoint *ecpa, EcPoint *ecpb); +int ec_ws_scalar(EcPoint *ecp, + const uint8_t *k, + size_t len, + uint64_t seed); +int ec_ws_clone(EcPoint **pecp2, const EcPoint *ecp); +int ec_ws_cmp(const EcPoint *ecp1, const EcPoint *ecp2); +int ec_ws_neg(EcPoint *p); +""") + +_ed25519_lib = load_pycryptodome_raw_lib("Crypto.PublicKey._ed25519", """ +typedef void Point; +int ed25519_new_point(Point **out, + const uint8_t x[32], + const uint8_t y[32], + size_t modsize, + const void *context); +int ed25519_clone(Point **P, const Point *Q); +void ed25519_free_point(Point *p); +int ed25519_cmp(const Point *p1, const Point *p2); +int ed25519_neg(Point *p); +int ed25519_get_xy(uint8_t *xb, uint8_t *yb, size_t modsize, Point *p); +int ed25519_double(Point *p); +int ed25519_add(Point *P1, const Point *P2); +int ed25519_scalar(Point *P, const uint8_t *scalar, size_t scalar_len, uint64_t seed); +""") + +_ed448_lib = load_pycryptodome_raw_lib("Crypto.PublicKey._ed448", """ +typedef void EcContext; +typedef void PointEd448; +int ed448_new_context(EcContext **pec_ctx); +void ed448_context(EcContext *ec_ctx); +void ed448_free_context(EcContext *ec_ctx); +int ed448_new_point(PointEd448 **out, + const uint8_t x[56], + const uint8_t y[56], + size_t len, + const EcContext *context); +int ed448_clone(PointEd448 **P, const PointEd448 *Q); +void ed448_free_point(PointEd448 *p); +int ed448_cmp(const PointEd448 *p1, const PointEd448 *p2); +int ed448_neg(PointEd448 *p); +int ed448_get_xy(uint8_t *xb, uint8_t *yb, size_t len, const PointEd448 *p); +int ed448_double(PointEd448 *p); +int ed448_add(PointEd448 *P1, const PointEd448 *P2); +int ed448_scalar(PointEd448 *P, const uint8_t *scalar, size_t scalar_len, uint64_t seed); +""") + + +def lib_func(ecc_obj, func_name): + if ecc_obj._curve.desc == "Ed25519": + result = getattr(_ed25519_lib, "ed25519_" + func_name) + elif ecc_obj._curve.desc == "Ed448": + result = getattr(_ed448_lib, "ed448_" + func_name) + else: + result = getattr(_ec_lib, "ec_ws_" + func_name) + return result + +# +# _curves is a database of curve parameters. 
Items are indexed by their +# human-friendly name, suchas "P-256". Each item has the following fields: +# - p: the prime number that defines the finite field for all modulo operations +# - b: the constant in the Short Weierstrass curve equation +# - order: the number of elements in the group with the generator below +# - Gx the affine coordinate X of the generator point +# - Gy the affine coordinate Y of the generator point +# - G the generator, as an EccPoint object +# - modulus_bits the minimum number of bits for encoding the modulus p +# - oid an ASCII string with the registered ASN.1 Object ID +# - context a raw pointer to memory holding a context for all curve operations (can be NULL) +# - desc an ASCII string describing the curve +# - openssh the ASCII string used in OpenSSH id files for public keys on this curve +# - name the ASCII string which is also a valid key in _curves + + +_Curve = namedtuple("_Curve", "p b order Gx Gy G modulus_bits oid context desc openssh name") +_curves = {} + + +p192_names = ["p192", "NIST P-192", "P-192", "prime192v1", "secp192r1", + "nistp192"] + + +def init_p192(): + p = 0xfffffffffffffffffffffffffffffffeffffffffffffffff + b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1 + order = 0xffffffffffffffffffffffff99def836146bc9b1b4d22831 + Gx = 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012 + Gy = 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811 + + p192_modulus = long_to_bytes(p, 24) + p192_b = long_to_bytes(b, 24) + p192_order = long_to_bytes(order, 24) + + ec_p192_context = VoidPointer() + result = _ec_lib.ec_ws_new_context(ec_p192_context.address_of(), + c_uint8_ptr(p192_modulus), + c_uint8_ptr(p192_b), + c_uint8_ptr(p192_order), + c_size_t(len(p192_modulus)), + c_ulonglong(getrandbits(64)) + ) + if result: + raise ImportError("Error %d initializing P-192 context" % result) + + context = SmartPointer(ec_p192_context.get(), _ec_lib.ec_free_context) + p192 = _Curve(Integer(p), + Integer(b), + Integer(order), + Integer(Gx), + Integer(Gy), + None, + 192, + "1.2.840.10045.3.1.1", # ANSI X9.62 / SEC2 + context, + "NIST P-192", + "ecdsa-sha2-nistp192", + "p192") + global p192_names + _curves.update(dict.fromkeys(p192_names, p192)) + + +init_p192() +del init_p192 + + +p224_names = ["p224", "NIST P-224", "P-224", "prime224v1", "secp224r1", + "nistp224"] + + +def init_p224(): + p = 0xffffffffffffffffffffffffffffffff000000000000000000000001 + b = 0xb4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4 + order = 0xffffffffffffffffffffffffffff16a2e0b8f03e13dd29455c5c2a3d + Gx = 0xb70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21 + Gy = 0xbd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34 + + p224_modulus = long_to_bytes(p, 28) + p224_b = long_to_bytes(b, 28) + p224_order = long_to_bytes(order, 28) + + ec_p224_context = VoidPointer() + result = _ec_lib.ec_ws_new_context(ec_p224_context.address_of(), + c_uint8_ptr(p224_modulus), + c_uint8_ptr(p224_b), + c_uint8_ptr(p224_order), + c_size_t(len(p224_modulus)), + c_ulonglong(getrandbits(64)) + ) + if result: + raise ImportError("Error %d initializing P-224 context" % result) + + context = SmartPointer(ec_p224_context.get(), _ec_lib.ec_free_context) + p224 = _Curve(Integer(p), + Integer(b), + Integer(order), + Integer(Gx), + Integer(Gy), + None, + 224, + "1.3.132.0.33", # SEC 2 + context, + "NIST P-224", + "ecdsa-sha2-nistp224", + "p224") + global p224_names + _curves.update(dict.fromkeys(p224_names, p224)) + + +init_p224() +del init_p224 + + +p256_names = ["p256", "NIST P-256", "P-256", 
"prime256v1", "secp256r1", + "nistp256"] + + +def init_p256(): + p = 0xffffffff00000001000000000000000000000000ffffffffffffffffffffffff + b = 0x5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b + order = 0xffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551 + Gx = 0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296 + Gy = 0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5 + + p256_modulus = long_to_bytes(p, 32) + p256_b = long_to_bytes(b, 32) + p256_order = long_to_bytes(order, 32) + + ec_p256_context = VoidPointer() + result = _ec_lib.ec_ws_new_context(ec_p256_context.address_of(), + c_uint8_ptr(p256_modulus), + c_uint8_ptr(p256_b), + c_uint8_ptr(p256_order), + c_size_t(len(p256_modulus)), + c_ulonglong(getrandbits(64)) + ) + if result: + raise ImportError("Error %d initializing P-256 context" % result) + + context = SmartPointer(ec_p256_context.get(), _ec_lib.ec_free_context) + p256 = _Curve(Integer(p), + Integer(b), + Integer(order), + Integer(Gx), + Integer(Gy), + None, + 256, + "1.2.840.10045.3.1.7", # ANSI X9.62 / SEC2 + context, + "NIST P-256", + "ecdsa-sha2-nistp256", + "p256") + global p256_names + _curves.update(dict.fromkeys(p256_names, p256)) + + +init_p256() +del init_p256 + + +p384_names = ["p384", "NIST P-384", "P-384", "prime384v1", "secp384r1", + "nistp384"] + + +def init_p384(): + p = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeffffffff0000000000000000ffffffff + b = 0xb3312fa7e23ee7e4988e056be3f82d19181d9c6efe8141120314088f5013875ac656398d8a2ed19d2a85c8edd3ec2aef + order = 0xffffffffffffffffffffffffffffffffffffffffffffffffc7634d81f4372ddf581a0db248b0a77aecec196accc52973 + Gx = 0xaa87ca22be8b05378eb1c71ef320ad746e1d3b628ba79b9859f741e082542a385502f25dbf55296c3a545e3872760aB7 + Gy = 0x3617de4a96262c6f5d9e98bf9292dc29f8f41dbd289a147ce9da3113b5f0b8c00a60b1ce1d7e819d7a431d7c90ea0e5F + + p384_modulus = long_to_bytes(p, 48) + p384_b = long_to_bytes(b, 48) + p384_order = long_to_bytes(order, 48) + + ec_p384_context = VoidPointer() + result = _ec_lib.ec_ws_new_context(ec_p384_context.address_of(), + c_uint8_ptr(p384_modulus), + c_uint8_ptr(p384_b), + c_uint8_ptr(p384_order), + c_size_t(len(p384_modulus)), + c_ulonglong(getrandbits(64)) + ) + if result: + raise ImportError("Error %d initializing P-384 context" % result) + + context = SmartPointer(ec_p384_context.get(), _ec_lib.ec_free_context) + p384 = _Curve(Integer(p), + Integer(b), + Integer(order), + Integer(Gx), + Integer(Gy), + None, + 384, + "1.3.132.0.34", # SEC 2 + context, + "NIST P-384", + "ecdsa-sha2-nistp384", + "p384") + global p384_names + _curves.update(dict.fromkeys(p384_names, p384)) + + +init_p384() +del init_p384 + + +p521_names = ["p521", "NIST P-521", "P-521", "prime521v1", "secp521r1", + "nistp521"] + + +def init_p521(): + p = 0x000001ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff + b = 0x00000051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef109e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00 + order = 0x000001fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffa51868783bf2f966b7fcc0148f709a5d03bb5c9b8899c47aebb6fb71e91386409 + Gx = 0x000000c6858e06b70404e9cd9e3ecb662395b4429c648139053fb521f828af606b4d3dbaa14b5e77efe75928fe1dc127a2ffa8de3348b3c1856a429bf97e7e31c2e5bd66 + Gy = 
0x0000011839296a789a3bc0045c8a5fb42c7d1bd998f54449579b446817afbd17273e662c97ee72995ef42640c550b9013fad0761353c7086a272c24088be94769fd16650 + + p521_modulus = long_to_bytes(p, 66) + p521_b = long_to_bytes(b, 66) + p521_order = long_to_bytes(order, 66) + + ec_p521_context = VoidPointer() + result = _ec_lib.ec_ws_new_context(ec_p521_context.address_of(), + c_uint8_ptr(p521_modulus), + c_uint8_ptr(p521_b), + c_uint8_ptr(p521_order), + c_size_t(len(p521_modulus)), + c_ulonglong(getrandbits(64)) + ) + if result: + raise ImportError("Error %d initializing P-521 context" % result) + + context = SmartPointer(ec_p521_context.get(), _ec_lib.ec_free_context) + p521 = _Curve(Integer(p), + Integer(b), + Integer(order), + Integer(Gx), + Integer(Gy), + None, + 521, + "1.3.132.0.35", # SEC 2 + context, + "NIST P-521", + "ecdsa-sha2-nistp521", + "p521") + global p521_names + _curves.update(dict.fromkeys(p521_names, p521)) + + +init_p521() +del init_p521 + + +ed25519_names = ["ed25519", "Ed25519"] + + +def init_ed25519(): + p = 0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffed # 2**255 - 19 + order = 0x1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3ed + Gx = 0x216936d3cd6e53fec0a4e231fdd6dc5c692cc7609525a7b2c9562d608f25d51a + Gy = 0x6666666666666666666666666666666666666666666666666666666666666658 + + ed25519 = _Curve(Integer(p), + None, + Integer(order), + Integer(Gx), + Integer(Gy), + None, + 255, + "1.3.101.112", # RFC8410 + None, + "Ed25519", # Used throughout; do not change + "ssh-ed25519", + "ed25519") + global ed25519_names + _curves.update(dict.fromkeys(ed25519_names, ed25519)) + + +init_ed25519() +del init_ed25519 + + +ed448_names = ["ed448", "Ed448"] + + +def init_ed448(): + p = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffeffffffffffffffffffffffffffffffffffffffffffffffffffffffff # 2**448 - 2**224 - 1 + order = 0x3fffffffffffffffffffffffffffffffffffffffffffffffffffffff7cca23e9c44edb49aed63690216cc2728dc58f552378c292ab5844f3 + Gx = 0x4f1970c66bed0ded221d15a622bf36da9e146570470f1767ea6de324a3d3a46412ae1af72ab66511433b80e18b00938e2626a82bc70cc05e + Gy = 0x693f46716eb6bc248876203756c9c7624bea73736ca3984087789c1e05a0c2d73ad3ff1ce67c39c4fdbd132c4ed7c8ad9808795bf230fa14 + + ed448_context = VoidPointer() + result = _ed448_lib.ed448_new_context(ed448_context.address_of()) + if result: + raise ImportError("Error %d initializing Ed448 context" % result) + + context = SmartPointer(ed448_context.get(), _ed448_lib.ed448_free_context) + + ed448 = _Curve(Integer(p), + None, + Integer(order), + Integer(Gx), + Integer(Gy), + None, + 448, + "1.3.101.113", # RFC8410 + context, + "Ed448", # Used throughout; do not change + None, + "ed448") + global ed448_names + _curves.update(dict.fromkeys(ed448_names, ed448)) + + +init_ed448() +del init_ed448 + + +class UnsupportedEccFeature(ValueError): + pass + + +class EccPoint(object): + """A class to model a point on an Elliptic Curve. 
+ + The class supports operators for: + + * Adding two points: ``R = S + T`` + * In-place addition: ``S += T`` + * Negating a point: ``R = -T`` + * Comparing two points: ``if S == T: ...`` or ``if S != T: ...`` + * Multiplying a point by a scalar: ``R = S*k`` + * In-place multiplication by a scalar: ``T *= k`` + + :ivar x: The affine X-coordinate of the ECC point + :vartype x: integer + + :ivar y: The affine Y-coordinate of the ECC point + :vartype y: integer + + :ivar xy: The tuple with affine X- and Y- coordinates + """ + + def __init__(self, x, y, curve="p256"): + + try: + self._curve = _curves[curve] + except KeyError: + raise ValueError("Unknown curve name %s" % str(curve)) + self._curve_name = curve + + modulus_bytes = self.size_in_bytes() + + xb = long_to_bytes(x, modulus_bytes) + yb = long_to_bytes(y, modulus_bytes) + if len(xb) != modulus_bytes or len(yb) != modulus_bytes: + raise ValueError("Incorrect coordinate length") + + new_point = lib_func(self, "new_point") + free_func = lib_func(self, "free_point") + + self._point = VoidPointer() + try: + context = self._curve.context.get() + except AttributeError: + context = null_pointer + result = new_point(self._point.address_of(), + c_uint8_ptr(xb), + c_uint8_ptr(yb), + c_size_t(modulus_bytes), + context) + + if result: + if result == 15: + raise ValueError("The EC point does not belong to the curve") + raise ValueError("Error %d while instantiating an EC point" % result) + + # Ensure that object disposal of this Python object will (eventually) + # free the memory allocated by the raw library for the EC point + self._point = SmartPointer(self._point.get(), free_func) + + def set(self, point): + clone = lib_func(self, "clone") + free_func = lib_func(self, "free_point") + + self._point = VoidPointer() + result = clone(self._point.address_of(), + point._point.get()) + + if result: + raise ValueError("Error %d while cloning an EC point" % result) + + self._point = SmartPointer(self._point.get(), free_func) + return self + + def __eq__(self, point): + if not isinstance(point, EccPoint): + return False + + cmp_func = lib_func(self, "cmp") + return 0 == cmp_func(self._point.get(), point._point.get()) + + # Only needed for Python 2 + def __ne__(self, point): + return not self == point + + def __neg__(self): + neg_func = lib_func(self, "neg") + np = self.copy() + result = neg_func(np._point.get()) + if result: + raise ValueError("Error %d while inverting an EC point" % result) + return np + + def copy(self): + """Return a copy of this point.""" + x, y = self.xy + np = EccPoint(x, y, self._curve_name) + return np + + def _is_eddsa(self): + return self._curve.name in ("ed25519", "ed448") + + def is_point_at_infinity(self): + """``True`` if this is the *point-at-infinity*.""" + + if self._is_eddsa(): + return self.x == 0 + else: + return self.xy == (0, 0) + + def point_at_infinity(self): + """Return the *point-at-infinity* for the curve.""" + + if self._is_eddsa(): + return EccPoint(0, 1, self._curve_name) + else: + return EccPoint(0, 0, self._curve_name) + + @property + def x(self): + return self.xy[0] + + @property + def y(self): + return self.xy[1] + + @property + def xy(self): + modulus_bytes = self.size_in_bytes() + xb = bytearray(modulus_bytes) + yb = bytearray(modulus_bytes) + get_xy = lib_func(self, "get_xy") + result = get_xy(c_uint8_ptr(xb), + c_uint8_ptr(yb), + c_size_t(modulus_bytes), + self._point.get()) + if result: + raise ValueError("Error %d while encoding an EC point" % result) + + return (Integer(bytes_to_long(xb)), 
Integer(bytes_to_long(yb))) + + def size_in_bytes(self): + """Size of each coordinate, in bytes.""" + return (self.size_in_bits() + 7) // 8 + + def size_in_bits(self): + """Size of each coordinate, in bits.""" + return self._curve.modulus_bits + + def double(self): + """Double this point (in-place operation). + + Returns: + This same object (to enable chaining). + """ + + double_func = lib_func(self, "double") + result = double_func(self._point.get()) + if result: + raise ValueError("Error %d while doubling an EC point" % result) + return self + + def __iadd__(self, point): + """Add a second point to this one""" + + add_func = lib_func(self, "add") + result = add_func(self._point.get(), point._point.get()) + if result: + if result == 16: + raise ValueError("EC points are not on the same curve") + raise ValueError("Error %d while adding two EC points" % result) + return self + + def __add__(self, point): + """Return a new point, the addition of this one and another""" + + np = self.copy() + np += point + return np + + def __imul__(self, scalar): + """Multiply this point by a scalar""" + + scalar_func = lib_func(self, "scalar") + if scalar < 0: + raise ValueError("Scalar multiplication is only defined for non-negative integers") + sb = long_to_bytes(scalar) + result = scalar_func(self._point.get(), + c_uint8_ptr(sb), + c_size_t(len(sb)), + c_ulonglong(getrandbits(64))) + if result: + raise ValueError("Error %d during scalar multiplication" % result) + return self + + def __mul__(self, scalar): + """Return a new point, the scalar product of this one""" + + np = self.copy() + np *= scalar + return np + + def __rmul__(self, left_hand): + return self.__mul__(left_hand) + + +# Last piece of initialization +p192_G = EccPoint(_curves['p192'].Gx, _curves['p192'].Gy, "p192") +p192 = _curves['p192']._replace(G=p192_G) +_curves.update(dict.fromkeys(p192_names, p192)) +del p192_G, p192, p192_names + +p224_G = EccPoint(_curves['p224'].Gx, _curves['p224'].Gy, "p224") +p224 = _curves['p224']._replace(G=p224_G) +_curves.update(dict.fromkeys(p224_names, p224)) +del p224_G, p224, p224_names + +p256_G = EccPoint(_curves['p256'].Gx, _curves['p256'].Gy, "p256") +p256 = _curves['p256']._replace(G=p256_G) +_curves.update(dict.fromkeys(p256_names, p256)) +del p256_G, p256, p256_names + +p384_G = EccPoint(_curves['p384'].Gx, _curves['p384'].Gy, "p384") +p384 = _curves['p384']._replace(G=p384_G) +_curves.update(dict.fromkeys(p384_names, p384)) +del p384_G, p384, p384_names + +p521_G = EccPoint(_curves['p521'].Gx, _curves['p521'].Gy, "p521") +p521 = _curves['p521']._replace(G=p521_G) +_curves.update(dict.fromkeys(p521_names, p521)) +del p521_G, p521, p521_names + +ed25519_G = EccPoint(_curves['Ed25519'].Gx, _curves['Ed25519'].Gy, "Ed25519") +ed25519 = _curves['Ed25519']._replace(G=ed25519_G) +_curves.update(dict.fromkeys(ed25519_names, ed25519)) +del ed25519_G, ed25519, ed25519_names + +ed448_G = EccPoint(_curves['Ed448'].Gx, _curves['Ed448'].Gy, "Ed448") +ed448 = _curves['Ed448']._replace(G=ed448_G) +_curves.update(dict.fromkeys(ed448_names, ed448)) +del ed448_G, ed448, ed448_names + + +class EccKey(object): + r"""Class defining an ECC key. + Do not instantiate directly. + Use :func:`generate`, :func:`construct` or :func:`import_key` instead. + + :ivar curve: The name of the curve as defined in the `ECC table`_. + :vartype curve: string + + :ivar pointQ: an ECC point representating the public component. 
+ + :vartype pointQ: :class:`EccPoint` + + :ivar d: A scalar that represents the private component + in NIST P curves. It is smaller than the + order of the generator point. + :vartype d: integer + + :ivar seed: A seed that represents the private component + in EdDSA curves + (Ed25519, 32 bytes; Ed448, 57 bytes). + :vartype seed: bytes + """ + + def __init__(self, **kwargs): + """Create a new ECC key + + Keywords: + curve : string + The name of the curve. + d : integer + Mandatory for a private key on NIST P curves. + It must be in the range ``[1..order-1]``. + seed : bytes + Mandatory for a private key on the Ed25519 (32 bytes) + or Ed448 (57 bytes) curve. + point : EccPoint + Mandatory for a public key. If provided for a private key, + the implementation will NOT check whether it matches ``d``. + + Only one parameter among ``d``, ``seed`` or ``point`` may be used. + """ + + kwargs_ = dict(kwargs) + curve_name = kwargs_.pop("curve", None) + self._d = kwargs_.pop("d", None) + self._seed = kwargs_.pop("seed", None) + self._point = kwargs_.pop("point", None) + if curve_name is None and self._point: + curve_name = self._point._curve_name + if kwargs_: + raise TypeError("Unknown parameters: " + str(kwargs_)) + + if curve_name not in _curves: + raise ValueError("Unsupported curve (%s)" % curve_name) + self._curve = _curves[curve_name] + self.curve = self._curve.desc + + count = int(self._d is not None) + int(self._seed is not None) + + if count == 0: + if self._point is None: + raise ValueError("At least one of the parameters 'point', 'd' or 'seed' must be specified") + return + + if count == 2: + raise ValueError("Parameters d and seed are mutually exclusive") + + # NIST P curves work with d, EdDSA works with seed + + if not self._is_eddsa(): + if self._seed is not None: + raise ValueError("Parameter 'seed' can only be used with Ed25519 or Ed448") + self._d = Integer(self._d) + if not 1 <= self._d < self._curve.order: + raise ValueError("Parameter d must be an integer smaller than the curve order") + else: + if self._d is not None: + raise ValueError("Parameter d can only be used with NIST P curves") + # RFC 8032, 5.1.5 + if self._curve.name == "ed25519": + if len(self._seed) != 32: + raise ValueError("Parameter seed must be 32 bytes long for Ed25519") + seed_hash = SHA512.new(self._seed).digest() # h + self._prefix = seed_hash[32:] + tmp = bytearray(seed_hash[:32]) + tmp[0] &= 0xF8 + tmp[31] = (tmp[31] & 0x7F) | 0x40 + # RFC 8032, 5.2.5 + elif self._curve.name == "ed448": + if len(self._seed) != 57: + raise ValueError("Parameter seed must be 57 bytes long for Ed448") + seed_hash = SHAKE256.new(self._seed).read(114) # h + self._prefix = seed_hash[57:] + tmp = bytearray(seed_hash[:57]) + tmp[0] &= 0xFC + tmp[55] |= 0x80 + tmp[56] = 0 + self._d = Integer.from_bytes(tmp, byteorder='little') + + def _is_eddsa(self): + return self._curve.desc in ("Ed25519", "Ed448") + + def __eq__(self, other): + if not isinstance(other, EccKey): + return False + + if other.has_private() != self.has_private(): + return False + + return other.pointQ == self.pointQ + + def __repr__(self): + if self.has_private(): + if self._is_eddsa(): + extra = ", seed=%s" % tostr(binascii.hexlify(self._seed)) + else: + extra = ", d=%d" % int(self._d) + else: + extra = "" + x, y = self.pointQ.xy + return "EccKey(curve='%s', point_x=%d, point_y=%d%s)" % (self._curve.desc, x, y, extra) + + def has_private(self): + """``True`` if this key can be used for making signatures or decrypting data.""" + + return self._d is not None + + # 
ECDSA + def _sign(self, z, k): + assert 0 < k < self._curve.order + + order = self._curve.order + blind = Integer.random_range(min_inclusive=1, + max_exclusive=order) + + blind_d = self._d * blind + inv_blind_k = (blind * k).inverse(order) + + r = (self._curve.G * k).x % order + s = inv_blind_k * (blind * z + blind_d * r) % order + return (r, s) + + # ECDSA + def _verify(self, z, rs): + order = self._curve.order + sinv = rs[1].inverse(order) + point1 = self._curve.G * ((sinv * z) % order) + point2 = self.pointQ * ((sinv * rs[0]) % order) + return (point1 + point2).x == rs[0] + + @property + def d(self): + if not self.has_private(): + raise ValueError("This is not a private ECC key") + return self._d + + @property + def seed(self): + if not self.has_private(): + raise ValueError("This is not a private ECC key") + return self._seed + + @property + def pointQ(self): + if self._point is None: + self._point = self._curve.G * self._d + return self._point + + def public_key(self): + """A matching ECC public key. + + Returns: + a new :class:`EccKey` object + """ + + return EccKey(curve=self._curve.desc, point=self.pointQ) + + def _export_SEC1(self, compress): + if self._is_eddsa(): + raise ValueError("SEC1 format is unsupported for EdDSA curves") + + # See 2.2 in RFC5480 and 2.3.3 in SEC1 + # + # The first byte is: + # - 0x02: compressed, only X-coordinate, Y-coordinate is even + # - 0x03: compressed, only X-coordinate, Y-coordinate is odd + # - 0x04: uncompressed, X-coordinate is followed by Y-coordinate + # + # PAI is in theory encoded as 0x00. + + modulus_bytes = self.pointQ.size_in_bytes() + + if compress: + if self.pointQ.y.is_odd(): + first_byte = b'\x03' + else: + first_byte = b'\x02' + public_key = (first_byte + + self.pointQ.x.to_bytes(modulus_bytes)) + else: + public_key = (b'\x04' + + self.pointQ.x.to_bytes(modulus_bytes) + + self.pointQ.y.to_bytes(modulus_bytes)) + return public_key + + def _export_eddsa(self): + x, y = self.pointQ.xy + if self._curve.name == "ed25519": + result = bytearray(y.to_bytes(32, byteorder='little')) + result[31] = ((x & 1) << 7) | result[31] + elif self._curve.name == "ed448": + result = bytearray(y.to_bytes(57, byteorder='little')) + result[56] = (x & 1) << 7 + else: + raise ValueError("Not an EdDSA key to export") + return bytes(result) + + def _export_subjectPublicKeyInfo(self, compress): + if self._is_eddsa(): + oid = self._curve.oid + public_key = self._export_eddsa() + params = None + else: + oid = "1.2.840.10045.2.1" # unrestricted + public_key = self._export_SEC1(compress) + params = DerObjectId(self._curve.oid) + + return _create_subject_public_key_info(oid, + public_key, + params) + + def _export_rfc5915_private_der(self, include_ec_params=True): + + assert self.has_private() + + # ECPrivateKey ::= SEQUENCE { + # version INTEGER { ecPrivkeyVer1(1) } (ecPrivkeyVer1), + # privateKey OCTET STRING, + # parameters [0] ECParameters {{ NamedCurve }} OPTIONAL, + # publicKey [1] BIT STRING OPTIONAL + # } + + # Public key - uncompressed form + modulus_bytes = self.pointQ.size_in_bytes() + public_key = (b'\x04' + + self.pointQ.x.to_bytes(modulus_bytes) + + self.pointQ.y.to_bytes(modulus_bytes)) + + seq = [1, + DerOctetString(self.d.to_bytes(modulus_bytes)), + DerObjectId(self._curve.oid, explicit=0), + DerBitString(public_key, explicit=1)] + + if not include_ec_params: + del seq[2] + + return DerSequence(seq).encode() + + def _export_pkcs8(self, **kwargs): + from Crypto.IO import PKCS8 + + if kwargs.get('passphrase', None) is not None and 'protection' not in 
kwargs: + raise ValueError("At least the 'protection' parameter must be present") + + if self._is_eddsa(): + oid = self._curve.oid + private_key = DerOctetString(self._seed).encode() + params = None + else: + oid = "1.2.840.10045.2.1" # unrestricted + private_key = self._export_rfc5915_private_der(include_ec_params=False) + params = DerObjectId(self._curve.oid) + + result = PKCS8.wrap(private_key, + oid, + key_params=params, + **kwargs) + return result + + def _export_public_pem(self, compress): + from Crypto.IO import PEM + + encoded_der = self._export_subjectPublicKeyInfo(compress) + return PEM.encode(encoded_der, "PUBLIC KEY") + + def _export_private_pem(self, passphrase, **kwargs): + from Crypto.IO import PEM + + encoded_der = self._export_rfc5915_private_der() + return PEM.encode(encoded_der, "EC PRIVATE KEY", passphrase, **kwargs) + + def _export_private_clear_pkcs8_in_clear_pem(self): + from Crypto.IO import PEM + + encoded_der = self._export_pkcs8() + return PEM.encode(encoded_der, "PRIVATE KEY") + + def _export_private_encrypted_pkcs8_in_clear_pem(self, passphrase, **kwargs): + from Crypto.IO import PEM + + assert passphrase + if 'protection' not in kwargs: + raise ValueError("At least the 'protection' parameter should be present") + encoded_der = self._export_pkcs8(passphrase=passphrase, **kwargs) + return PEM.encode(encoded_der, "ENCRYPTED PRIVATE KEY") + + def _export_openssh(self, compress): + if self.has_private(): + raise ValueError("Cannot export OpenSSH private keys") + + desc = self._curve.openssh + + if desc is None: + raise ValueError("Cannot export %s keys as OpenSSH" % self._curve.name) + elif desc == "ssh-ed25519": + public_key = self._export_eddsa() + comps = (tobytes(desc), tobytes(public_key)) + else: + modulus_bytes = self.pointQ.size_in_bytes() + + if compress: + first_byte = 2 + self.pointQ.y.is_odd() + public_key = (bchr(first_byte) + + self.pointQ.x.to_bytes(modulus_bytes)) + else: + public_key = (b'\x04' + + self.pointQ.x.to_bytes(modulus_bytes) + + self.pointQ.y.to_bytes(modulus_bytes)) + + middle = desc.split("-")[2] + comps = (tobytes(desc), tobytes(middle), public_key) + + blob = b"".join([struct.pack(">I", len(x)) + x for x in comps]) + return desc + " " + tostr(binascii.b2a_base64(blob)) + + def export_key(self, **kwargs): + """Export this ECC key. + + Args: + format (string): + The output format: + + - ``'DER'``. The key will be encoded in ASN.1 DER format (binary). + For a public key, the ASN.1 ``subjectPublicKeyInfo`` structure + defined in `RFC5480`_ will be used. + For a private key, the ASN.1 ``ECPrivateKey`` structure defined + in `RFC5915`_ is used instead (possibly within a PKCS#8 envelope, + see the ``use_pkcs8`` flag below). + - ``'PEM'``. The key will be encoded in a PEM_ envelope (ASCII). + - ``'OpenSSH'``. The key will be encoded in the OpenSSH_ format + (ASCII, public keys only). + - ``'SEC1'``. The public key (i.e., the EC point) will be encoded + into ``bytes`` according to Section 2.3.3 of `SEC1`_ + (which is a subset of the older X9.62 ITU standard). + Only for NIST P-curves. + - ``'raw'``. The public key will be encoded as ``bytes``, + without any metadata. + + * For NIST P-curves: equivalent to ``'SEC1'``. + * For EdDSA curves: ``bytes`` in the format defined in `RFC8032`_. + + passphrase (bytes or string): + (*Private keys only*) The passphrase to protect the + private key. + + use_pkcs8 (boolean): + (*Private keys only*) + If ``True`` (default and recommended), the `PKCS#8`_ representation + will be used. 
It must be ``True`` for EdDSA curves. + + If ``False`` and a passphrase is present, the obsolete PEM + encryption will be used. + + protection (string): + When a private key is exported with password-protection + and PKCS#8 (both ``DER`` and ``PEM`` formats), this parameter MUST be + present. + For all possible protection schemes, + refer to :ref:`the encryption parameters of PKCS#8`. + It is recommended to use ``'PBKDF2WithHMAC-SHA512AndAES128-CBC'``. + + compress (boolean): + If ``True``, the method returns a more compact representation + of the public key, with the X-coordinate only. + + If ``False`` (default), the method returns the full public key. + + This parameter is ignored for EdDSA curves, as compression is + mandatory. + + prot_params (dict): + When a private key is exported with password-protection + and PKCS#8 (both ``DER`` and ``PEM`` formats), this dictionary + contains the parameters to use to derive the encryption key + from the passphrase. + For all possible values, + refer to :ref:`the encryption parameters of PKCS#8`. + The recommendation is to use ``{'iteration_count':21000}`` for PBKDF2, + and ``{'iteration_count':131072}`` for scrypt. + + .. warning:: + If you don't provide a passphrase, the private key will be + exported in the clear! + + .. note:: + When exporting a private key with password-protection and `PKCS#8`_ + (both ``DER`` and ``PEM`` formats), any extra parameters + to ``export_key()`` will be passed to :mod:`Crypto.IO.PKCS8`. + + .. _PEM: http://www.ietf.org/rfc/rfc1421.txt + .. _`PEM encryption`: http://www.ietf.org/rfc/rfc1423.txt + .. _OpenSSH: http://www.openssh.com/txt/rfc5656.txt + .. _RFC5480: https://tools.ietf.org/html/rfc5480 + .. _SEC1: https://www.secg.org/sec1-v2.pdf + + Returns: + A multi-line string (for ``'PEM'`` and ``'OpenSSH'``) or + ``bytes`` (for ``'DER'``, ``'SEC1'``, and ``'raw'``) with the encoded key.
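+ + As an illustrative sketch (the passphrase, protection scheme and iteration count are example values, not requirements), a private key could be exported as an encrypted PKCS#8 PEM with:: + + data = key.export_key(format='PEM', passphrase='secret', + protection='PBKDF2WithHMAC-SHA512AndAES128-CBC', + prot_params={'iteration_count': 21000})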
+ """ + + args = kwargs.copy() + ext_format = args.pop("format") + if ext_format not in ("PEM", "DER", "OpenSSH", "SEC1", "raw"): + raise ValueError("Unknown format '%s'" % ext_format) + + compress = args.pop("compress", False) + + if self.has_private(): + passphrase = args.pop("passphrase", None) + if is_string(passphrase): + passphrase = tobytes(passphrase) + if not passphrase: + raise ValueError("Empty passphrase") + use_pkcs8 = args.pop("use_pkcs8", True) + + if not use_pkcs8: + if self._is_eddsa(): + raise ValueError("'pkcs8' must be True for EdDSA curves") + if 'protection' in args: + raise ValueError("'protection' is only supported for PKCS#8") + + if ext_format == "PEM": + if use_pkcs8: + if passphrase: + return self._export_private_encrypted_pkcs8_in_clear_pem(passphrase, **args) + else: + return self._export_private_clear_pkcs8_in_clear_pem() + else: + return self._export_private_pem(passphrase, **args) + elif ext_format == "DER": + # DER + if passphrase and not use_pkcs8: + raise ValueError("Private keys can only be encrpyted with DER using PKCS#8") + if use_pkcs8: + return self._export_pkcs8(passphrase=passphrase, **args) + else: + return self._export_rfc5915_private_der() + else: + raise ValueError("Private keys cannot be exported " + "in the '%s' format" % ext_format) + else: # Public key + if args: + raise ValueError("Unexpected parameters: '%s'" % args) + if ext_format == "PEM": + return self._export_public_pem(compress) + elif ext_format == "DER": + return self._export_subjectPublicKeyInfo(compress) + elif ext_format == "SEC1": + return self._export_SEC1(compress) + elif ext_format == "raw": + if self._curve.name in ('ed25519', 'ed448'): + return self._export_eddsa() + else: + return self._export_SEC1(compress) + else: + return self._export_openssh(compress) + + +def generate(**kwargs): + """Generate a new private key on the given curve. + + Args: + + curve (string): + Mandatory. It must be a curve name defined in the `ECC table`_. + + randfunc (callable): + Optional. The RNG to read randomness from. + If ``None``, :func:`Crypto.Random.get_random_bytes` is used. + """ + + curve_name = kwargs.pop("curve") + curve = _curves[curve_name] + randfunc = kwargs.pop("randfunc", get_random_bytes) + if kwargs: + raise TypeError("Unknown parameters: " + str(kwargs)) + + if _curves[curve_name].name == "ed25519": + seed = randfunc(32) + new_key = EccKey(curve=curve_name, seed=seed) + elif _curves[curve_name].name == "ed448": + seed = randfunc(57) + new_key = EccKey(curve=curve_name, seed=seed) + else: + d = Integer.random_range(min_inclusive=1, + max_exclusive=curve.order, + randfunc=randfunc) + new_key = EccKey(curve=curve_name, d=d) + + return new_key + + +def construct(**kwargs): + """Build a new ECC key (private or public) starting + from some base components. + + In most cases, you will already have an existing key + which you can read in with :func:`import_key` instead + of this function. + + Args: + curve (string): + Mandatory. The name of the elliptic curve, as defined in the `ECC table`_. + + d (integer): + Mandatory for a private key and a NIST P-curve (e.g., P-256): + the integer in the range ``[1..order-1]`` that represents the key. + + seed (bytes): + Mandatory for a private key and an EdDSA curve. + It must be 32 bytes for Ed25519, and 57 bytes for Ed448. + + point_x (integer): + Mandatory for a public key: the X coordinate (affine) of the ECC point. + + point_y (integer): + Mandatory for a public key: the Y coordinate (affine) of the ECC point. 
+ + Returns: + :class:`EccKey` : a new ECC key object + """ + + curve_name = kwargs["curve"] + curve = _curves[curve_name] + point_x = kwargs.pop("point_x", None) + point_y = kwargs.pop("point_y", None) + + if "point" in kwargs: + raise TypeError("Unknown keyword: point") + + if None not in (point_x, point_y): + # ValueError is raised if the point is not on the curve + kwargs["point"] = EccPoint(point_x, point_y, curve_name) + + new_key = EccKey(**kwargs) + + # Validate that the private key matches the public one + # because EccKey will not do that automatically + if new_key.has_private() and 'point' in kwargs: + pub_key = curve.G * new_key.d + if pub_key.xy != (point_x, point_y): + raise ValueError("Private and public ECC keys do not match") + + return new_key + + +def _import_public_der(ec_point, curve_oid=None, curve_name=None): + """Convert an encoded EC point into an EccKey object + + ec_point: byte string with the EC point (SEC1-encoded) + curve_oid: string with the name the curve + curve_name: string with the OID of the curve + + Either curve_id or curve_name must be specified + + """ + + for _curve_name, curve in _curves.items(): + if curve_oid and curve.oid == curve_oid: + break + if curve_name == _curve_name: + break + else: + if curve_oid: + raise UnsupportedEccFeature("Unsupported ECC curve (OID: %s)" % curve_oid) + else: + raise UnsupportedEccFeature("Unsupported ECC curve (%s)" % curve_name) + + # See 2.2 in RFC5480 and 2.3.3 in SEC1 + # The first byte is: + # - 0x02: compressed, only X-coordinate, Y-coordinate is even + # - 0x03: compressed, only X-coordinate, Y-coordinate is odd + # - 0x04: uncompressed, X-coordinate is followed by Y-coordinate + # + # PAI is in theory encoded as 0x00. + + modulus_bytes = curve.p.size_in_bytes() + point_type = bord(ec_point[0]) + + # Uncompressed point + if point_type == 0x04: + if len(ec_point) != (1 + 2 * modulus_bytes): + raise ValueError("Incorrect EC point length") + x = Integer.from_bytes(ec_point[1:modulus_bytes+1]) + y = Integer.from_bytes(ec_point[modulus_bytes+1:]) + # Compressed point + elif point_type in (0x02, 0x03): + if len(ec_point) != (1 + modulus_bytes): + raise ValueError("Incorrect EC point length") + x = Integer.from_bytes(ec_point[1:]) + # Right now, we only support Short Weierstrass curves + y = (x**3 - x*3 + curve.b).sqrt(curve.p) + if point_type == 0x02 and y.is_odd(): + y = curve.p - y + if point_type == 0x03 and y.is_even(): + y = curve.p - y + else: + raise ValueError("Incorrect EC point encoding") + + return construct(curve=_curve_name, point_x=x, point_y=y) + + +def _import_subjectPublicKeyInfo(encoded, *kwargs): + """Convert a subjectPublicKeyInfo into an EccKey object""" + + # See RFC5480 + + # Parse the generic subjectPublicKeyInfo structure + oid, ec_point, params = _expand_subject_public_key_info(encoded) + + nist_p_oids = ( + "1.2.840.10045.2.1", # id-ecPublicKey (unrestricted) + "1.3.132.1.12", # id-ecDH + "1.3.132.1.13" # id-ecMQV + ) + eddsa_oids = { + "1.3.101.112": ("Ed25519", _import_ed25519_public_key), # id-Ed25519 + "1.3.101.113": ("Ed448", _import_ed448_public_key) # id-Ed448 + } + + if oid in nist_p_oids: + # See RFC5480 + + # Parameters are mandatory and encoded as ECParameters + # ECParameters ::= CHOICE { + # namedCurve OBJECT IDENTIFIER + # -- implicitCurve NULL + # -- specifiedCurve SpecifiedECDomain + # } + # implicitCurve and specifiedCurve are not supported (as per RFC) + if not params: + raise ValueError("Missing ECC parameters for ECC OID %s" % oid) + try: + curve_oid = 
DerObjectId().decode(params).value + except ValueError: + raise ValueError("Error decoding namedCurve") + + # ECPoint ::= OCTET STRING + return _import_public_der(ec_point, curve_oid=curve_oid) + + elif oid in eddsa_oids: + # See RFC8410 + curve_name, import_eddsa_public_key = eddsa_oids[oid] + + # Parameters must be absent + if params: + raise ValueError("Unexpected ECC parameters for ECC OID %s" % oid) + + x, y = import_eddsa_public_key(ec_point) + return construct(point_x=x, point_y=y, curve=curve_name) + else: + raise UnsupportedEccFeature("Unsupported ECC OID: %s" % oid) + + +def _import_rfc5915_der(encoded, passphrase, curve_oid=None): + + # See RFC5915 https://tools.ietf.org/html/rfc5915 + # + # ECPrivateKey ::= SEQUENCE { + # version INTEGER { ecPrivkeyVer1(1) } (ecPrivkeyVer1), + # privateKey OCTET STRING, + # parameters [0] ECParameters {{ NamedCurve }} OPTIONAL, + # publicKey [1] BIT STRING OPTIONAL + # } + + private_key = DerSequence().decode(encoded, nr_elements=(3, 4)) + if private_key[0] != 1: + raise ValueError("Incorrect ECC private key version") + + try: + parameters = DerObjectId(explicit=0).decode(private_key[2]).value + if curve_oid is not None and parameters != curve_oid: + raise ValueError("Curve mismatch") + curve_oid = parameters + except ValueError: + pass + + if curve_oid is None: + raise ValueError("No curve found") + + for curve_name, curve in _curves.items(): + if curve.oid == curve_oid: + break + else: + raise UnsupportedEccFeature("Unsupported ECC curve (OID: %s)" % curve_oid) + + scalar_bytes = DerOctetString().decode(private_key[1]).payload + modulus_bytes = curve.p.size_in_bytes() + if len(scalar_bytes) != modulus_bytes: + raise ValueError("Private key is too small") + d = Integer.from_bytes(scalar_bytes) + + # Decode public key (if any) + if len(private_key) > 2: + public_key_enc = DerBitString(explicit=1).decode(private_key[-1]).value + public_key = _import_public_der(public_key_enc, curve_oid=curve_oid) + point_x = public_key.pointQ.x + point_y = public_key.pointQ.y + else: + point_x = point_y = None + + return construct(curve=curve_name, d=d, point_x=point_x, point_y=point_y) + + +def _import_pkcs8(encoded, passphrase): + from Crypto.IO import PKCS8 + + algo_oid, private_key, params = PKCS8.unwrap(encoded, passphrase) + + nist_p_oids = ( + "1.2.840.10045.2.1", # id-ecPublicKey (unrestricted) + "1.3.132.1.12", # id-ecDH + "1.3.132.1.13" # id-ecMQV + ) + eddsa_oids = { + "1.3.101.112": "Ed25519", # id-Ed25519 + "1.3.101.113": "Ed448", # id-Ed448 + } + + if algo_oid in nist_p_oids: + curve_oid = DerObjectId().decode(params).value + return _import_rfc5915_der(private_key, passphrase, curve_oid) + elif algo_oid in eddsa_oids: + if params is not None: + raise ValueError("EdDSA ECC private key must not have parameters") + curve_oid = None + seed = DerOctetString().decode(private_key).payload + return construct(curve=eddsa_oids[algo_oid], seed=seed) + else: + raise UnsupportedEccFeature("Unsupported ECC purpose (OID: %s)" % algo_oid) + + +def _import_x509_cert(encoded, *kwargs): + + sp_info = _extract_subject_public_key_info(encoded) + return _import_subjectPublicKeyInfo(sp_info) + + +def _import_der(encoded, passphrase): + + try: + return _import_subjectPublicKeyInfo(encoded, passphrase) + except UnsupportedEccFeature as err: + raise err + except (ValueError, TypeError, IndexError): + pass + + try: + return _import_x509_cert(encoded, passphrase) + except UnsupportedEccFeature as err: + raise err + except (ValueError, TypeError, IndexError): + pass + + try: + 
return _import_rfc5915_der(encoded, passphrase) + except UnsupportedEccFeature as err: + raise err + except (ValueError, TypeError, IndexError): + pass + + try: + return _import_pkcs8(encoded, passphrase) + except UnsupportedEccFeature as err: + raise err + except (ValueError, TypeError, IndexError): + pass + + raise ValueError("Not an ECC DER key") + + +def _import_openssh_public(encoded): + parts = encoded.split(b' ') + if len(parts) not in (2, 3): + raise ValueError("Not an openssh public key") + + try: + keystring = binascii.a2b_base64(parts[1]) + + keyparts = [] + while len(keystring) > 4: + lk = struct.unpack(">I", keystring[:4])[0] + keyparts.append(keystring[4:4 + lk]) + keystring = keystring[4 + lk:] + + if parts[0] != keyparts[0]: + raise ValueError("Mismatch in openssh public key") + + # NIST P curves + if parts[0].startswith(b"ecdsa-sha2-"): + + for curve_name, curve in _curves.items(): + if curve.openssh is None: + continue + if not curve.openssh.startswith("ecdsa-sha2"): + continue + middle = tobytes(curve.openssh.split("-")[2]) + if keyparts[1] == middle: + break + else: + raise ValueError("Unsupported ECC curve: " + middle) + + ecc_key = _import_public_der(keyparts[2], curve_oid=curve.oid) + + # EdDSA + elif parts[0] == b"ssh-ed25519": + x, y = _import_ed25519_public_key(keyparts[1]) + ecc_key = construct(curve="Ed25519", point_x=x, point_y=y) + else: + raise ValueError("Unsupported SSH key type: " + parts[0]) + + except (IndexError, TypeError, binascii.Error): + raise ValueError("Error parsing SSH key type: " + parts[0]) + + return ecc_key + + +def _import_openssh_private_ecc(data, password): + + from ._openssh import (import_openssh_private_generic, + read_bytes, read_string, check_padding) + + key_type, decrypted = import_openssh_private_generic(data, password) + + eddsa_keys = { + "ssh-ed25519": ("Ed25519", _import_ed25519_public_key, 32), + } + + # https://datatracker.ietf.org/doc/html/draft-miller-ssh-agent-04 + if key_type.startswith("ecdsa-sha2"): + + ecdsa_curve_name, decrypted = read_string(decrypted) + if ecdsa_curve_name not in _curves: + raise UnsupportedEccFeature("Unsupported ECC curve %s" % ecdsa_curve_name) + curve = _curves[ecdsa_curve_name] + modulus_bytes = (curve.modulus_bits + 7) // 8 + + public_key, decrypted = read_bytes(decrypted) + + if bord(public_key[0]) != 4: + raise ValueError("Only uncompressed OpenSSH EC keys are supported") + if len(public_key) != 2 * modulus_bytes + 1: + raise ValueError("Incorrect public key length") + + point_x = Integer.from_bytes(public_key[1:1+modulus_bytes]) + point_y = Integer.from_bytes(public_key[1+modulus_bytes:]) + + private_key, decrypted = read_bytes(decrypted) + d = Integer.from_bytes(private_key) + + params = {'d': d, 'curve': ecdsa_curve_name} + + elif key_type in eddsa_keys: + + curve_name, import_eddsa_public_key, seed_len = eddsa_keys[key_type] + + public_key, decrypted = read_bytes(decrypted) + point_x, point_y = import_eddsa_public_key(public_key) + + private_public_key, decrypted = read_bytes(decrypted) + seed = private_public_key[:seed_len] + + params = {'seed': seed, 'curve': curve_name} + else: + raise ValueError("Unsupport SSH agent key type:" + key_type) + + _, padded = read_string(decrypted) # Comment + check_padding(padded) + + return construct(point_x=point_x, point_y=point_y, **params) + + +def _import_ed25519_public_key(encoded): + """Import an Ed25519 ECC public key, encoded as raw bytes as described + in RFC8032_. + + Args: + encoded (bytes): + The Ed25519 public key to import. 
It must be 32 bytes long. + + Returns: + :class:`EccKey` : a new ECC key object + + Raises: + ValueError: when the given key cannot be parsed. + + .. _RFC8032: https://datatracker.ietf.org/doc/html/rfc8032 + """ + + if len(encoded) != 32: + raise ValueError("Incorrect length. Only Ed25519 public keys are supported.") + + p = Integer(0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffed) # 2**255 - 19 + d = 37095705934669439343138083508754565189542113879843219016388785533085940283555 + + y = bytearray(encoded) + x_lsb = y[31] >> 7 + y[31] &= 0x7F + point_y = Integer.from_bytes(y, byteorder='little') + if point_y >= p: + raise ValueError("Invalid Ed25519 key (y)") + if point_y == 1: + return 0, 1 + + u = (point_y**2 - 1) % p + v = ((point_y**2 % p) * d + 1) % p + try: + v_inv = v.inverse(p) + x2 = (u * v_inv) % p + point_x = Integer._tonelli_shanks(x2, p) + if (point_x & 1) != x_lsb: + point_x = p - point_x + except ValueError: + raise ValueError("Invalid Ed25519 public key") + return point_x, point_y + + +def _import_ed448_public_key(encoded): + """Import an Ed448 ECC public key, encoded as raw bytes as described + in RFC8032_. + + Args: + encoded (bytes): + The Ed448 public key to import. It must be 57 bytes long. + + Returns: + :class:`EccKey` : a new ECC key object + + Raises: + ValueError: when the given key cannot be parsed. + + .. _RFC8032: https://datatracker.ietf.org/doc/html/rfc8032 + """ + + if len(encoded) != 57: + raise ValueError("Incorrect length. Only Ed448 public keys are supported.") + + p = Integer(0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffeffffffffffffffffffffffffffffffffffffffffffffffffffffffff) # 2**448 - 2**224 - 1 + d = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffeffffffffffffffffffffffffffffffffffffffffffffffffffff6756 + + y = encoded[:56] + x_lsb = bord(encoded[56]) >> 7 + point_y = Integer.from_bytes(y, byteorder='little') + if point_y >= p: + raise ValueError("Invalid Ed448 key (y)") + if point_y == 1: + return 0, 1 + + u = (point_y**2 - 1) % p + v = ((point_y**2 % p) * d - 1) % p + try: + v_inv = v.inverse(p) + x2 = (u * v_inv) % p + point_x = Integer._tonelli_shanks(x2, p) + if (point_x & 1) != x_lsb: + point_x = p - point_x + except ValueError: + raise ValueError("Invalid Ed448 public key") + return point_x, point_y + + +def import_key(encoded, passphrase=None, curve_name=None): + """Import an ECC key (public or private). + + Args: + encoded (bytes or multi-line string): + The ECC key to import. + The function will try to automatically detect the right format. + + Supported formats for an ECC **public** key: + + * X.509 certificate: binary (DER) or ASCII (PEM). + * X.509 ``subjectPublicKeyInfo``: binary (DER) or ASCII (PEM). + * SEC1_ (or X9.62), as ``bytes``. NIST P curves only. + You must also provide the ``curve_name`` (with a value from the `ECC table`_) + * OpenSSH line, defined in RFC5656_ and RFC8709_ (ASCII). + This is normally the content of files like ``~/.ssh/id_ecdsa.pub``. + + Supported formats for an ECC **private** key: + + * A binary ``ECPrivateKey`` structure, as defined in `RFC5915`_ (DER). + NIST P curves only. + * A `PKCS#8`_ structure (or the more recent Asymmetric Key Package, RFC5958_): binary (DER) or ASCII (PEM). + * `OpenSSH 6.5`_ and newer versions (ASCII). + + Private keys can be in the clear or password-protected. + + For details about the PEM encoding, see `RFC1421`_/`RFC1423`_. + + passphrase (byte string): + The passphrase to use for decrypting a private key. 
+ + Encryption may be applied at the PEM level (not recommended) + or at the PKCS#8 level (recommended). + This parameter is ignored if the input key is not encrypted. + + curve_name (string): + For a SEC1 encoding only. This is the name of the curve, + as defined in the `ECC table`_. + + .. note:: + + To import EdDSA private and public keys, when encoded as raw ``bytes``, use: + + * :func:`Crypto.Signature.eddsa.import_public_key`, or + * :func:`Crypto.Signature.eddsa.import_private_key`. + + Returns: + :class:`EccKey` : a new ECC key object + + Raises: + ValueError: when the given key cannot be parsed (possibly because + the pass phrase is wrong). + + .. _RFC1421: https://datatracker.ietf.org/doc/html/rfc1421 + .. _RFC1423: https://datatracker.ietf.org/doc/html/rfc1423 + .. _RFC5915: https://datatracker.ietf.org/doc/html/rfc5915 + .. _RFC5656: https://datatracker.ietf.org/doc/html/rfc5656 + .. _RFC8709: https://datatracker.ietf.org/doc/html/rfc8709 + .. _RFC5958: https://datatracker.ietf.org/doc/html/rfc5958 + .. _`PKCS#8`: https://datatracker.ietf.org/doc/html/rfc5208 + .. _`OpenSSH 6.5`: https://flak.tedunangst.com/post/new-openssh-key-format-and-bcrypt-pbkdf + .. _SEC1: https://www.secg.org/sec1-v2.pdf + """ + + from Crypto.IO import PEM + + encoded = tobytes(encoded) + if passphrase is not None: + passphrase = tobytes(passphrase) + + # PEM + if encoded.startswith(b'-----BEGIN OPENSSH PRIVATE KEY'): + text_encoded = tostr(encoded) + openssh_encoded, marker, enc_flag = PEM.decode(text_encoded, passphrase) + result = _import_openssh_private_ecc(openssh_encoded, passphrase) + return result + + elif encoded.startswith(b'-----'): + + text_encoded = tostr(encoded) + + # Remove any EC PARAMETERS section + # Ignore its content because the curve type must be already given in the key + ecparams_start = "-----BEGIN EC PARAMETERS-----" + ecparams_end = "-----END EC PARAMETERS-----" + text_encoded = re.sub(ecparams_start + ".*?" 
+ ecparams_end, "", + text_encoded, + flags=re.DOTALL) + + der_encoded, marker, enc_flag = PEM.decode(text_encoded, passphrase) + if enc_flag: + passphrase = None + try: + result = _import_der(der_encoded, passphrase) + except UnsupportedEccFeature as uef: + raise uef + except ValueError: + raise ValueError("Invalid DER encoding inside the PEM file") + return result + + # OpenSSH + if encoded.startswith((b'ecdsa-sha2-', b'ssh-ed25519')): + return _import_openssh_public(encoded) + + # DER + if len(encoded) > 0 and bord(encoded[0]) == 0x30: + return _import_der(encoded, passphrase) + + # SEC1 + if len(encoded) > 0 and bord(encoded[0]) in (0x02, 0x03, 0x04): + if curve_name is None: + raise ValueError("No curve name was provided") + return _import_public_der(encoded, curve_name=curve_name) + + raise ValueError("ECC key format is not supported") + + +if __name__ == "__main__": + + import time + + d = 0xc51e4753afdec1e6b6c6a5b992f43f8dd0c7a8933072708b6522468b2ffb06fd + + point = _curves['p256'].G.copy() + count = 3000 + + start = time.time() + for x in range(count): + pointX = point * d + print("(P-256 G)", (time.time() - start) / count * 1000, "ms") + + start = time.time() + for x in range(count): + pointX = pointX * d + print("(P-256 arbitrary point)", (time.time() - start) / count * 1000, "ms") diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/ECC.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/ECC.pyi new file mode 100644 index 000000000..d7f884b82 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/ECC.pyi @@ -0,0 +1,102 @@ +from __future__ import annotations + +from typing import Union, Callable, Optional, Tuple, Dict, NamedTuple, Any, overload, Literal +from typing_extensions import TypedDict, Unpack, NotRequired + +from Crypto.Math.Numbers import Integer +from Crypto.IO._PBES import ProtParams + +RNG = Callable[[int], bytes] + + +class UnsupportedEccFeature(ValueError): + ... + + +class EccPoint(object): + def __init__(self, + x: Union[int, Integer], + y: Union[int, Integer], + curve: Optional[str] = ...) -> None: ... + + def set(self, point: EccPoint) -> EccPoint: ... + def __eq__(self, point: object) -> bool: ... + def __neg__(self) -> EccPoint: ... + def copy(self) -> EccPoint: ... + def is_point_at_infinity(self) -> bool: ... + def point_at_infinity(self) -> EccPoint: ... + @property + def x(self) -> int: ... + @property + def y(self) -> int: ... + @property + def xy(self) -> Tuple[int, int]: ... + def size_in_bytes(self) -> int: ... + def size_in_bits(self) -> int: ... + def double(self) -> EccPoint: ... + def __iadd__(self, point: EccPoint) -> EccPoint: ... + def __add__(self, point: EccPoint) -> EccPoint: ... + def __imul__(self, scalar: int) -> EccPoint: ... + def __mul__(self, scalar: int) -> EccPoint: ... + + +class ExportParams(TypedDict): + passphrase: NotRequired[Union[bytes, str]] + use_pkcs8: NotRequired[bool] + protection: NotRequired[str] + compress: NotRequired[bool] + prot_params: NotRequired[ProtParams] + + +class EccKey(object): + curve: str + def __init__(self, *, curve: str = ..., d: int = ..., point: EccPoint = ...) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __repr__(self) -> str: ... + def has_private(self) -> bool: ... + @property + def d(self) -> int: ... + @property + def pointQ(self) -> EccPoint: ... + def public_key(self) -> EccKey: ... 
+ + @overload + def export_key(self, + *, + format: Literal['PEM', 'OpenSSH'], + **kwargs: Unpack[ExportParams]) -> str: ... + + @overload + def export_key(self, + *, + format: Literal['DER', 'SEC1', 'raw'], + **kwargs: Unpack[ExportParams]) -> bytes: ... + + +_Curve = NamedTuple("_Curve", [('p', Integer), + ('order', Integer), + ('b', Integer), + ('Gx', Integer), + ('Gy', Integer), + ('G', EccPoint), + ('modulus_bits', int), + ('oid', str), + ('context', Any), + ('desc', str), + ('openssh', Union[str, None]), + ]) + +_curves: Dict[str, _Curve] + + +def generate(**kwargs: Union[str, RNG]) -> EccKey: ... +def construct(**kwargs: Union[str, int]) -> EccKey: ... + + +def import_key(encoded: Union[bytes, str], + passphrase: Optional[str] = None, + curve_name: Optional[str] = None) -> EccKey: ... + + +def _import_ed25519_public_key(encoded: bytes) -> EccKey: ... +def _import_ed448_public_key(encoded: bytes) -> EccKey: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/ElGamal.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/ElGamal.py new file mode 100644 index 000000000..3b1084056 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/ElGamal.py @@ -0,0 +1,286 @@ +# +# ElGamal.py : ElGamal encryption/decryption and signatures +# +# Part of the Python Cryptography Toolkit +# +# Originally written by: A.M. Kuchling +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__all__ = ['generate', 'construct', 'ElGamalKey'] + +from Crypto import Random +from Crypto.Math.Primality import ( generate_probable_safe_prime, + test_probable_prime, COMPOSITE ) +from Crypto.Math.Numbers import Integer + +# Generate an ElGamal key with N bits +def generate(bits, randfunc): + """Randomly generate a fresh, new ElGamal key. + + The key will be safe for use for both encryption and signature + (although it should be used for **only one** purpose). + + Args: + bits (int): + Key length, or size (in bits) of the modulus *p*. + The recommended value is 2048. + randfunc (callable): + Random number generation function; it should accept + a single integer *N* and return a string of random + *N* random bytes. + + Return: + an :class:`ElGamalKey` object + """ + + obj=ElGamalKey() + + # Generate a safe prime p + # See Algorithm 4.86 in Handbook of Applied Cryptography + obj.p = generate_probable_safe_prime(exact_bits=bits, randfunc=randfunc) + q = (obj.p - 1) >> 1 + + # Generate generator g + while 1: + # Choose a square residue; it will generate a cyclic group of order q. 
+ obj.g = pow(Integer.random_range(min_inclusive=2, + max_exclusive=obj.p, + randfunc=randfunc), 2, obj.p) + + # We must avoid g=2 because of Bleichenbacher's attack described + # in "Generating ElGamal signatures without knowning the secret key", + # 1996 + if obj.g in (1, 2): + continue + + # Discard g if it divides p-1 because of the attack described + # in Note 11.67 (iii) in HAC + if (obj.p - 1) % obj.g == 0: + continue + + # g^{-1} must not divide p-1 because of Khadir's attack + # described in "Conditions of the generator for forging ElGamal + # signature", 2011 + ginv = obj.g.inverse(obj.p) + if (obj.p - 1) % ginv == 0: + continue + + # Found + break + + # Generate private key x + obj.x = Integer.random_range(min_inclusive=2, + max_exclusive=obj.p-1, + randfunc=randfunc) + # Generate public key y + obj.y = pow(obj.g, obj.x, obj.p) + return obj + +def construct(tup): + r"""Construct an ElGamal key from a tuple of valid ElGamal components. + + The modulus *p* must be a prime. + The following conditions must apply: + + .. math:: + + \begin{align} + &1 < g < p-1 \\ + &g^{p-1} = 1 \text{ mod } 1 \\ + &1 < x < p-1 \\ + &g^x = y \text{ mod } p + \end{align} + + Args: + tup (tuple): + A tuple with either 3 or 4 integers, + in the following order: + + 1. Modulus (*p*). + 2. Generator (*g*). + 3. Public key (*y*). + 4. Private key (*x*). Optional. + + Raises: + ValueError: when the key being imported fails the most basic ElGamal validity checks. + + Returns: + an :class:`ElGamalKey` object + """ + + obj=ElGamalKey() + if len(tup) not in [3,4]: + raise ValueError('argument for construct() wrong length') + for i in range(len(tup)): + field = obj._keydata[i] + setattr(obj, field, Integer(tup[i])) + + fmt_error = test_probable_prime(obj.p) == COMPOSITE + fmt_error |= obj.g<=1 or obj.g>=obj.p + fmt_error |= pow(obj.g, obj.p-1, obj.p)!=1 + fmt_error |= obj.y<1 or obj.y>=obj.p + if len(tup)==4: + fmt_error |= obj.x<=1 or obj.x>=obj.p + fmt_error |= pow(obj.g, obj.x, obj.p)!=obj.y + + if fmt_error: + raise ValueError("Invalid ElGamal key components") + + return obj + +class ElGamalKey(object): + r"""Class defining an ElGamal key. + Do not instantiate directly. + Use :func:`generate` or :func:`construct` instead. + + :ivar p: Modulus + :vartype d: integer + + :ivar g: Generator + :vartype e: integer + + :ivar y: Public key component + :vartype y: integer + + :ivar x: Private key component + :vartype x: integer + """ + + #: Dictionary of ElGamal parameters. + #: + #: A public key will only have the following entries: + #: + #: - **y**, the public key. + #: - **g**, the generator. + #: - **p**, the modulus. + #: + #: A private key will also have: + #: + #: - **x**, the private key. 
+ _keydata=['p', 'g', 'y', 'x'] + + def __init__(self, randfunc=None): + if randfunc is None: + randfunc = Random.new().read + self._randfunc = randfunc + + def _encrypt(self, M, K): + a=pow(self.g, K, self.p) + b=( pow(self.y, K, self.p)*M ) % self.p + return [int(a), int(b)] + + def _decrypt(self, M): + if (not hasattr(self, 'x')): + raise TypeError('Private key not available in this object') + r = Integer.random_range(min_inclusive=2, + max_exclusive=self.p-1, + randfunc=self._randfunc) + a_blind = (pow(self.g, r, self.p) * M[0]) % self.p + ax=pow(a_blind, self.x, self.p) + plaintext_blind = (ax.inverse(self.p) * M[1] ) % self.p + plaintext = (plaintext_blind * pow(self.y, r, self.p)) % self.p + return int(plaintext) + + def _sign(self, M, K): + if (not hasattr(self, 'x')): + raise TypeError('Private key not available in this object') + p1=self.p-1 + K = Integer(K) + if (K.gcd(p1)!=1): + raise ValueError('Bad K value: GCD(K,p-1)!=1') + a=pow(self.g, K, self.p) + t=(Integer(M)-self.x*a) % p1 + while t<0: t=t+p1 + b=(t*K.inverse(p1)) % p1 + return [int(a), int(b)] + + def _verify(self, M, sig): + sig = [Integer(x) for x in sig] + if sig[0]<1 or sig[0]>self.p-1: + return 0 + v1=pow(self.y, sig[0], self.p) + v1=(v1*pow(sig[0], sig[1], self.p)) % self.p + v2=pow(self.g, M, self.p) + if v1==v2: + return 1 + return 0 + + def has_private(self): + """Whether this is an ElGamal private key""" + + if hasattr(self, 'x'): + return 1 + else: + return 0 + + def can_encrypt(self): + return True + + def can_sign(self): + return True + + def publickey(self): + """A matching ElGamal public key. + + Returns: + a new :class:`ElGamalKey` object + """ + return construct((self.p, self.g, self.y)) + + def __eq__(self, other): + if bool(self.has_private()) != bool(other.has_private()): + return False + + result = True + for comp in self._keydata: + result = result and (getattr(self.key, comp, None) == + getattr(other.key, comp, None)) + return result + + def __ne__(self, other): + return not self.__eq__(other) + + def __getstate__(self): + # ElGamal key is not pickable + from pickle import PicklingError + raise PicklingError + + # Methods defined in PyCrypto that we don't support anymore + + def sign(self, M, K): + raise NotImplementedError + + def verify(self, M, signature): + raise NotImplementedError + + def encrypt(self, plaintext, K): + raise NotImplementedError + + def decrypt(self, ciphertext): + raise NotImplementedError + + def blind(self, M, B): + raise NotImplementedError + + def unblind(self, M, B): + raise NotImplementedError + + def size(self): + raise NotImplementedError diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/ElGamal.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/ElGamal.pyi new file mode 100644 index 000000000..904853132 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/ElGamal.pyi @@ -0,0 +1,18 @@ +from typing import Callable, Union, Tuple, Optional + +__all__ = ['generate', 'construct', 'ElGamalKey'] + +RNG = Callable[[int], bytes] + +def generate(bits: int, randfunc: RNG) -> ElGamalKey: ... +def construct(tup: Union[Tuple[int, int, int], Tuple[int, int, int, int]]) -> ElGamalKey: ... + +class ElGamalKey(object): + def __init__(self, randfunc: Optional[RNG]=None) -> None: ... + def has_private(self) -> bool: ... + def can_encrypt(self) -> bool: ... + def can_sign(self) -> bool: ... + def publickey(self) -> ElGamalKey: ... + def __eq__(self, other: object) -> bool: ... 
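+
+    # Illustrative sketch (not part of the upstream stub): intended use of
+    # generate() and construct(); 512 bits is a toy size used only so that
+    # safe-prime generation stays fast in an example, not a secure choice.
+    #
+    #     from Crypto import Random
+    #     from Crypto.PublicKey import ElGamal
+    #
+    #     key = ElGamal.generate(512, Random.new().read)
+    #     # Rebuild the public half from its components (x is omitted).
+    #     pub = ElGamal.construct((int(key.p), int(key.g), int(key.y)))
+    #     assert not pub.has_private()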
+ def __ne__(self, other: object) -> bool: ... + def __getstate__(self) -> None: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/RSA.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/RSA.py new file mode 100644 index 000000000..26d8ab510 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/RSA.py @@ -0,0 +1,864 @@ +# -*- coding: utf-8 -*- +# =================================================================== +# +# Copyright (c) 2016, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +__all__ = ['generate', 'construct', 'import_key', + 'RsaKey', 'oid'] + +import binascii +import struct + +from Crypto import Random +from Crypto.Util.py3compat import tobytes, bord, tostr +from Crypto.Util.asn1 import DerSequence, DerNull +from Crypto.Util.number import bytes_to_long + +from Crypto.Math.Numbers import Integer +from Crypto.Math.Primality import (test_probable_prime, + generate_probable_prime, COMPOSITE) + +from Crypto.PublicKey import (_expand_subject_public_key_info, + _create_subject_public_key_info, + _extract_subject_public_key_info) + + +class RsaKey(object): + r"""Class defining an RSA key, private or public. + Do not instantiate directly. + Use :func:`generate`, :func:`construct` or :func:`import_key` instead. + + :ivar n: RSA modulus + :vartype n: integer + + :ivar e: RSA public exponent + :vartype e: integer + + :ivar d: RSA private exponent + :vartype d: integer + + :ivar p: First factor of the RSA modulus + :vartype p: integer + + :ivar q: Second factor of the RSA modulus + :vartype q: integer + + :ivar invp: Chinese remainder component (:math:`p^{-1} \text{mod } q`) + :vartype invp: integer + + :ivar invq: Chinese remainder component (:math:`q^{-1} \text{mod } p`) + :vartype invq: integer + + :ivar u: Same as ``invp`` + :vartype u: integer + """ + + def __init__(self, **kwargs): + """Build an RSA key. + + :Keywords: + n : integer + The modulus. + e : integer + The public exponent. + d : integer + The private exponent. Only required for private keys. + p : integer + The first factor of the modulus. Only required for private keys. 
+ q : integer + The second factor of the modulus. Only required for private keys. + u : integer + The CRT coefficient (inverse of p modulo q). Only required for + private keys. + """ + + input_set = set(kwargs.keys()) + public_set = set(('n', 'e')) + private_set = public_set | set(('p', 'q', 'd', 'u')) + if input_set not in (private_set, public_set): + raise ValueError("Some RSA components are missing") + for component, value in kwargs.items(): + setattr(self, "_" + component, value) + if input_set == private_set: + self._dp = self._d % (self._p - 1) # = (e⁻¹) mod (p-1) + self._dq = self._d % (self._q - 1) # = (e⁻¹) mod (q-1) + self._invq = None # will be computed on demand + + @property + def n(self): + return int(self._n) + + @property + def e(self): + return int(self._e) + + @property + def d(self): + if not self.has_private(): + raise AttributeError("No private exponent available for public keys") + return int(self._d) + + @property + def p(self): + if not self.has_private(): + raise AttributeError("No CRT component 'p' available for public keys") + return int(self._p) + + @property + def q(self): + if not self.has_private(): + raise AttributeError("No CRT component 'q' available for public keys") + return int(self._q) + + @property + def dp(self): + if not self.has_private(): + raise AttributeError("No CRT component 'dp' available for public keys") + return int(self._dp) + + @property + def dq(self): + if not self.has_private(): + raise AttributeError("No CRT component 'dq' available for public keys") + return int(self._dq) + + @property + def invq(self): + if not self.has_private(): + raise AttributeError("No CRT component 'invq' available for public keys") + if self._invq is None: + self._invq = self._q.inverse(self._p) + return int(self._invq) + + @property + def invp(self): + return self.u + + @property + def u(self): + if not self.has_private(): + raise AttributeError("No CRT component 'u' available for public keys") + return int(self._u) + + def size_in_bits(self): + """Size of the RSA modulus in bits""" + return self._n.size_in_bits() + + def size_in_bytes(self): + """The minimal amount of bytes that can hold the RSA modulus""" + return (self._n.size_in_bits() - 1) // 8 + 1 + + def _encrypt(self, plaintext): + if not 0 <= plaintext < self._n: + raise ValueError("Plaintext too large") + return int(pow(Integer(plaintext), self._e, self._n)) + + def _decrypt_to_bytes(self, ciphertext): + if not 0 <= ciphertext < self._n: + raise ValueError("Ciphertext too large") + if not self.has_private(): + raise TypeError("This is not a private key") + + # Blinded RSA decryption (to prevent timing attacks): + # Step 1: Generate random secret blinding factor r, + # such that 0 < r < n-1 + r = Integer.random_range(min_inclusive=1, max_exclusive=self._n) + # Step 2: Compute c' = c * r**e mod n + cp = Integer(ciphertext) * pow(r, self._e, self._n) % self._n + # Step 3: Compute m' = c'**d mod n (normal RSA decryption) + m1 = pow(cp, self._dp, self._p) + m2 = pow(cp, self._dq, self._q) + h = ((m2 - m1) * self._u) % self._q + mp = h * self._p + m1 + # Step 4: Compute m = m' * (r**(-1)) mod n + # then encode into a big endian byte string + result = Integer._mult_modulo_bytes( + r.inverse(self._n), + mp, + self._n) + return result + + def _decrypt(self, ciphertext): + """Legacy private method""" + + return bytes_to_long(self._decrypt_to_bytes(ciphertext)) + + def has_private(self): + """Whether this is an RSA private key""" + + return hasattr(self, "_d") + + def can_encrypt(self): # legacy + return 
True + + def can_sign(self): # legacy + return True + + def public_key(self): + """A matching RSA public key. + + Returns: + a new :class:`RsaKey` object + """ + return RsaKey(n=self._n, e=self._e) + + def __eq__(self, other): + if self.has_private() != other.has_private(): + return False + if self.n != other.n or self.e != other.e: + return False + if not self.has_private(): + return True + return (self.d == other.d) + + def __ne__(self, other): + return not (self == other) + + def __getstate__(self): + # RSA key is not pickable + from pickle import PicklingError + raise PicklingError + + def __repr__(self): + if self.has_private(): + extra = ", d=%d, p=%d, q=%d, u=%d" % (int(self._d), int(self._p), + int(self._q), int(self._u)) + else: + extra = "" + return "RsaKey(n=%d, e=%d%s)" % (int(self._n), int(self._e), extra) + + def __str__(self): + if self.has_private(): + key_type = "Private" + else: + key_type = "Public" + return "%s RSA key at 0x%X" % (key_type, id(self)) + + def export_key(self, format='PEM', passphrase=None, pkcs=1, + protection=None, randfunc=None, prot_params=None): + """Export this RSA key. + + Keyword Args: + format (string): + The desired output format: + + - ``'PEM'``. (default) Text output, according to `RFC1421`_/`RFC1423`_. + - ``'DER'``. Binary output. + - ``'OpenSSH'``. Text output, according to the OpenSSH specification. + Only suitable for public keys (not private keys). + + Note that PEM contains a DER structure. + + passphrase (bytes or string): + (*Private keys only*) The passphrase to protect the + private key. + + pkcs (integer): + (*Private keys only*) The standard to use for + serializing the key: PKCS#1 or PKCS#8. + + With ``pkcs=1`` (*default*), the private key is encoded with a + simple `PKCS#1`_ structure (``RSAPrivateKey``). The key cannot be + securely encrypted. + + With ``pkcs=8``, the private key is encoded with a `PKCS#8`_ structure + (``PrivateKeyInfo``). PKCS#8 offers the best ways to securely + encrypt the key. + + .. note:: + This parameter is ignored for a public key. + For DER and PEM, the output is always an + ASN.1 DER ``SubjectPublicKeyInfo`` structure. + + protection (string): + (*For private keys only*) + The encryption scheme to use for protecting the private key + using the passphrase. + + You can only specify a value if ``pkcs=8``. + For all possible protection schemes, + refer to :ref:`the encryption parameters of PKCS#8`. + The recommended value is + ``'PBKDF2WithHMAC-SHA512AndAES256-CBC'``. + + If ``None`` (default), the behavior depends on :attr:`format`: + + - if ``format='PEM'``, the obsolete PEM encryption scheme is used. + It is based on MD5 for key derivation, and 3DES for encryption. + + - if ``format='DER'``, the ``'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC'`` + scheme is used. + + prot_params (dict): + (*For private keys only*) + + The parameters to use to derive the encryption key + from the passphrase. ``'protection'`` must be also specified. + For all possible values, + refer to :ref:`the encryption parameters of PKCS#8`. + The recommendation is to use ``{'iteration_count':21000}`` for PBKDF2, + and ``{'iteration_count':131072}`` for scrypt. + + randfunc (callable): + A function that provides random bytes. Only used for PEM encoding. + The default is :func:`Crypto.Random.get_random_bytes`. + + Returns: + bytes: the encoded key + + Raises: + ValueError:when the format is unknown or when you try to encrypt a private + key with *DER* format and PKCS#1. + + .. 
warning:: + If you don't provide a pass phrase, the private key will be + exported in the clear! + + .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt + .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt + .. _`PKCS#1`: http://www.ietf.org/rfc/rfc3447.txt + .. _`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt + """ + + if passphrase is not None: + passphrase = tobytes(passphrase) + + if randfunc is None: + randfunc = Random.get_random_bytes + + if format == 'OpenSSH': + e_bytes, n_bytes = [x.to_bytes() for x in (self._e, self._n)] + if bord(e_bytes[0]) & 0x80: + e_bytes = b'\x00' + e_bytes + if bord(n_bytes[0]) & 0x80: + n_bytes = b'\x00' + n_bytes + keyparts = [b'ssh-rsa', e_bytes, n_bytes] + keystring = b''.join([struct.pack(">I", len(kp)) + kp for kp in keyparts]) + return b'ssh-rsa ' + binascii.b2a_base64(keystring)[:-1] + + # DER format is always used, even in case of PEM, which simply + # encodes it into BASE64. + if self.has_private(): + binary_key = DerSequence([0, + self.n, + self.e, + self.d, + self.p, + self.q, + self.d % (self.p-1), + self.d % (self.q-1), + Integer(self.q).inverse(self.p) + ]).encode() + if pkcs == 1: + key_type = 'RSA PRIVATE KEY' + if format == 'DER' and passphrase: + raise ValueError("PKCS#1 private key cannot be encrypted") + else: # PKCS#8 + from Crypto.IO import PKCS8 + + if format == 'PEM' and protection is None: + key_type = 'PRIVATE KEY' + binary_key = PKCS8.wrap(binary_key, oid, None, + key_params=DerNull()) + else: + key_type = 'ENCRYPTED PRIVATE KEY' + if not protection: + if prot_params: + raise ValueError("'protection' parameter must be set") + protection = 'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC' + binary_key = PKCS8.wrap(binary_key, oid, + passphrase, protection, + prot_params=prot_params, + key_params=DerNull()) + passphrase = None + else: + key_type = "PUBLIC KEY" + binary_key = _create_subject_public_key_info(oid, + DerSequence([self.n, + self.e]), + DerNull() + ) + + if format == 'DER': + return binary_key + if format == 'PEM': + from Crypto.IO import PEM + + pem_str = PEM.encode(binary_key, key_type, passphrase, randfunc) + return tobytes(pem_str) + + raise ValueError("Unknown key format '%s'. Cannot export the RSA key." % format) + + # Backward compatibility + def exportKey(self, *args, **kwargs): + """:meta private:""" + return self.export_key(*args, **kwargs) + + def publickey(self): + """:meta private:""" + return self.public_key() + + # Methods defined in PyCrypto that we don't support anymore + def sign(self, M, K): + """:meta private:""" + raise NotImplementedError("Use module Crypto.Signature.pkcs1_15 instead") + + def verify(self, M, signature): + """:meta private:""" + raise NotImplementedError("Use module Crypto.Signature.pkcs1_15 instead") + + def encrypt(self, plaintext, K): + """:meta private:""" + raise NotImplementedError("Use module Crypto.Cipher.PKCS1_OAEP instead") + + def decrypt(self, ciphertext): + """:meta private:""" + raise NotImplementedError("Use module Crypto.Cipher.PKCS1_OAEP instead") + + def blind(self, M, B): + """:meta private:""" + raise NotImplementedError + + def unblind(self, M, B): + """:meta private:""" + raise NotImplementedError + + def size(self): + """:meta private:""" + raise NotImplementedError + + +def generate(bits, randfunc=None, e=65537): + """Create a new RSA key pair. + + The algorithm closely follows NIST `FIPS 186-4`_ in its + sections B.3.1 and B.3.3. The modulus is the product of + two non-strong probable primes. 
+ Each prime passes a suitable number of Miller-Rabin tests + with random bases and a single Lucas test. + + Args: + bits (integer): + Key length, or size (in bits) of the RSA modulus. + It must be at least 1024, but **2048 is recommended.** + The FIPS standard only defines 1024, 2048 and 3072. + Keyword Args: + randfunc (callable): + Function that returns random bytes. + The default is :func:`Crypto.Random.get_random_bytes`. + e (integer): + Public RSA exponent. It must be an odd positive integer. + It is typically a small number with very few ones in its + binary representation. + The FIPS standard requires the public exponent to be + at least 65537 (the default). + + Returns: an RSA key object (:class:`RsaKey`, with private key). + + .. _FIPS 186-4: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf + """ + + if bits < 1024: + raise ValueError("RSA modulus length must be >= 1024") + if e % 2 == 0 or e < 3: + raise ValueError("RSA public exponent must be a positive, odd integer larger than 2.") + + if randfunc is None: + randfunc = Random.get_random_bytes + + d = n = Integer(1) + e = Integer(e) + + while n.size_in_bits() != bits and d < (1 << (bits // 2)): + # Generate the prime factors of n: p and q. + # By construciton, their product is always + # 2^{bits-1} < p*q < 2^bits. + size_q = bits // 2 + size_p = bits - size_q + + min_p = min_q = (Integer(1) << (2 * size_q - 1)).sqrt() + if size_q != size_p: + min_p = (Integer(1) << (2 * size_p - 1)).sqrt() + + def filter_p(candidate): + return candidate > min_p and (candidate - 1).gcd(e) == 1 + + p = generate_probable_prime(exact_bits=size_p, + randfunc=randfunc, + prime_filter=filter_p) + + min_distance = Integer(1) << (bits // 2 - 100) + + def filter_q(candidate): + return (candidate > min_q and + (candidate - 1).gcd(e) == 1 and + abs(candidate - p) > min_distance) + + q = generate_probable_prime(exact_bits=size_q, + randfunc=randfunc, + prime_filter=filter_q) + + n = p * q + lcm = (p - 1).lcm(q - 1) + d = e.inverse(lcm) + + if p > q: + p, q = q, p + + u = p.inverse(q) + + return RsaKey(n=n, e=e, d=d, p=p, q=q, u=u) + + +def construct(rsa_components, consistency_check=True): + r"""Construct an RSA key from a tuple of valid RSA components. + + The modulus **n** must be the product of two primes. + The public exponent **e** must be odd and larger than 1. + + In case of a private key, the following equations must apply: + + .. math:: + + \begin{align} + p*q &= n \\ + e*d &\equiv 1 ( \text{mod lcm} [(p-1)(q-1)]) \\ + p*u &\equiv 1 ( \text{mod } q) + \end{align} + + Args: + rsa_components (tuple): + A tuple of integers, with at least 2 and no + more than 6 items. The items come in the following order: + + 1. RSA modulus *n*. + 2. Public exponent *e*. + 3. Private exponent *d*. + Only required if the key is private. + 4. First factor of *n* (*p*). + Optional, but the other factor *q* must also be present. + 5. Second factor of *n* (*q*). Optional. + 6. CRT coefficient *q*, that is :math:`p^{-1} \text{mod }q`. Optional. + + Keyword Args: + consistency_check (boolean): + If ``True``, the library will verify that the provided components + fulfil the main RSA properties. + + Raises: + ValueError: when the key being imported fails the most basic RSA validity checks. + + Returns: An RSA key object (:class:`RsaKey`). 
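+
+        A minimal illustrative example (a deliberately tiny, insecure
+        textbook key, shown only to make the component order concrete)::
+
+            >>> from Crypto.PublicKey import RSA
+            >>> toy = RSA.construct((3233, 17, 2753, 61, 53))
+            >>> toy.has_private()
+            True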
+ """ + + class InputComps(object): + pass + + input_comps = InputComps() + for (comp, value) in zip(('n', 'e', 'd', 'p', 'q', 'u'), rsa_components): + setattr(input_comps, comp, Integer(value)) + + n = input_comps.n + e = input_comps.e + if not hasattr(input_comps, 'd'): + key = RsaKey(n=n, e=e) + else: + d = input_comps.d + if hasattr(input_comps, 'q'): + p = input_comps.p + q = input_comps.q + else: + # Compute factors p and q from the private exponent d. + # We assume that n has no more than two factors. + # See 8.2.2(i) in Handbook of Applied Cryptography. + ktot = d * e - 1 + # The quantity d*e-1 is a multiple of phi(n), even, + # and can be represented as t*2^s. + t = ktot + while t % 2 == 0: + t //= 2 + # Cycle through all multiplicative inverses in Zn. + # The algorithm is non-deterministic, but there is a 50% chance + # any candidate a leads to successful factoring. + # See "Digitalized Signatures and Public Key Functions as Intractable + # as Factorization", M. Rabin, 1979 + spotted = False + a = Integer(2) + while not spotted and a < 100: + k = Integer(t) + # Cycle through all values a^{t*2^i}=a^k + while k < ktot: + cand = pow(a, k, n) + # Check if a^k is a non-trivial root of unity (mod n) + if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: + # We have found a number such that (cand-1)(cand+1)=0 (mod n). + # Either of the terms divides n. + p = Integer(n).gcd(cand + 1) + spotted = True + break + k *= 2 + # This value was not any good... let's try another! + a += 2 + if not spotted: + raise ValueError("Unable to compute factors p and q from exponent d.") + # Found ! + assert ((n % p) == 0) + q = n // p + + if hasattr(input_comps, 'u'): + u = input_comps.u + else: + u = p.inverse(q) + + # Build key object + key = RsaKey(n=n, e=e, d=d, p=p, q=q, u=u) + + # Verify consistency of the key + if consistency_check: + + # Modulus and public exponent must be coprime + if e <= 1 or e >= n: + raise ValueError("Invalid RSA public exponent") + if Integer(n).gcd(e) != 1: + raise ValueError("RSA public exponent is not coprime to modulus") + + # For RSA, modulus must be odd + if not n & 1: + raise ValueError("RSA modulus is not odd") + + if key.has_private(): + # Modulus and private exponent must be coprime + if d <= 1 or d >= n: + raise ValueError("Invalid RSA private exponent") + if Integer(n).gcd(d) != 1: + raise ValueError("RSA private exponent is not coprime to modulus") + # Modulus must be product of 2 primes + if p * q != n: + raise ValueError("RSA factors do not match modulus") + if test_probable_prime(p) == COMPOSITE: + raise ValueError("RSA factor p is composite") + if test_probable_prime(q) == COMPOSITE: + raise ValueError("RSA factor q is composite") + # See Carmichael theorem + phi = (p - 1) * (q - 1) + lcm = phi // (p - 1).gcd(q - 1) + if (e * d % int(lcm)) != 1: + raise ValueError("Invalid RSA condition") + if hasattr(key, 'u'): + # CRT coefficient + if u <= 1 or u >= q: + raise ValueError("Invalid RSA component u") + if (p * u % q) != 1: + raise ValueError("Invalid RSA component u with p") + + return key + + +def _import_pkcs1_private(encoded, *kwargs): + # RSAPrivateKey ::= SEQUENCE { + # version Version, + # modulus INTEGER, -- n + # publicExponent INTEGER, -- e + # privateExponent INTEGER, -- d + # prime1 INTEGER, -- p + # prime2 INTEGER, -- q + # exponent1 INTEGER, -- d mod (p-1) + # exponent2 INTEGER, -- d mod (q-1) + # coefficient INTEGER -- (inverse of q) mod p + # } + # + # Version ::= INTEGER + der = DerSequence().decode(encoded, nr_elements=9, 
only_ints_expected=True) + if der[0] != 0: + raise ValueError("No PKCS#1 encoding of an RSA private key") + return construct(der[1:6] + [Integer(der[4]).inverse(der[5])]) + + +def _import_pkcs1_public(encoded, *kwargs): + # RSAPublicKey ::= SEQUENCE { + # modulus INTEGER, -- n + # publicExponent INTEGER -- e + # } + der = DerSequence().decode(encoded, nr_elements=2, only_ints_expected=True) + return construct(der) + + +def _import_subjectPublicKeyInfo(encoded, *kwargs): + + algoid, encoded_key, params = _expand_subject_public_key_info(encoded) + if algoid != oid or params is not None: + raise ValueError("No RSA subjectPublicKeyInfo") + return _import_pkcs1_public(encoded_key) + + +def _import_x509_cert(encoded, *kwargs): + + sp_info = _extract_subject_public_key_info(encoded) + return _import_subjectPublicKeyInfo(sp_info) + + +def _import_pkcs8(encoded, passphrase): + from Crypto.IO import PKCS8 + + k = PKCS8.unwrap(encoded, passphrase) + if k[0] != oid: + raise ValueError("No PKCS#8 encoded RSA key") + return _import_keyDER(k[1], passphrase) + + +def _import_keyDER(extern_key, passphrase): + """Import an RSA key (public or private half), encoded in DER form.""" + + decodings = (_import_pkcs1_private, + _import_pkcs1_public, + _import_subjectPublicKeyInfo, + _import_x509_cert, + _import_pkcs8) + + for decoding in decodings: + try: + return decoding(extern_key, passphrase) + except ValueError: + pass + + raise ValueError("RSA key format is not supported") + + +def _import_openssh_private_rsa(data, password): + + from ._openssh import (import_openssh_private_generic, + read_bytes, read_string, check_padding) + + ssh_name, decrypted = import_openssh_private_generic(data, password) + + if ssh_name != "ssh-rsa": + raise ValueError("This SSH key is not RSA") + + n, decrypted = read_bytes(decrypted) + e, decrypted = read_bytes(decrypted) + d, decrypted = read_bytes(decrypted) + iqmp, decrypted = read_bytes(decrypted) + p, decrypted = read_bytes(decrypted) + q, decrypted = read_bytes(decrypted) + + _, padded = read_string(decrypted) # Comment + check_padding(padded) + + build = [Integer.from_bytes(x) for x in (n, e, d, q, p, iqmp)] + return construct(build) + + +def import_key(extern_key, passphrase=None): + """Import an RSA key (public or private). + + Args: + extern_key (string or byte string): + The RSA key to import. + + The following formats are supported for an RSA **public key**: + + - X.509 certificate (binary or PEM format) + - X.509 ``subjectPublicKeyInfo`` DER SEQUENCE (binary or PEM + encoding) + - `PKCS#1`_ ``RSAPublicKey`` DER SEQUENCE (binary or PEM encoding) + - An OpenSSH line (e.g. the content of ``~/.ssh/id_ecdsa``, ASCII) + + The following formats are supported for an RSA **private key**: + + - PKCS#1 ``RSAPrivateKey`` DER SEQUENCE (binary or PEM encoding) + - `PKCS#8`_ ``PrivateKeyInfo`` or ``EncryptedPrivateKeyInfo`` + DER SEQUENCE (binary or PEM encoding) + - OpenSSH (text format, introduced in `OpenSSH 6.5`_) + + For details about the PEM encoding, see `RFC1421`_/`RFC1423`_. + + passphrase (string or byte string): + For private keys only, the pass phrase that encrypts the key. + + Returns: An RSA key object (:class:`RsaKey`). + + Raises: + ValueError/IndexError/TypeError: + When the given key cannot be parsed (possibly because the pass + phrase is wrong). + + .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt + .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt + .. _`PKCS#1`: http://www.ietf.org/rfc/rfc3447.txt + .. _`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt + .. 
_`OpenSSH 6.5`: https://flak.tedunangst.com/post/new-openssh-key-format-and-bcrypt-pbkdf + """ + + from Crypto.IO import PEM + + extern_key = tobytes(extern_key) + if passphrase is not None: + passphrase = tobytes(passphrase) + + if extern_key.startswith(b'-----BEGIN OPENSSH PRIVATE KEY'): + text_encoded = tostr(extern_key) + openssh_encoded, marker, enc_flag = PEM.decode(text_encoded, passphrase) + result = _import_openssh_private_rsa(openssh_encoded, passphrase) + return result + + if extern_key.startswith(b'-----'): + # This is probably a PEM encoded key. + (der, marker, enc_flag) = PEM.decode(tostr(extern_key), passphrase) + if enc_flag: + passphrase = None + return _import_keyDER(der, passphrase) + + if extern_key.startswith(b'ssh-rsa '): + # This is probably an OpenSSH key + keystring = binascii.a2b_base64(extern_key.split(b' ')[1]) + keyparts = [] + while len(keystring) > 4: + length = struct.unpack(">I", keystring[:4])[0] + keyparts.append(keystring[4:4 + length]) + keystring = keystring[4 + length:] + e = Integer.from_bytes(keyparts[1]) + n = Integer.from_bytes(keyparts[2]) + return construct([n, e]) + + if len(extern_key) > 0 and bord(extern_key[0]) == 0x30: + # This is probably a DER encoded key + return _import_keyDER(extern_key, passphrase) + + raise ValueError("RSA key format is not supported") + + +# Backward compatibility +importKey = import_key + +#: `Object ID`_ for the RSA encryption algorithm. This OID often indicates +#: a generic RSA key, even when such key will be actually used for digital +#: signatures. +#: +#: .. _`Object ID`: http://www.alvestrand.no/objectid/1.2.840.113549.1.1.1.html +oid = "1.2.840.113549.1.1.1" diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/RSA.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/RSA.pyi new file mode 100644 index 000000000..9d3d15704 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/RSA.pyi @@ -0,0 +1,78 @@ +from typing import Callable, Union, Tuple, Optional, overload, Literal + +from Crypto.Math.Numbers import Integer +from Crypto.IO._PBES import ProtParams + +__all__ = ['generate', 'construct', 'import_key', + 'RsaKey', 'oid'] + +RNG = Callable[[int], bytes] + +class RsaKey(object): + def __init__(self, **kwargs: int) -> None: ... + + @property + def n(self) -> int: ... + @property + def e(self) -> int: ... + @property + def d(self) -> int: ... + @property + def p(self) -> int: ... + @property + def q(self) -> int: ... + @property + def u(self) -> int: ... + @property + def invp(self) -> int: ... + @property + def invq(self) -> int: ... + + def size_in_bits(self) -> int: ... + def size_in_bytes(self) -> int: ... + def has_private(self) -> bool: ... + def can_encrypt(self) -> bool: ... # legacy + def can_sign(self) -> bool:... # legacy + def public_key(self) -> RsaKey: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __getstate__(self) -> None: ... + def __repr__(self) -> str: ... + def __str__(self) -> str: ... + + @overload + def export_key(self, + format: Optional[str]="PEM", + passphrase: Optional[str]=None, + pkcs: Optional[int]=1, + protection: Optional[str]=None, + randfunc: Optional[RNG]=None + ) -> bytes: ... + @overload + def export_key(self, *, + format: Optional[str]="PEM", + passphrase: str, + pkcs: Literal[8], + protection: str, + randfunc: Optional[RNG]=None, + prot_params: ProtParams, + ) -> bytes: ... 
+ + # Backward compatibility + exportKey = export_key + publickey = public_key + +Int = Union[int, Integer] + +def generate(bits: int, randfunc: Optional[RNG]=None, e: Optional[int]=65537) -> RsaKey: ... +def construct(rsa_components: Union[Tuple[Int, Int], # n, e + Tuple[Int, Int, Int], # n, e, d + Tuple[Int, Int, Int, Int, Int], # n, e, d, p, q + Tuple[Int, Int, Int, Int, Int, Int]], # n, e, d, p, q, crt_q + consistency_check: Optional[bool]=True) -> RsaKey: ... +def import_key(extern_key: Union[str, bytes], passphrase: Optional[str]=None) -> RsaKey: ... + +# Backward compatibility +importKey = import_key + +oid: str diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/__init__.py new file mode 100644 index 000000000..cf3a23847 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/__init__.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.asn1 import (DerSequence, DerInteger, DerBitString, + DerObjectId, DerNull) + + +def _expand_subject_public_key_info(encoded): + """Parse a SubjectPublicKeyInfo structure. 
+ + It returns a triple with: + * OID (string) + * encoded public key (bytes) + * Algorithm parameters (bytes or None) + """ + + # + # SubjectPublicKeyInfo ::= SEQUENCE { + # algorithm AlgorithmIdentifier, + # subjectPublicKey BIT STRING + # } + # + # AlgorithmIdentifier ::= SEQUENCE { + # algorithm OBJECT IDENTIFIER, + # parameters ANY DEFINED BY algorithm OPTIONAL + # } + # + + spki = DerSequence().decode(encoded, nr_elements=2) + algo = DerSequence().decode(spki[0], nr_elements=(1,2)) + algo_oid = DerObjectId().decode(algo[0]) + spk = DerBitString().decode(spki[1]).value + + if len(algo) == 1: + algo_params = None + else: + try: + DerNull().decode(algo[1]) + algo_params = None + except: + algo_params = algo[1] + + return algo_oid.value, spk, algo_params + + +def _create_subject_public_key_info(algo_oid, public_key, params): + + if params is None: + algorithm = DerSequence([DerObjectId(algo_oid)]) + else: + algorithm = DerSequence([DerObjectId(algo_oid), params]) + + spki = DerSequence([algorithm, + DerBitString(public_key) + ]) + return spki.encode() + + +def _extract_subject_public_key_info(x509_certificate): + """Extract subjectPublicKeyInfo from a DER X.509 certificate.""" + + certificate = DerSequence().decode(x509_certificate, nr_elements=3) + tbs_certificate = DerSequence().decode(certificate[0], + nr_elements=range(6, 11)) + + index = 5 + try: + tbs_certificate[0] + 1 + # Version not present + version = 1 + except TypeError: + version = DerInteger(explicit=0).decode(tbs_certificate[0]).value + if version not in (2, 3): + raise ValueError("Incorrect X.509 certificate version") + index = 6 + + return tbs_certificate[index] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/__init__.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/__init__.pyi new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_ec_ws.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_ec_ws.abi3.so new file mode 100755 index 000000000..b1272d2e5 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_ec_ws.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_ed25519.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_ed25519.abi3.so new file mode 100755 index 000000000..e047bcb97 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_ed25519.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_ed448.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_ed448.abi3.so new file mode 100755 index 000000000..da7209a83 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_ed448.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_openssh.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_openssh.py new file mode 100644 index 000000000..88dacfc27 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_openssh.py @@ -0,0 +1,135 @@ +# =================================================================== +# +# Copyright (c) 2019, Helder Eijs +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import struct + +from Crypto.Cipher import AES +from Crypto.Hash import SHA512 +from Crypto.Protocol.KDF import _bcrypt_hash +from Crypto.Util.strxor import strxor +from Crypto.Util.py3compat import tostr, bchr, bord + + +def read_int4(data): + if len(data) < 4: + raise ValueError("Insufficient data") + value = struct.unpack(">I", data[:4])[0] + return value, data[4:] + + +def read_bytes(data): + size, data = read_int4(data) + if len(data) < size: + raise ValueError("Insufficient data (V)") + return data[:size], data[size:] + + +def read_string(data): + s, d = read_bytes(data) + return tostr(s), d + + +def check_padding(pad): + for v, x in enumerate(pad): + if bord(x) != ((v + 1) & 0xFF): + raise ValueError("Incorrect padding") + + +def import_openssh_private_generic(data, password): + # https://cvsweb.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/PROTOCOL.key?annotate=HEAD + # https://github.com/openssh/openssh-portable/blob/master/sshkey.c + # https://coolaj86.com/articles/the-openssh-private-key-format/ + # https://coolaj86.com/articles/the-ssh-public-key-format/ + + if not data.startswith(b'openssh-key-v1\x00'): + raise ValueError("Incorrect magic value") + data = data[15:] + + ciphername, data = read_string(data) + kdfname, data = read_string(data) + kdfoptions, data = read_bytes(data) + number_of_keys, data = read_int4(data) + + if number_of_keys != 1: + raise ValueError("We only handle 1 key at a time") + + _, data = read_string(data) # Public key + encrypted, data = read_bytes(data) + if data: + raise ValueError("Too much data") + + if len(encrypted) % 8 != 0: + raise ValueError("Incorrect payload length") + + # Decrypt if necessary + if ciphername == 'none': + decrypted = encrypted + else: + if (ciphername, kdfname) != ('aes256-ctr', 'bcrypt'): + raise ValueError("Unsupported encryption scheme %s/%s" % (ciphername, kdfname)) + + salt, kdfoptions = read_bytes(kdfoptions) + iterations, kdfoptions = read_int4(kdfoptions) + + if len(salt) != 16: + raise ValueError("Incorrect salt length") + if kdfoptions: + raise ValueError("Too much data in kdfoptions") + + pwd_sha512 = SHA512.new(password).digest() + # We need 32+16 = 48 bytes, 
therefore 2 bcrypt outputs are sufficient + stripes = [] + constant = b"OxychromaticBlowfishSwatDynamite" + for count in range(1, 3): + salt_sha512 = SHA512.new(salt + struct.pack(">I", count)).digest() + out_le = _bcrypt_hash(pwd_sha512, 6, salt_sha512, constant, False) + out = struct.pack("<IIIIIIII", *struct.unpack(">IIIIIIII", out_le)) + acc = bytearray(out) + for _ in range(1, iterations): + out_le = _bcrypt_hash(pwd_sha512, 6, SHA512.new(out).digest(), constant, False) + out = struct.pack("<IIIIIIII", *struct.unpack(">IIIIIIII", out_le)) + strxor(acc, out, output=acc) + stripes.append(acc[:24]) + + result = b"".join([bchr(a)+bchr(b) for (a, b) in zip(*stripes)]) + + cipher = AES.new(result[:32], + AES.MODE_CTR, + nonce=b"", + initial_value=result[32:32+16]) + decrypted = cipher.decrypt(encrypted) + + checkint1, decrypted = read_int4(decrypted) + checkint2, decrypted = read_int4(decrypted) + if checkint1 != checkint2: + raise ValueError("Incorrect checksum") + ssh_name, decrypted = read_string(decrypted) + + return ssh_name, decrypted diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_openssh.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_openssh.pyi new file mode 100644 index 000000000..15f3677ad --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_openssh.pyi @@ -0,0 +1,7 @@ +from typing import Tuple + +def read_int4(data: bytes) -> Tuple[int, bytes]: ... +def read_bytes(data: bytes) -> Tuple[bytes, bytes]: ... +def read_string(data: bytes) -> Tuple[str, bytes]: ... +def check_padding(pad: bytes) -> None: ... +def import_openssh_private_generic(data: bytes, password: bytes) -> Tuple[str, bytes]: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_x25519.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_x25519.abi3.so new file mode 100755 index 000000000..dbb00d591 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/PublicKey/_x25519.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Random/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Random/__init__.py new file mode 100644 index 000000000..0f83a07e3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Random/__init__.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# +# Random/__init__.py : PyCrypto random number generation +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE.
+# =================================================================== + +__all__ = ['new', 'get_random_bytes'] + +from os import urandom + +class _UrandomRNG(object): + + def read(self, n): + """Return a random byte string of the desired size.""" + return urandom(n) + + def flush(self): + """Method provided for backward compatibility only.""" + pass + + def reinit(self): + """Method provided for backward compatibility only.""" + pass + + def close(self): + """Method provided for backward compatibility only.""" + pass + + +def new(*args, **kwargs): + """Return a file-like object that outputs cryptographically random bytes.""" + return _UrandomRNG() + + +def atfork(): + pass + + +#: Function that returns a random byte string of the desired size. +get_random_bytes = urandom + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Random/__init__.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Random/__init__.pyi new file mode 100644 index 000000000..ddc5b9b01 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Random/__init__.pyi @@ -0,0 +1,19 @@ +from typing import Any + +__all__ = ['new', 'get_random_bytes'] + +from os import urandom + +class _UrandomRNG(object): + + def read(self, n: int) -> bytes:... + def flush(self) -> None: ... + def reinit(self) -> None: ... + def close(self) -> None: ... + +def new(*args: Any, **kwargs: Any) -> _UrandomRNG: ... + +def atfork() -> None: ... + +get_random_bytes = urandom + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Random/random.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Random/random.py new file mode 100644 index 000000000..5389b3bbe --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Random/random.py @@ -0,0 +1,138 @@ +# -*- coding: utf-8 -*- +# +# Random/random.py : Strong alternative for the standard 'random' module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +__all__ = ['StrongRandom', 'getrandbits', 'randrange', 'randint', 'choice', 'shuffle', 'sample'] + +from Crypto import Random + +from Crypto.Util.py3compat import is_native_int + +class StrongRandom(object): + def __init__(self, rng=None, randfunc=None): + if randfunc is None and rng is None: + self._randfunc = None + elif randfunc is not None and rng is None: + self._randfunc = randfunc + elif randfunc is None and rng is not None: + self._randfunc = rng.read + else: + raise ValueError("Cannot specify both 'rng' and 'randfunc'") + + def getrandbits(self, k): + """Return an integer with k random bits.""" + + if self._randfunc is None: + self._randfunc = Random.new().read + mask = (1 << k) - 1 + return mask & bytes_to_long(self._randfunc(ceil_div(k, 8))) + + def randrange(self, *args): + """randrange([start,] stop[, step]): + Return a randomly-selected element from range(start, stop, step).""" + if len(args) == 3: + (start, stop, step) = args + elif len(args) == 2: + (start, stop) = args + step = 1 + elif len(args) == 1: + (stop,) = args + start = 0 + step = 1 + else: + raise TypeError("randrange expected at most 3 arguments, got %d" % (len(args),)) + if (not is_native_int(start) or not is_native_int(stop) or not + is_native_int(step)): + raise TypeError("randrange requires integer arguments") + if step == 0: + raise ValueError("randrange step argument must not be zero") + + num_choices = ceil_div(stop - start, step) + if num_choices < 0: + num_choices = 0 + if num_choices < 1: + raise ValueError("empty range for randrange(%r, %r, %r)" % (start, stop, step)) + + # Pick a random number in the range of possible numbers + r = num_choices + while r >= num_choices: + r = self.getrandbits(size(num_choices)) + + return start + (step * r) + + def randint(self, a, b): + """Return a random integer N such that a <= N <= b.""" + if not is_native_int(a) or not is_native_int(b): + raise TypeError("randint requires integer arguments") + N = self.randrange(a, b+1) + assert a <= N <= b + return N + + def choice(self, seq): + """Return a random element from a (non-empty) sequence. + + If the seqence is empty, raises IndexError. + """ + if len(seq) == 0: + raise IndexError("empty sequence") + return seq[self.randrange(len(seq))] + + def shuffle(self, x): + """Shuffle the sequence in place.""" + # Fisher-Yates shuffle. O(n) + # See http://en.wikipedia.org/wiki/Fisher-Yates_shuffle + # Working backwards from the end of the array, we choose a random item + # from the remaining items until all items have been chosen. 
+ for i in range(len(x)-1, 0, -1): # iterate from len(x)-1 downto 1 + j = self.randrange(0, i+1) # choose random j such that 0 <= j <= i + x[i], x[j] = x[j], x[i] # exchange x[i] and x[j] + + def sample(self, population, k): + """Return a k-length list of unique elements chosen from the population sequence.""" + + num_choices = len(population) + if k > num_choices: + raise ValueError("sample larger than population") + + retval = [] + selected = {} # we emulate a set using a dict here + for i in range(k): + r = None + while r is None or r in selected: + r = self.randrange(num_choices) + retval.append(population[r]) + selected[r] = 1 + return retval + +_r = StrongRandom() +getrandbits = _r.getrandbits +randrange = _r.randrange +randint = _r.randint +choice = _r.choice +shuffle = _r.shuffle +sample = _r.sample + +# These are at the bottom to avoid problems with recursive imports +from Crypto.Util.number import ceil_div, bytes_to_long, long_to_bytes, size + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Random/random.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Random/random.pyi new file mode 100644 index 000000000..9b7cf7eeb --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Random/random.pyi @@ -0,0 +1,22 @@ +from typing import Callable, Tuple, Union, Sequence, Any, Optional, TypeVar + +__all__ = ['StrongRandom', 'getrandbits', 'randrange', 'randint', 'choice', 'shuffle', 'sample'] + +T = TypeVar('T') + +class StrongRandom(object): + def __init__(self, rng: Optional[Any]=None, randfunc: Optional[Callable]=None) -> None: ... # TODO What is rng? + def getrandbits(self, k: int) -> int: ... + def randrange(self, start: int, stop: int = ..., step: int = ...) -> int: ... + def randint(self, a: int, b: int) -> int: ... + def choice(self, seq: Sequence[T]) -> T: ... + def shuffle(self, x: Sequence) -> None: ... + def sample(self, population: Sequence, k: int) -> list: ... + +_r = StrongRandom() +getrandbits = _r.getrandbits +randrange = _r.randrange +randint = _r.randint +choice = _r.choice +shuffle = _r.shuffle +sample = _r.sample diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/__init__.py new file mode 100644 index 000000000..05fc139ad --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/__init__.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/__init__.py: Self-test for cipher modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test for cipher modules""" + +__revision__ = "$Id$" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Cipher import test_AES; tests += test_AES.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_ARC2; tests += test_ARC2.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_ARC4; tests += test_ARC4.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_Blowfish; tests += test_Blowfish.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_CAST; tests += test_CAST.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_DES3; tests += test_DES3.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_DES; tests += test_DES.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_Salsa20; tests += test_Salsa20.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_ChaCha20; tests += test_ChaCha20.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_ChaCha20_Poly1305; tests += test_ChaCha20_Poly1305.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_pkcs1_15; tests += test_pkcs1_15.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_pkcs1_oaep; tests += test_pkcs1_oaep.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_OCB; tests += test_OCB.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_CBC; tests += test_CBC.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_CFB; tests += test_CFB.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_OpenPGP; tests += test_OpenPGP.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_OFB; tests += test_OFB.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_CTR; tests += test_CTR.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_CCM; tests += test_CCM.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_EAX; tests += test_EAX.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_GCM; tests += test_GCM.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_SIV; tests += test_SIV.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/common.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/common.py new file mode 100644 index 000000000..c5bc755ab --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/common.py @@ -0,0 +1,510 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/common.py: Common code for Crypto.SelfTest.Hash +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-testing for PyCrypto hash modules""" + +import unittest +from binascii import a2b_hex, b2a_hex, hexlify + +from Crypto.Util.py3compat import b +from Crypto.Util.strxor import strxor_c + +class _NoDefault: pass # sentinel object +def _extract(d, k, default=_NoDefault): + """Get an item from a dictionary, and remove it from the dictionary.""" + try: + retval = d[k] + except KeyError: + if default is _NoDefault: + raise + return default + del d[k] + return retval + +# Generic cipher test case +class CipherSelfTest(unittest.TestCase): + + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + + # Extract the parameters + params = params.copy() + self.description = _extract(params, 'description') + self.key = b(_extract(params, 'key')) + self.plaintext = b(_extract(params, 'plaintext')) + self.ciphertext = b(_extract(params, 'ciphertext')) + self.module_name = _extract(params, 'module_name', None) + self.assoc_data = _extract(params, 'assoc_data', None) + self.mac = _extract(params, 'mac', None) + if self.assoc_data: + self.mac = b(self.mac) + + mode = _extract(params, 'mode', None) + self.mode_name = str(mode) + + if mode is not None: + # Block cipher + self.mode = getattr(self.module, "MODE_" + mode) + + self.iv = _extract(params, 'iv', None) + if self.iv is None: + self.iv = _extract(params, 'nonce', None) + if self.iv is not None: + self.iv = b(self.iv) + + else: + # Stream cipher + self.mode = None + self.iv = _extract(params, 'iv', None) + if self.iv is not None: + self.iv = b(self.iv) + + self.extra_params = params + + def shortDescription(self): + return self.description + + def _new(self): + params = self.extra_params.copy() + key = a2b_hex(self.key) + + old_style = [] + if self.mode is not None: + old_style = [ self.mode ] + if self.iv is not None: + old_style += [ a2b_hex(self.iv) ] + + return self.module.new(key, *old_style, **params) + + def isMode(self, name): + if not hasattr(self.module, "MODE_"+name): + return False + return self.mode == getattr(self.module, "MODE_"+name) + + def runTest(self): + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + assoc_data = [] + if self.assoc_data: + assoc_data = [ a2b_hex(b(x)) for x in self.assoc_data] + + ct = None + pt = None + + # + # Repeat the same encryption or decryption twice and verify + # that the result is always the same + # + for i in range(2): + cipher = self._new() + decipher = self._new() + + # Only AEAD modes + for comp in assoc_data: + cipher.update(comp) + decipher.update(comp) + + ctX = b2a_hex(cipher.encrypt(plaintext)) + ptX = b2a_hex(decipher.decrypt(ciphertext)) + + if ct: + self.assertEqual(ct, ctX) + 
self.assertEqual(pt, ptX) + ct, pt = ctX, ptX + + self.assertEqual(self.ciphertext, ct) # encrypt + self.assertEqual(self.plaintext, pt) # decrypt + + if self.mac: + mac = b2a_hex(cipher.digest()) + self.assertEqual(self.mac, mac) + decipher.verify(a2b_hex(self.mac)) + +class CipherStreamingSelfTest(CipherSelfTest): + + def shortDescription(self): + desc = self.module_name + if self.mode is not None: + desc += " in %s mode" % (self.mode_name,) + return "%s should behave like a stream cipher" % (desc,) + + def runTest(self): + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + + # The cipher should work like a stream cipher + + # Test counter mode encryption, 3 bytes at a time + ct3 = [] + cipher = self._new() + for i in range(0, len(plaintext), 3): + ct3.append(cipher.encrypt(plaintext[i:i+3])) + ct3 = b2a_hex(b("").join(ct3)) + self.assertEqual(self.ciphertext, ct3) # encryption (3 bytes at a time) + + # Test counter mode decryption, 3 bytes at a time + pt3 = [] + cipher = self._new() + for i in range(0, len(ciphertext), 3): + pt3.append(cipher.encrypt(ciphertext[i:i+3])) + # PY3K: This is meant to be text, do not change to bytes (data) + pt3 = b2a_hex(b("").join(pt3)) + self.assertEqual(self.plaintext, pt3) # decryption (3 bytes at a time) + + +class RoundtripTest(unittest.TestCase): + def __init__(self, module, params): + from Crypto import Random + unittest.TestCase.__init__(self) + self.module = module + self.iv = Random.get_random_bytes(module.block_size) + self.key = b(params['key']) + self.plaintext = 100 * b(params['plaintext']) + self.module_name = params.get('module_name', None) + + def shortDescription(self): + return """%s .decrypt() output of .encrypt() should not be garbled""" % (self.module_name,) + + def runTest(self): + + ## ECB mode + mode = self.module.MODE_ECB + encryption_cipher = self.module.new(a2b_hex(self.key), mode) + ciphertext = encryption_cipher.encrypt(self.plaintext) + decryption_cipher = self.module.new(a2b_hex(self.key), mode) + decrypted_plaintext = decryption_cipher.decrypt(ciphertext) + self.assertEqual(self.plaintext, decrypted_plaintext) + + +class IVLengthTest(unittest.TestCase): + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + self.key = b(params['key']) + + def shortDescription(self): + return "Check that all modes except MODE_ECB and MODE_CTR require an IV of the proper length" + + def runTest(self): + self.assertRaises(TypeError, self.module.new, a2b_hex(self.key), + self.module.MODE_ECB, b("")) + + def _dummy_counter(self): + return "\0" * self.module.block_size + + +class NoDefaultECBTest(unittest.TestCase): + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + self.key = b(params['key']) + + def runTest(self): + self.assertRaises(TypeError, self.module.new, a2b_hex(self.key)) + + +class BlockSizeTest(unittest.TestCase): + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + self.key = a2b_hex(b(params['key'])) + + def runTest(self): + cipher = self.module.new(self.key, self.module.MODE_ECB) + self.assertEqual(cipher.block_size, self.module.block_size) + + +class ByteArrayTest(unittest.TestCase): + """Verify we can use bytearray's for encrypting and decrypting""" + + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + + # Extract the parameters + params = params.copy() + self.description = _extract(params, 'description') + self.key = 
b(_extract(params, 'key')) + self.plaintext = b(_extract(params, 'plaintext')) + self.ciphertext = b(_extract(params, 'ciphertext')) + self.module_name = _extract(params, 'module_name', None) + self.assoc_data = _extract(params, 'assoc_data', None) + self.mac = _extract(params, 'mac', None) + if self.assoc_data: + self.mac = b(self.mac) + + mode = _extract(params, 'mode', None) + self.mode_name = str(mode) + + if mode is not None: + # Block cipher + self.mode = getattr(self.module, "MODE_" + mode) + + self.iv = _extract(params, 'iv', None) + if self.iv is None: + self.iv = _extract(params, 'nonce', None) + if self.iv is not None: + self.iv = b(self.iv) + else: + # Stream cipher + self.mode = None + self.iv = _extract(params, 'iv', None) + if self.iv is not None: + self.iv = b(self.iv) + + self.extra_params = params + + def _new(self): + params = self.extra_params.copy() + key = a2b_hex(self.key) + + old_style = [] + if self.mode is not None: + old_style = [ self.mode ] + if self.iv is not None: + old_style += [ a2b_hex(self.iv) ] + + return self.module.new(key, *old_style, **params) + + def runTest(self): + + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + assoc_data = [] + if self.assoc_data: + assoc_data = [ bytearray(a2b_hex(b(x))) for x in self.assoc_data] + + cipher = self._new() + decipher = self._new() + + # Only AEAD modes + for comp in assoc_data: + cipher.update(comp) + decipher.update(comp) + + ct = b2a_hex(cipher.encrypt(bytearray(plaintext))) + pt = b2a_hex(decipher.decrypt(bytearray(ciphertext))) + + self.assertEqual(self.ciphertext, ct) # encrypt + self.assertEqual(self.plaintext, pt) # decrypt + + if self.mac: + mac = b2a_hex(cipher.digest()) + self.assertEqual(self.mac, mac) + decipher.verify(bytearray(a2b_hex(self.mac))) + + +class MemoryviewTest(unittest.TestCase): + """Verify we can use memoryviews for encrypting and decrypting""" + + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + + # Extract the parameters + params = params.copy() + self.description = _extract(params, 'description') + self.key = b(_extract(params, 'key')) + self.plaintext = b(_extract(params, 'plaintext')) + self.ciphertext = b(_extract(params, 'ciphertext')) + self.module_name = _extract(params, 'module_name', None) + self.assoc_data = _extract(params, 'assoc_data', None) + self.mac = _extract(params, 'mac', None) + if self.assoc_data: + self.mac = b(self.mac) + + mode = _extract(params, 'mode', None) + self.mode_name = str(mode) + + if mode is not None: + # Block cipher + self.mode = getattr(self.module, "MODE_" + mode) + + self.iv = _extract(params, 'iv', None) + if self.iv is None: + self.iv = _extract(params, 'nonce', None) + if self.iv is not None: + self.iv = b(self.iv) + else: + # Stream cipher + self.mode = None + self.iv = _extract(params, 'iv', None) + if self.iv is not None: + self.iv = b(self.iv) + + self.extra_params = params + + def _new(self): + params = self.extra_params.copy() + key = a2b_hex(self.key) + + old_style = [] + if self.mode is not None: + old_style = [ self.mode ] + if self.iv is not None: + old_style += [ a2b_hex(self.iv) ] + + return self.module.new(key, *old_style, **params) + + def runTest(self): + + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + assoc_data = [] + if self.assoc_data: + assoc_data = [ memoryview(a2b_hex(b(x))) for x in self.assoc_data] + + cipher = self._new() + decipher = self._new() + + # Only AEAD modes + for comp in assoc_data: + 
cipher.update(comp) + decipher.update(comp) + + ct = b2a_hex(cipher.encrypt(memoryview(plaintext))) + pt = b2a_hex(decipher.decrypt(memoryview(ciphertext))) + + self.assertEqual(self.ciphertext, ct) # encrypt + self.assertEqual(self.plaintext, pt) # decrypt + + if self.mac: + mac = b2a_hex(cipher.digest()) + self.assertEqual(self.mac, mac) + decipher.verify(memoryview(a2b_hex(self.mac))) + + +def make_block_tests(module, module_name, test_data, additional_params=dict()): + tests = [] + extra_tests_added = False + for i in range(len(test_data)): + row = test_data[i] + + # Build the "params" dictionary with + # - plaintext + # - ciphertext + # - key + # - mode (default is ECB) + # - (optionally) description + # - (optionally) any other parameter that this cipher mode requires + params = {} + if len(row) == 3: + (params['plaintext'], params['ciphertext'], params['key']) = row + elif len(row) == 4: + (params['plaintext'], params['ciphertext'], params['key'], params['description']) = row + elif len(row) == 5: + (params['plaintext'], params['ciphertext'], params['key'], params['description'], extra_params) = row + params.update(extra_params) + else: + raise AssertionError("Unsupported tuple size %d" % (len(row),)) + + if not "mode" in params: + params["mode"] = "ECB" + + # Build the display-name for the test + p2 = params.copy() + p_key = _extract(p2, 'key') + p_plaintext = _extract(p2, 'plaintext') + p_ciphertext = _extract(p2, 'ciphertext') + p_mode = _extract(p2, 'mode') + p_description = _extract(p2, 'description', None) + + if p_description is not None: + description = p_description + elif p_mode == 'ECB' and not p2: + description = "p=%s, k=%s" % (p_plaintext, p_key) + else: + description = "p=%s, k=%s, %r" % (p_plaintext, p_key, p2) + name = "%s #%d: %s" % (module_name, i+1, description) + params['description'] = name + params['module_name'] = module_name + params.update(additional_params) + + # Add extra test(s) to the test suite before the current test + if not extra_tests_added: + tests += [ + RoundtripTest(module, params), + IVLengthTest(module, params), + NoDefaultECBTest(module, params), + ByteArrayTest(module, params), + BlockSizeTest(module, params), + ] + extra_tests_added = True + + # Add the current test to the test suite + tests.append(CipherSelfTest(module, params)) + + return tests + +def make_stream_tests(module, module_name, test_data): + tests = [] + extra_tests_added = False + for i in range(len(test_data)): + row = test_data[i] + + # Build the "params" dictionary + params = {} + if len(row) == 3: + (params['plaintext'], params['ciphertext'], params['key']) = row + elif len(row) == 4: + (params['plaintext'], params['ciphertext'], params['key'], params['description']) = row + elif len(row) == 5: + (params['plaintext'], params['ciphertext'], params['key'], params['description'], extra_params) = row + params.update(extra_params) + else: + raise AssertionError("Unsupported tuple size %d" % (len(row),)) + + # Build the display-name for the test + p2 = params.copy() + p_key = _extract(p2, 'key') + p_plaintext = _extract(p2, 'plaintext') + p_ciphertext = _extract(p2, 'ciphertext') + p_description = _extract(p2, 'description', None) + + if p_description is not None: + description = p_description + elif not p2: + description = "p=%s, k=%s" % (p_plaintext, p_key) + else: + description = "p=%s, k=%s, %r" % (p_plaintext, p_key, p2) + name = "%s #%d: %s" % (module_name, i+1, description) + params['description'] = name + params['module_name'] = module_name + + # Add extra test(s) to 
the test suite before the current test + if not extra_tests_added: + tests += [ + ByteArrayTest(module, params), + ] + + tests.append(MemoryviewTest(module, params)) + extra_tests_added = True + + # Add the test to the test suite + tests.append(CipherSelfTest(module, params)) + tests.append(CipherStreamingSelfTest(module, params)) + return tests + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_AES.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_AES.py new file mode 100644 index 000000000..116deec64 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_AES.py @@ -0,0 +1,1351 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/AES.py: Self-test for the AES cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.AES""" + +from __future__ import print_function + +import unittest +from Crypto.Hash import SHA256 +from Crypto.Cipher import AES +from Crypto.Util.py3compat import * +from binascii import hexlify + +# This is a list of (plaintext, ciphertext, key[, description[, params]]) tuples. 
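+# Editor's note (a hedged illustration added by the editor, not part of the upstream file): +# these rows are presumably consumed by make_block_tests() from +# Crypto.SelfTest.Cipher.common (shown earlier in this diff); a 4-tuple row unpacks +# roughly as +# plaintext, ciphertext, key, description = row +# params = {'plaintext': plaintext, 'ciphertext': ciphertext, 'key': key, +# 'description': description} +# after which params['mode'] defaults to 'ECB' when no fifth 'extra params' element sets it.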
+test_data = [ + # FIPS PUB 197 test vectors + # http://csrc.nist.gov/publications/fips/fips197/fips-197.pdf + + ('00112233445566778899aabbccddeeff', '69c4e0d86a7b0430d8cdb78070b4c55a', + '000102030405060708090a0b0c0d0e0f', 'FIPS 197 C.1 (AES-128)'), + + ('00112233445566778899aabbccddeeff', 'dda97ca4864cdfe06eaf70a0ec0d7191', + '000102030405060708090a0b0c0d0e0f1011121314151617', + 'FIPS 197 C.2 (AES-192)'), + + ('00112233445566778899aabbccddeeff', '8ea2b7ca516745bfeafc49904b496089', + '000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + 'FIPS 197 C.3 (AES-256)'), + + # Rijndael128 test vectors + # Downloaded 2008-09-13 from + # http://www.iaik.tugraz.at/Research/krypto/AES/old/~rijmen/rijndael/testvalues.tar.gz + + # ecb_tbl.txt, KEYSIZE=128 + ('506812a45f08c889b97f5980038b8359', 'd8f532538289ef7d06b506a4fd5be9c9', + '00010203050607080a0b0c0d0f101112', + 'ecb-tbl-128: I=1'), + ('5c6d71ca30de8b8b00549984d2ec7d4b', '59ab30f4d4ee6e4ff9907ef65b1fb68c', + '14151617191a1b1c1e1f202123242526', + 'ecb-tbl-128: I=2'), + ('53f3f4c64f8616e4e7c56199f48f21f6', 'bf1ed2fcb2af3fd41443b56d85025cb1', + '28292a2b2d2e2f30323334353738393a', + 'ecb-tbl-128: I=3'), + ('a1eb65a3487165fb0f1c27ff9959f703', '7316632d5c32233edcb0780560eae8b2', + '3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-128: I=4'), + ('3553ecf0b1739558b08e350a98a39bfa', '408c073e3e2538072b72625e68b8364b', + '50515253555657585a5b5c5d5f606162', + 'ecb-tbl-128: I=5'), + ('67429969490b9711ae2b01dc497afde8', 'e1f94dfa776597beaca262f2f6366fea', + '64656667696a6b6c6e6f707173747576', + 'ecb-tbl-128: I=6'), + ('93385c1f2aec8bed192f5a8e161dd508', 'f29e986c6a1c27d7b29ffd7ee92b75f1', + '78797a7b7d7e7f80828384858788898a', + 'ecb-tbl-128: I=7'), + ('b5bf946be19beb8db3983b5f4c6e8ddb', '131c886a57f8c2e713aba6955e2b55b5', + '8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-128: I=8'), + ('41321ee10e21bd907227c4450ff42324', 'd2ab7662df9b8c740210e5eeb61c199d', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-128: I=9'), + ('00a82f59c91c8486d12c0a80124f6089', '14c10554b2859c484cab5869bbe7c470', + 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-128: I=10'), + ('7ce0fd076754691b4bbd9faf8a1372fe', 'db4d498f0a49cf55445d502c1f9ab3b5', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', + 'ecb-tbl-128: I=11'), + ('23605a8243d07764541bc5ad355b3129', '6d96fef7d66590a77a77bb2056667f7f', + 'dcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-128: I=12'), + ('12a8cfa23ea764fd876232b4e842bc44', '316fb68edba736c53e78477bf913725c', + 'f0f1f2f3f5f6f7f8fafbfcfdfe010002', + 'ecb-tbl-128: I=13'), + ('bcaf32415e8308b3723e5fdd853ccc80', '6936f2b93af8397fd3a771fc011c8c37', + '04050607090a0b0c0e0f101113141516', + 'ecb-tbl-128: I=14'), + ('89afae685d801ad747ace91fc49adde0', 'f3f92f7a9c59179c1fcc2c2ba0b082cd', + '2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-128: I=15'), + ('f521d07b484357c4a69e76124a634216', '6a95ea659ee3889158e7a9152ff04ebc', + '40414243454647484a4b4c4d4f505152', + 'ecb-tbl-128: I=16'), + ('3e23b3bc065bcc152407e23896d77783', '1959338344e945670678a5d432c90b93', + '54555657595a5b5c5e5f606163646566', + 'ecb-tbl-128: I=17'), + ('79f0fba002be1744670e7e99290d8f52', 'e49bddd2369b83ee66e6c75a1161b394', + '68696a6b6d6e6f70727374757778797a', + 'ecb-tbl-128: I=18'), + ('da23fe9d5bd63e1d72e3dafbe21a6c2a', 'd3388f19057ff704b70784164a74867d', + '7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-128: I=19'), + ('e3f5698ba90b6a022efd7db2c7e6c823', '23aa03e2d5e4cd24f3217e596480d1e1', + 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-128: I=20'), + ('bdc2691d4f1b73d2700679c3bcbf9c6e', 
'c84113d68b666ab2a50a8bdb222e91b9', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2', + 'ecb-tbl-128: I=21'), + ('ba74e02093217ee1ba1b42bd5624349a', 'ac02403981cd4340b507963db65cb7b6', + '08090a0b0d0e0f10121314151718191a', + 'ecb-tbl-128: I=22'), + ('b5c593b5851c57fbf8b3f57715e8f680', '8d1299236223359474011f6bf5088414', + '6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-128: I=23'), + ('3da9bd9cec072381788f9387c3bbf4ee', '5a1d6ab8605505f7977e55b9a54d9b90', + '80818283858687888a8b8c8d8f909192', + 'ecb-tbl-128: I=24'), + ('4197f3051121702ab65d316b3c637374', '72e9c2d519cf555e4208805aabe3b258', + '94959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-128: I=25'), + ('9f46c62ec4f6ee3f6e8c62554bc48ab7', 'a8f3e81c4a23a39ef4d745dffe026e80', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', + 'ecb-tbl-128: I=26'), + ('0220673fe9e699a4ebc8e0dbeb6979c8', '546f646449d31458f9eb4ef5483aee6c', + 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-128: I=27'), + ('b2b99171337ded9bc8c2c23ff6f18867', '4dbe4bc84ac797c0ee4efb7f1a07401c', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', + 'ecb-tbl-128: I=28'), + ('a7facf4e301e984e5efeefd645b23505', '25e10bfb411bbd4d625ac8795c8ca3b3', + 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-128: I=29'), + ('f7c762e4a9819160fd7acfb6c4eedcdd', '315637405054ec803614e43def177579', + 'f8f9fafbfdfefe00020304050708090a', + 'ecb-tbl-128: I=30'), + ('9b64fc21ea08709f4915436faa70f1be', '60c5bc8a1410247295c6386c59e572a8', + '0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-128: I=31'), + ('52af2c3de07ee6777f55a4abfc100b3f', '01366fc8ca52dfe055d6a00a76471ba6', + '20212223252627282a2b2c2d2f303132', + 'ecb-tbl-128: I=32'), + ('2fca001224386c57aa3f968cbe2c816f', 'ecc46595516ec612449c3f581e7d42ff', + '34353637393a3b3c3e3f404143444546', + 'ecb-tbl-128: I=33'), + ('4149c73658a4a9c564342755ee2c132f', '6b7ffe4c602a154b06ee9c7dab5331c9', + '48494a4b4d4e4f50525354555758595a', + 'ecb-tbl-128: I=34'), + ('af60005a00a1772f7c07a48a923c23d2', '7da234c14039a240dd02dd0fbf84eb67', + '5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-128: I=35'), + ('6fccbc28363759914b6f0280afaf20c6', 'c7dc217d9e3604ffe7e91f080ecd5a3a', + '70717273757677787a7b7c7d7f808182', + 'ecb-tbl-128: I=36'), + ('7d82a43ddf4fefa2fc5947499884d386', '37785901863f5c81260ea41e7580cda5', + '84858687898a8b8c8e8f909193949596', + 'ecb-tbl-128: I=37'), + ('5d5a990eaab9093afe4ce254dfa49ef9', 'a07b9338e92ed105e6ad720fccce9fe4', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aa', + 'ecb-tbl-128: I=38'), + ('4cd1e2fd3f4434b553aae453f0ed1a02', 'ae0fb9722418cc21a7da816bbc61322c', + 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-128: I=39'), + ('5a2c9a9641d4299125fa1b9363104b5e', 'c826a193080ff91ffb21f71d3373c877', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', + 'ecb-tbl-128: I=40'), + ('b517fe34c0fa217d341740bfd4fe8dd4', '1181b11b0e494e8d8b0aa6b1d5ac2c48', + 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-128: I=41'), + ('014baf2278a69d331d5180103643e99a', '6743c3d1519ab4f2cd9a78ab09a511bd', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fa', + 'ecb-tbl-128: I=42'), + ('b529bd8164f20d0aa443d4932116841c', 'dc55c076d52bacdf2eefd952946a439d', + 'fcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-128: I=43'), + ('2e596dcbb2f33d4216a1176d5bd1e456', '711b17b590ffc72b5c8e342b601e8003', + '10111213151617181a1b1c1d1f202122', + 'ecb-tbl-128: I=44'), + ('7274a1ea2b7ee2424e9a0e4673689143', '19983bb0950783a537e1339f4aa21c75', + '24252627292a2b2c2e2f303133343536', + 'ecb-tbl-128: I=45'), + ('ae20020bd4f13e9d90140bee3b5d26af', '3ba7762e15554169c0f4fa39164c410c', + '38393a3b3d3e3f40424344454748494a', + 'ecb-tbl-128: I=46'), + 
('baac065da7ac26e855e79c8849d75a02', 'a0564c41245afca7af8aa2e0e588ea89', + '4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-128: I=47'), + ('7c917d8d1d45fab9e2540e28832540cc', '5e36a42a2e099f54ae85ecd92e2381ed', + '60616263656667686a6b6c6d6f707172', + 'ecb-tbl-128: I=48'), + ('bde6f89e16daadb0e847a2a614566a91', '770036f878cd0f6ca2268172f106f2fe', + '74757677797a7b7c7e7f808183848586', + 'ecb-tbl-128: I=49'), + ('c9de163725f1f5be44ebb1db51d07fbc', '7e4e03908b716116443ccf7c94e7c259', + '88898a8b8d8e8f90929394959798999a', + 'ecb-tbl-128: I=50'), + ('3af57a58f0c07dffa669572b521e2b92', '482735a48c30613a242dd494c7f9185d', + '9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-128: I=51'), + ('3d5ebac306dde4604f1b4fbbbfcdae55', 'b4c0f6c9d4d7079addf9369fc081061d', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', + 'ecb-tbl-128: I=52'), + ('c2dfa91bceb76a1183c995020ac0b556', 'd5810fe0509ac53edcd74f89962e6270', + 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-128: I=53'), + ('c70f54305885e9a0746d01ec56c8596b', '03f17a16b3f91848269ecdd38ebb2165', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9ea', + 'ecb-tbl-128: I=54'), + ('c4f81b610e98012ce000182050c0c2b2', 'da1248c3180348bad4a93b4d9856c9df', + 'ecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-128: I=55'), + ('eaab86b1d02a95d7404eff67489f97d4', '3d10d7b63f3452c06cdf6cce18be0c2c', + '00010203050607080a0b0c0d0f101112', + 'ecb-tbl-128: I=56'), + ('7c55bdb40b88870b52bec3738de82886', '4ab823e7477dfddc0e6789018fcb6258', + '14151617191a1b1c1e1f202123242526', + 'ecb-tbl-128: I=57'), + ('ba6eaa88371ff0a3bd875e3f2a975ce0', 'e6478ba56a77e70cfdaa5c843abde30e', + '28292a2b2d2e2f30323334353738393a', + 'ecb-tbl-128: I=58'), + ('08059130c4c24bd30cf0575e4e0373dc', '1673064895fbeaf7f09c5429ff75772d', + '3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-128: I=59'), + ('9a8eab004ef53093dfcf96f57e7eda82', '4488033ae9f2efd0ca9383bfca1a94e9', + '50515253555657585a5b5c5d5f606162', + 'ecb-tbl-128: I=60'), + ('0745b589e2400c25f117b1d796c28129', '978f3b8c8f9d6f46626cac3c0bcb9217', + '64656667696a6b6c6e6f707173747576', + 'ecb-tbl-128: I=61'), + ('2f1777781216cec3f044f134b1b92bbe', 'e08c8a7e582e15e5527f1d9e2eecb236', + '78797a7b7d7e7f80828384858788898a', + 'ecb-tbl-128: I=62'), + ('353a779ffc541b3a3805d90ce17580fc', 'cec155b76ac5ffda4cf4f9ca91e49a7a', + '8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-128: I=63'), + ('1a1eae4415cefcf08c4ac1c8f68bea8f', 'd5ac7165763225dd2a38cdc6862c29ad', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-128: I=64'), + ('e6e7e4e5b0b3b2b5d4d5aaab16111013', '03680fe19f7ce7275452020be70e8204', + 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-128: I=65'), + ('f8f9fafbfbf8f9e677767170efe0e1e2', '461df740c9781c388e94bb861ceb54f6', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', + 'ecb-tbl-128: I=66'), + ('63626160a1a2a3a445444b4a75727370', '451bd60367f96483042742219786a074', + 'dcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-128: I=67'), + ('717073720605040b2d2c2b2a05fafbf9', 'e4dfa42671a02e57ef173b85c0ea9f2b', + 'f0f1f2f3f5f6f7f8fafbfcfdfe010002', + 'ecb-tbl-128: I=68'), + ('78797a7beae9e8ef3736292891969794', 'ed11b89e76274282227d854700a78b9e', + '04050607090a0b0c0e0f101113141516', + 'ecb-tbl-128: I=69'), + ('838281803231300fdddcdbdaa0afaead', '433946eaa51ea47af33895f2b90b3b75', + '18191a1b1d1e1f20222324252728292a', + 'ecb-tbl-128: I=70'), + ('18191a1bbfbcbdba75747b7a7f78797a', '6bc6d616a5d7d0284a5910ab35022528', + '2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-128: I=71'), + ('848586879b989996a3a2a5a4849b9a99', 'd2a920ecfe919d354b5f49eae9719c98', + '40414243454647484a4b4c4d4f505152', + 'ecb-tbl-128: 
I=72'), + ('0001020322212027cacbf4f551565754', '3a061b17f6a92885efbd0676985b373d', + '54555657595a5b5c5e5f606163646566', + 'ecb-tbl-128: I=73'), + ('cecfcccdafacadb2515057564a454447', 'fadeec16e33ea2f4688499d157e20d8f', + '68696a6b6d6e6f70727374757778797a', + 'ecb-tbl-128: I=74'), + ('92939091cdcecfc813121d1c80878685', '5cdefede59601aa3c3cda36fa6b1fa13', + '7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-128: I=75'), + ('d2d3d0d16f6c6d6259585f5ed1eeefec', '9574b00039844d92ebba7ee8719265f8', + '90919293959697989a9b9c9d9fa0a1a2', + 'ecb-tbl-128: I=76'), + ('acadaeaf878485820f0e1110d5d2d3d0', '9a9cf33758671787e5006928188643fa', + 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-128: I=77'), + ('9091929364676619e6e7e0e1757a7b78', '2cddd634c846ba66bb46cbfea4a674f9', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9ca', + 'ecb-tbl-128: I=78'), + ('babbb8b98a89888f74757a7b92959497', 'd28bae029393c3e7e26e9fafbbb4b98f', + 'cccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-128: I=79'), + ('8d8c8f8e6e6d6c633b3a3d3ccad5d4d7', 'ec27529b1bee0a9ab6a0d73ebc82e9b7', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2', + 'ecb-tbl-128: I=80'), + ('86878485010203040808f7f767606162', '3cb25c09472aff6ee7e2b47ccd7ccb17', + 'f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-128: I=81'), + ('8e8f8c8d656667788a8b8c8d010e0f0c', 'dee33103a7283370d725e44ca38f8fe5', + '08090a0b0d0e0f10121314151718191a', + 'ecb-tbl-128: I=82'), + ('c8c9cacb858687807a7b7475e7e0e1e2', '27f9bcd1aac64bffc11e7815702c1a69', + '1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-128: I=83'), + ('6d6c6f6e5053525d8c8d8a8badd2d3d0', '5df534ffad4ed0749a9988e9849d0021', + '30313233353637383a3b3c3d3f404142', + 'ecb-tbl-128: I=84'), + ('28292a2b393a3b3c0607181903040506', 'a48bee75db04fb60ca2b80f752a8421b', + '44454647494a4b4c4e4f505153545556', + 'ecb-tbl-128: I=85'), + ('a5a4a7a6b0b3b28ddbdadddcbdb2b3b0', '024c8cf70bc86ee5ce03678cb7af45f9', + '58595a5b5d5e5f60626364656768696a', + 'ecb-tbl-128: I=86'), + ('323330316467666130313e3f2c2b2a29', '3c19ac0f8a3a3862ce577831301e166b', + '6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-128: I=87'), + ('27262524080b0a05171611100b141516', 'c5e355b796a57421d59ca6be82e73bca', + '80818283858687888a8b8c8d8f909192', + 'ecb-tbl-128: I=88'), + ('040506074142434435340b0aa3a4a5a6', 'd94033276417abfb05a69d15b6e386e2', + '94959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-128: I=89'), + ('242526271112130c61606766bdb2b3b0', '24b36559ea3a9b9b958fe6da3e5b8d85', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', + 'ecb-tbl-128: I=90'), + ('4b4a4948252627209e9f9091cec9c8cb', '20fd4feaa0e8bf0cce7861d74ef4cb72', + 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-128: I=91'), + ('68696a6b6665646b9f9e9998d9e6e7e4', '350e20d5174277b9ec314c501570a11d', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', + 'ecb-tbl-128: I=92'), + ('34353637c5c6c7c0f0f1eeef7c7b7a79', '87a29d61b7c604d238fe73045a7efd57', + 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-128: I=93'), + ('32333031c2c1c13f0d0c0b0a050a0b08', '2c3164c1cc7d0064816bdc0faa362c52', + 'f8f9fafbfdfefe00020304050708090a', + 'ecb-tbl-128: I=94'), + ('cdcccfcebebdbcbbabaaa5a4181f1e1d', '195fe5e8a05a2ed594f6e4400eee10b3', + '0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-128: I=95'), + ('212023223635343ba0a1a6a7445b5a59', 'e4663df19b9a21a5a284c2bd7f905025', + '20212223252627282a2b2c2d2f303132', + 'ecb-tbl-128: I=96'), + ('0e0f0c0da8abaaad2f2e515002050407', '21b88714cfb4e2a933bd281a2c4743fd', + '34353637393a3b3c3e3f404143444546', + 'ecb-tbl-128: I=97'), + ('070605042a2928378e8f8889bdb2b3b0', 'cbfc3980d704fd0fc54378ab84e17870', + '48494a4b4d4e4f50525354555758595a', + 
'ecb-tbl-128: I=98'), + ('cbcac9c893909196a9a8a7a6a5a2a3a0', 'bc5144baa48bdeb8b63e22e03da418ef', + '5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-128: I=99'), + ('80818283c1c2c3cc9c9d9a9b0cf3f2f1', '5a1dbaef1ee2984b8395da3bdffa3ccc', + '70717273757677787a7b7c7d7f808182', + 'ecb-tbl-128: I=100'), + ('1213101125262720fafbe4e5b1b6b7b4', 'f0b11cd0729dfcc80cec903d97159574', + '84858687898a8b8c8e8f909193949596', + 'ecb-tbl-128: I=101'), + ('7f7e7d7c3033320d97969190222d2c2f', '9f95314acfddc6d1914b7f19a9cc8209', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aa', + 'ecb-tbl-128: I=102'), + ('4e4f4c4d484b4a4d81808f8e53545556', '595736f6f0f70914a94e9e007f022519', + 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-128: I=103'), + ('dcdddedfb0b3b2bd15141312a1bebfbc', '1f19f57892cae586fcdfb4c694deb183', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', + 'ecb-tbl-128: I=104'), + ('93929190282b2a2dc4c5fafb92959497', '540700ee1f6f3dab0b3eddf6caee1ef5', + 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-128: I=105'), + ('f5f4f7f6c4c7c6d9373631307e717073', '14a342a91019a331687a2254e6626ca2', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fa', + 'ecb-tbl-128: I=106'), + ('93929190b6b5b4b364656a6b05020300', '7b25f3c3b2eea18d743ef283140f29ff', + 'fcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-128: I=107'), + ('babbb8b90d0e0f00a4a5a2a3043b3a39', '46c2587d66e5e6fa7f7ca6411ad28047', + '10111213151617181a1b1c1d1f202122', + 'ecb-tbl-128: I=108'), + ('d8d9dadb7f7c7d7a10110e0f787f7e7d', '09470e72229d954ed5ee73886dfeeba9', + '24252627292a2b2c2e2f303133343536', + 'ecb-tbl-128: I=109'), + ('fefffcfdefeced923b3a3d3c6768696a', 'd77c03de92d4d0d79ef8d4824ef365eb', + '38393a3b3d3e3f40424344454748494a', + 'ecb-tbl-128: I=110'), + ('d6d7d4d58a89888f96979899a5a2a3a0', '1d190219f290e0f1715d152d41a23593', + '4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-128: I=111'), + ('18191a1ba8abaaa5303136379b848586', 'a2cd332ce3a0818769616292e87f757b', + '60616263656667686a6b6c6d6f707172', + 'ecb-tbl-128: I=112'), + ('6b6a6968a4a7a6a1d6d72829b0b7b6b5', 'd54afa6ce60fbf9341a3690e21385102', + '74757677797a7b7c7e7f808183848586', + 'ecb-tbl-128: I=113'), + ('000102038a89889755545352a6a9a8ab', '06e5c364ded628a3f5e05e613e356f46', + '88898a8b8d8e8f90929394959798999a', + 'ecb-tbl-128: I=114'), + ('2d2c2f2eb3b0b1b6b6b7b8b9f2f5f4f7', 'eae63c0e62556dac85d221099896355a', + '9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-128: I=115'), + ('979695943536373856575051e09f9e9d', '1fed060e2c6fc93ee764403a889985a2', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', + 'ecb-tbl-128: I=116'), + ('a4a5a6a7989b9a9db1b0afae7a7d7c7f', 'c25235c1a30fdec1c7cb5c5737b2a588', + 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-128: I=117'), + ('c1c0c3c2686b6a55a8a9aeafeae5e4e7', '796dbef95147d4d30873ad8b7b92efc0', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9ea', + 'ecb-tbl-128: I=118'), + ('c1c0c3c2141716118c8d828364636261', 'cbcf0fb34d98d0bd5c22ce37211a46bf', + 'ecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-128: I=119'), + ('93929190cccfcec196979091e0fffefd', '94b44da6466126cafa7c7fd09063fc24', + '00010203050607080a0b0c0d0f101112', + 'ecb-tbl-128: I=120'), + ('b4b5b6b7f9fafbfc25241b1a6e69686b', 'd78c5b5ebf9b4dbda6ae506c5074c8fe', + '14151617191a1b1c1e1f202123242526', + 'ecb-tbl-128: I=121'), + ('868784850704051ac7c6c1c08788898a', '6c27444c27204b043812cf8cf95f9769', + '28292a2b2d2e2f30323334353738393a', + 'ecb-tbl-128: I=122'), + ('f4f5f6f7aaa9a8affdfcf3f277707172', 'be94524ee5a2aa50bba8b75f4c0aebcf', + '3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-128: I=123'), + ('d3d2d1d00605040bc3c2c5c43e010003', 'a0aeaae91ba9f31f51aeb3588cf3a39e', + 
'50515253555657585a5b5c5d5f606162', + 'ecb-tbl-128: I=124'), + ('73727170424140476a6b74750d0a0b08', '275297779c28266ef9fe4c6a13c08488', + '64656667696a6b6c6e6f707173747576', + 'ecb-tbl-128: I=125'), + ('c2c3c0c10a0908f754555253a1aeafac', '86523d92bb8672cb01cf4a77fd725882', + '78797a7b7d7e7f80828384858788898a', + 'ecb-tbl-128: I=126'), + ('6d6c6f6ef8fbfafd82838c8df8fffefd', '4b8327640e9f33322a04dd96fcbf9a36', + '8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-128: I=127'), + ('f5f4f7f684878689a6a7a0a1d2cdcccf', 'ce52af650d088ca559425223f4d32694', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-128: I=128'), + + # ecb_tbl.txt, KEYSIZE=192 + ('2d33eef2c0430a8a9ebf45e809c40bb6', 'dff4945e0336df4c1c56bc700eff837f', + '00010203050607080a0b0c0d0f10111214151617191a1b1c', + 'ecb-tbl-192: I=1'), + ('6aa375d1fa155a61fb72353e0a5a8756', 'b6fddef4752765e347d5d2dc196d1252', + '1e1f20212324252628292a2b2d2e2f30323334353738393a', + 'ecb-tbl-192: I=2'), + ('bc3736518b9490dcb8ed60eb26758ed4', 'd23684e3d963b3afcf1a114aca90cbd6', + '3c3d3e3f41424344464748494b4c4d4e5051525355565758', + 'ecb-tbl-192: I=3'), + ('aa214402b46cffb9f761ec11263a311e', '3a7ac027753e2a18c2ceab9e17c11fd0', + '5a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-192: I=4'), + ('02aea86e572eeab66b2c3af5e9a46fd6', '8f6786bd007528ba26603c1601cdd0d8', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394', + 'ecb-tbl-192: I=5'), + ('e2aef6acc33b965c4fa1f91c75ff6f36', 'd17d073b01e71502e28b47ab551168b3', + '969798999b9c9d9ea0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-192: I=6'), + ('0659df46427162b9434865dd9499f91d', 'a469da517119fab95876f41d06d40ffa', + 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6c8c9cacbcdcecfd0', + 'ecb-tbl-192: I=7'), + ('49a44239c748feb456f59c276a5658df', '6091aa3b695c11f5c0b6ad26d3d862ff', + 'd2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-192: I=8'), + ('66208f6e9d04525bdedb2733b6a6be37', '70f9e67f9f8df1294131662dc6e69364', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c', + 'ecb-tbl-192: I=9'), + ('3393f8dfc729c97f5480b950bc9666b0', 'd154dcafad8b207fa5cbc95e9996b559', + '0e0f10111314151618191a1b1d1e1f20222324252728292a', + 'ecb-tbl-192: I=10'), + ('606834c8ce063f3234cf1145325dbd71', '4934d541e8b46fa339c805a7aeb9e5da', + '2c2d2e2f31323334363738393b3c3d3e4041424345464748', + 'ecb-tbl-192: I=11'), + ('fec1c04f529bbd17d8cecfcc4718b17f', '62564c738f3efe186e1a127a0c4d3c61', + '4a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-192: I=12'), + ('32df99b431ed5dc5acf8caf6dc6ce475', '07805aa043986eb23693e23bef8f3438', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384', + 'ecb-tbl-192: I=13'), + ('7fdc2b746f3f665296943b83710d1f82', 'df0b4931038bade848dee3b4b85aa44b', + '868788898b8c8d8e90919293959697989a9b9c9d9fa0a1a2', + 'ecb-tbl-192: I=14'), + ('8fba1510a3c5b87e2eaa3f7a91455ca2', '592d5fded76582e4143c65099309477c', + 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6b8b9babbbdbebfc0', + 'ecb-tbl-192: I=15'), + ('2c9b468b1c2eed92578d41b0716b223b', 'c9b8d6545580d3dfbcdd09b954ed4e92', + 'c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-192: I=16'), + ('0a2bbf0efc6bc0034f8a03433fca1b1a', '5dccd5d6eb7c1b42acb008201df707a0', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfc', + 'ecb-tbl-192: I=17'), + ('25260e1f31f4104d387222e70632504b', 'a2a91682ffeb6ed1d34340946829e6f9', + 'fefe01010304050608090a0b0d0e0f10121314151718191a', + 'ecb-tbl-192: I=18'), + ('c527d25a49f08a5228d338642ae65137', 'e45d185b797000348d9267960a68435d', + '1c1d1e1f21222324262728292b2c2d2e3031323335363738', + 'ecb-tbl-192: I=19'), + 
('3b49fc081432f5890d0e3d87e884a69e', '45e060dae5901cda8089e10d4f4c246b', + '3a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-192: I=20'), + ('d173f9ed1e57597e166931df2754a083', 'f6951afacc0079a369c71fdcff45df50', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374', + 'ecb-tbl-192: I=21'), + ('8c2b7cafa5afe7f13562daeae1adede0', '9e95e00f351d5b3ac3d0e22e626ddad6', + '767778797b7c7d7e80818283858687888a8b8c8d8f909192', + 'ecb-tbl-192: I=22'), + ('aaf4ec8c1a815aeb826cab741339532c', '9cb566ff26d92dad083b51fdc18c173c', + '94959697999a9b9c9e9fa0a1a3a4a5a6a8a9aaabadaeafb0', + 'ecb-tbl-192: I=23'), + ('40be8c5d9108e663f38f1a2395279ecf', 'c9c82766176a9b228eb9a974a010b4fb', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebec', + 'ecb-tbl-192: I=24'), + ('0c8ad9bc32d43e04716753aa4cfbe351', 'd8e26aa02945881d5137f1c1e1386e88', + '2a2b2c2d2f30313234353637393a3b3c3e3f404143444546', + 'ecb-tbl-192: I=25'), + ('1407b1d5f87d63357c8dc7ebbaebbfee', 'c0e024ccd68ff5ffa4d139c355a77c55', + '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364', + 'ecb-tbl-192: I=26'), + ('e62734d1ae3378c4549e939e6f123416', '0b18b3d16f491619da338640df391d43', + '84858687898a8b8c8e8f90919394959698999a9b9d9e9fa0', + 'ecb-tbl-192: I=27'), + ('5a752cff2a176db1a1de77f2d2cdee41', 'dbe09ac8f66027bf20cb6e434f252efc', + 'a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-192: I=28'), + ('a9c8c3a4eabedc80c64730ddd018cd88', '6d04e5e43c5b9cbe05feb9606b6480fe', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdc', + 'ecb-tbl-192: I=29'), + ('ee9b3dbbdb86180072130834d305999a', 'dd1d6553b96be526d9fee0fbd7176866', + '1a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-192: I=30'), + ('a7fa8c3586b8ebde7568ead6f634a879', '0260ca7e3f979fd015b0dd4690e16d2a', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354', + 'ecb-tbl-192: I=31'), + ('37e0f4a87f127d45ac936fe7ad88c10a', '9893734de10edcc8a67c3b110b8b8cc6', + '929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-192: I=32'), + ('3f77d8b5d92bac148e4e46f697a535c5', '93b30b750516b2d18808d710c2ee84ef', + '464748494b4c4d4e50515253555657585a5b5c5d5f606162', + 'ecb-tbl-192: I=33'), + ('d25ebb686c40f7e2c4da1014936571ca', '16f65fa47be3cb5e6dfe7c6c37016c0e', + '828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-192: I=34'), + ('4f1c769d1e5b0552c7eca84dea26a549', 'f3847210d5391e2360608e5acb560581', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbc', + 'ecb-tbl-192: I=35'), + ('8548e2f882d7584d0fafc54372b6633a', '8754462cd223366d0753913e6af2643d', + 'bebfc0c1c3c4c5c6c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', + 'ecb-tbl-192: I=36'), + ('87d7a336cb476f177cd2a51af2a62cdf', '1ea20617468d1b806a1fd58145462017', + 'dcdddedfe1e2e3e4e6e7e8e9ebecedeef0f1f2f3f5f6f7f8', + 'ecb-tbl-192: I=37'), + ('03b1feac668c4e485c1065dfc22b44ee', '3b155d927355d737c6be9dda60136e2e', + 'fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-192: I=38'), + ('bda15e66819fa72d653a6866aa287962', '26144f7b66daa91b6333dbd3850502b3', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334', + 'ecb-tbl-192: I=39'), + ('4d0c7a0d2505b80bf8b62ceb12467f0a', 'e4f9a4ab52ced8134c649bf319ebcc90', + '363738393b3c3d3e40414243454647484a4b4c4d4f505152', + 'ecb-tbl-192: I=40'), + ('626d34c9429b37211330986466b94e5f', 'b9ddd29ac6128a6cab121e34a4c62b36', + '54555657595a5b5c5e5f60616364656668696a6b6d6e6f70', + 'ecb-tbl-192: I=41'), + ('333c3e6bf00656b088a17e5ff0e7f60a', '6fcddad898f2ce4eff51294f5eaaf5c9', + '727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-192: I=42'), + 
('687ed0cdc0d2a2bc8c466d05ef9d2891', 'c9a6fe2bf4028080bea6f7fc417bd7e3', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabac', + 'ecb-tbl-192: I=43'), + ('487830e78cc56c1693e64b2a6660c7b6', '6a2026846d8609d60f298a9c0673127f', + 'aeafb0b1b3b4b5b6b8b9babbbdbebfc0c2c3c4c5c7c8c9ca', + 'ecb-tbl-192: I=44'), + ('7a48d6b7b52b29392aa2072a32b66160', '2cb25c005e26efea44336c4c97a4240b', + 'cccdcecfd1d2d3d4d6d7d8d9dbdcdddee0e1e2e3e5e6e7e8', + 'ecb-tbl-192: I=45'), + ('907320e64c8c5314d10f8d7a11c8618d', '496967ab8680ddd73d09a0e4c7dcc8aa', + 'eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-192: I=46'), + ('b561f2ca2d6e65a4a98341f3ed9ff533', 'd5af94de93487d1f3a8c577cb84a66a4', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324', + 'ecb-tbl-192: I=47'), + ('df769380d212792d026f049e2e3e48ef', '84bdac569cae2828705f267cc8376e90', + '262728292b2c2d2e30313233353637383a3b3c3d3f404142', + 'ecb-tbl-192: I=48'), + ('79f374bc445bdabf8fccb8843d6054c6', 'f7401dda5ad5ab712b7eb5d10c6f99b6', + '44454647494a4b4c4e4f50515354555658595a5b5d5e5f60', + 'ecb-tbl-192: I=49'), + ('4e02f1242fa56b05c68dbae8fe44c9d6', '1c9d54318539ebd4c3b5b7e37bf119f0', + '626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-192: I=50'), + ('cf73c93cbff57ac635a6f4ad2a4a1545', 'aca572d65fb2764cffd4a6eca090ea0d', + '80818283858687888a8b8c8d8f90919294959697999a9b9c', + 'ecb-tbl-192: I=51'), + ('9923548e2875750725b886566784c625', '36d9c627b8c2a886a10ccb36eae3dfbb', + '9e9fa0a1a3a4a5a6a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', + 'ecb-tbl-192: I=52'), + ('4888336b723a022c9545320f836a4207', '010edbf5981e143a81d646e597a4a568', + 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdced0d1d2d3d5d6d7d8', + 'ecb-tbl-192: I=53'), + ('f84d9a5561b0608b1160dee000c41ba8', '8db44d538dc20cc2f40f3067fd298e60', + 'dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-192: I=54'), + ('c23192a0418e30a19b45ae3e3625bf22', '930eb53bc71e6ac4b82972bdcd5aafb3', + 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314', + 'ecb-tbl-192: I=55'), + ('b84e0690b28b0025381ad82a15e501a7', '6c42a81edcbc9517ccd89c30c95597b4', + '161718191b1c1d1e20212223252627282a2b2c2d2f303132', + 'ecb-tbl-192: I=56'), + ('acef5e5c108876c4f06269f865b8f0b0', 'da389847ad06df19d76ee119c71e1dd3', + '34353637393a3b3c3e3f40414344454648494a4b4d4e4f50', + 'ecb-tbl-192: I=57'), + ('0f1b3603e0f5ddea4548246153a5e064', 'e018fdae13d3118f9a5d1a647a3f0462', + '525354555758595a5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-192: I=58'), + ('fbb63893450d42b58c6d88cd3c1809e3', '2aa65db36264239d3846180fabdfad20', + '70717273757677787a7b7c7d7f80818284858687898a8b8c', + 'ecb-tbl-192: I=59'), + ('4bef736df150259dae0c91354e8a5f92', '1472163e9a4f780f1ceb44b07ecf4fdb', + '8e8f90919394959698999a9b9d9e9fa0a2a3a4a5a7a8a9aa', + 'ecb-tbl-192: I=60'), + ('7d2d46242056ef13d3c3fc93c128f4c7', 'c8273fdc8f3a9f72e91097614b62397c', + 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbec0c1c2c3c5c6c7c8', + 'ecb-tbl-192: I=61'), + ('e9c1ba2df415657a256edb33934680fd', '66c8427dcd733aaf7b3470cb7d976e3f', + 'cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-192: I=62'), + ('e23ee277b0aa0a1dfb81f7527c3514f1', '146131cb17f1424d4f8da91e6f80c1d0', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304', + 'ecb-tbl-192: I=63'), + ('3e7445b0b63caaf75e4a911e12106b4c', '2610d0ad83659081ae085266a88770dc', + '060708090b0c0d0e10111213151617181a1b1c1d1f202122', + 'ecb-tbl-192: I=64'), + ('767774752023222544455a5be6e1e0e3', '38a2b5a974b0575c5d733917fb0d4570', + '24252627292a2b2c2e2f30313334353638393a3b3d3e3f40', + 'ecb-tbl-192: I=65'), + 
('72737475717e7f7ce9e8ebea696a6b6c', 'e21d401ebc60de20d6c486e4f39a588b', + '424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-192: I=66'), + ('dfdedddc25262728c9c8cfcef1eeefec', 'e51d5f88c670b079c0ca1f0c2c4405a2', + '60616263656667686a6b6c6d6f70717274757677797a7b7c', + 'ecb-tbl-192: I=67'), + ('fffe0100707776755f5e5d5c7675746b', '246a94788a642fb3d1b823c8762380c8', + '7e7f80818384858688898a8b8d8e8f90929394959798999a', + 'ecb-tbl-192: I=68'), + ('e0e1e2e3424140479f9e9190292e2f2c', 'b80c391c5c41a4c3b30c68e0e3d7550f', + '9c9d9e9fa1a2a3a4a6a7a8a9abacadaeb0b1b2b3b5b6b7b8', + 'ecb-tbl-192: I=69'), + ('2120272690efeeed3b3a39384e4d4c4b', 'b77c4754fc64eb9a1154a9af0bb1f21c', + 'babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-192: I=70'), + ('ecedeeef5350516ea1a0a7a6a3acadae', 'fb554de520d159a06bf219fc7f34a02f', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4', + 'ecb-tbl-192: I=71'), + ('32333c3d25222320e9e8ebeacecdccc3', 'a89fba152d76b4927beed160ddb76c57', + 'f6f7f8f9fbfcfdfe00010203050607080a0b0c0d0f101112', + 'ecb-tbl-192: I=72'), + ('40414243626160678a8bb4b511161714', '5676eab4a98d2e8473b3f3d46424247c', + '14151617191a1b1c1e1f20212324252628292a2b2d2e2f30', + 'ecb-tbl-192: I=73'), + ('94959293f5fafbf81f1e1d1c7c7f7e79', '4e8f068bd7ede52a639036ec86c33568', + '323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-192: I=74'), + ('bebfbcbd191a1b14cfcec9c8546b6a69', 'f0193c4d7aff1791ee4c07eb4a1824fc', + '50515253555657585a5b5c5d5f60616264656667696a6b6c', + 'ecb-tbl-192: I=75'), + ('2c2d3233898e8f8cbbbab9b8333031ce', 'ac8686eeca9ba761afe82d67b928c33f', + '6e6f70717374757678797a7b7d7e7f80828384858788898a', + 'ecb-tbl-192: I=76'), + ('84858687bfbcbdba37363938fdfafbf8', '5faf8573e33b145b6a369cd3606ab2c9', + '8c8d8e8f91929394969798999b9c9d9ea0a1a2a3a5a6a7a8', + 'ecb-tbl-192: I=77'), + ('828384857669686b909192930b08090e', '31587e9944ab1c16b844ecad0df2e7da', + 'aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-192: I=78'), + ('bebfbcbd9695948b707176779e919093', 'd017fecd91148aba37f6f3068aa67d8a', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4', + 'ecb-tbl-192: I=79'), + ('8b8a85846067666521202322d0d3d2dd', '788ef2f021a73cba2794b616078a8500', + 'e6e7e8e9ebecedeef0f1f2f3f5f6f7f8fafbfcfdfe010002', + 'ecb-tbl-192: I=80'), + ('76777475f1f2f3f4f8f9e6e777707172', '5d1ef20dced6bcbc12131ac7c54788aa', + '04050607090a0b0c0e0f10111314151618191a1b1d1e1f20', + 'ecb-tbl-192: I=81'), + ('a4a5a2a34f404142b4b5b6b727242522', 'b3c8cf961faf9ea05fdde6d1e4d8f663', + '222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-192: I=82'), + ('94959697e1e2e3ec16171011839c9d9e', '143075c70605861c7fac6526199e459f', + '40414243454647484a4b4c4d4f50515254555657595a5b5c', + 'ecb-tbl-192: I=83'), + ('03023d3c06010003dedfdcddfffcfde2', 'a5ae12eade9a87268d898bfc8fc0252a', + '5e5f60616364656668696a6b6d6e6f70727374757778797a', + 'ecb-tbl-192: I=84'), + ('10111213f1f2f3f4cecfc0c1dbdcddde', '0924f7cf2e877a4819f5244a360dcea9', + '7c7d7e7f81828384868788898b8c8d8e9091929395969798', + 'ecb-tbl-192: I=85'), + ('67666160724d4c4f1d1c1f1e73707176', '3d9e9635afcc3e291cc7ab3f27d1c99a', + '9a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-192: I=86'), + ('e6e7e4e5a8abaad584858283909f9e9d', '9d80feebf87510e2b8fb98bb54fd788c', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4', + 'ecb-tbl-192: I=87'), + ('71707f7e565150537d7c7f7e6162636c', '5f9d1a082a1a37985f174002eca01309', + 'd6d7d8d9dbdcdddee0e1e2e3e5e6e7e8eaebecedeff0f1f2', + 'ecb-tbl-192: I=88'), + 
('64656667212223245555aaaa03040506', 'a390ebb1d1403930184a44b4876646e4', + 'f4f5f6f7f9fafbfcfefe01010304050608090a0b0d0e0f10', + 'ecb-tbl-192: I=89'), + ('9e9f9899aba4a5a6cfcecdcc2b28292e', '700fe918981c3195bb6c4bcb46b74e29', + '121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-192: I=90'), + ('c7c6c5c4d1d2d3dc626364653a454447', '907984406f7bf2d17fb1eb15b673d747', + '30313233353637383a3b3c3d3f40414244454647494a4b4c', + 'ecb-tbl-192: I=91'), + ('f6f7e8e9e0e7e6e51d1c1f1e5b585966', 'c32a956dcfc875c2ac7c7cc8b8cc26e1', + '4e4f50515354555658595a5b5d5e5f60626364656768696a', + 'ecb-tbl-192: I=92'), + ('bcbdbebf5d5e5f5868696667f4f3f2f1', '02646e2ebfa9b820cf8424e9b9b6eb51', + '6c6d6e6f71727374767778797b7c7d7e8081828385868788', + 'ecb-tbl-192: I=93'), + ('40414647b0afaead9b9a99989b98999e', '621fda3a5bbd54c6d3c685816bd4ead8', + '8a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-192: I=94'), + ('69686b6a0201001f0f0e0908b4bbbab9', 'd4e216040426dfaf18b152469bc5ac2f', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4', + 'ecb-tbl-192: I=95'), + ('c7c6c9c8d8dfdedd5a5b5859bebdbcb3', '9d0635b9d33b6cdbd71f5d246ea17cc8', + 'c6c7c8c9cbcccdced0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', + 'ecb-tbl-192: I=96'), + ('dedfdcdd787b7a7dfffee1e0b2b5b4b7', '10abad1bd9bae5448808765583a2cc1a', + 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6f8f9fafbfdfefe00', + 'ecb-tbl-192: I=97'), + ('4d4c4b4a606f6e6dd0d1d2d3fbf8f9fe', '6891889e16544e355ff65a793c39c9a8', + '020304050708090a0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-192: I=98'), + ('b7b6b5b4d7d4d5dae5e4e3e2e1fefffc', 'cc735582e68072c163cd9ddf46b91279', + '20212223252627282a2b2c2d2f30313234353637393a3b3c', + 'ecb-tbl-192: I=99'), + ('cecfb0b1f7f0f1f2aeafacad3e3d3c23', 'c5c68b9aeeb7f878df578efa562f9574', + '3e3f40414344454648494a4b4d4e4f50525354555758595a', + 'ecb-tbl-192: I=100'), + ('cacbc8c9cdcecfc812131c1d494e4f4c', '5f4764395a667a47d73452955d0d2ce8', + '5c5d5e5f61626364666768696b6c6d6e7071727375767778', + 'ecb-tbl-192: I=101'), + ('9d9c9b9ad22d2c2fb1b0b3b20c0f0e09', '701448331f66106cefddf1eb8267c357', + '7a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-192: I=102'), + ('7a7b787964676659959493924f404142', 'cb3ee56d2e14b4e1941666f13379d657', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4', + 'ecb-tbl-192: I=103'), + ('aaaba4a5cec9c8cb1f1e1d1caba8a9a6', '9fe16efd18ab6e1981191851fedb0764', + 'b6b7b8b9bbbcbdbec0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', + 'ecb-tbl-192: I=104'), + ('93929190282b2a2dc4c5fafb92959497', '3dc9ba24e1b223589b147adceb4c8e48', + 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6e8e9eaebedeeeff0', + 'ecb-tbl-192: I=105'), + ('efeee9e8ded1d0d339383b3a888b8a8d', '1c333032682e7d4de5e5afc05c3e483c', + 'f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-192: I=106'), + ('7f7e7d7ca2a1a0af78797e7f112e2f2c', 'd593cc99a95afef7e92038e05a59d00a', + '10111213151617181a1b1c1d1f20212224252627292a2b2c', + 'ecb-tbl-192: I=107'), + ('84859a9b2b2c2d2e868784852625245b', '51e7f96f53b4353923452c222134e1ec', + '2e2f30313334353638393a3b3d3e3f40424344454748494a', + 'ecb-tbl-192: I=108'), + ('b0b1b2b3070405026869666710171615', '4075b357a1a2b473400c3b25f32f81a4', + '4c4d4e4f51525354565758595b5c5d5e6061626365666768', + 'ecb-tbl-192: I=109'), + ('acadaaabbda2a3a00d0c0f0e595a5b5c', '302e341a3ebcd74f0d55f61714570284', + '6a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-192: I=110'), + ('121310115655544b5253545569666764', '57abdd8231280da01c5042b78cf76522', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4', + 'ecb-tbl-192: I=111'), + 
('dedfd0d166616063eaebe8e94142434c', '17f9ea7eea17ac1adf0e190fef799e92', + 'a6a7a8a9abacadaeb0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', + 'ecb-tbl-192: I=112'), + ('dbdad9d81417161166677879e0e7e6e5', '2e1bdd563dd87ee5c338dd6d098d0a7a', + 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6d8d9dadbdddedfe0', + 'ecb-tbl-192: I=113'), + ('6a6b6c6de0efeeed2b2a2928c0c3c2c5', 'eb869996e6f8bfb2bfdd9e0c4504dbb2', + 'e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-192: I=114'), + ('b1b0b3b21714151a1a1b1c1d5649484b', 'c2e01549e9decf317468b3e018c61ba8', + '00010203050607080a0b0c0d0f10111214151617191a1b1c', + 'ecb-tbl-192: I=115'), + ('39380706a3a4a5a6c4c5c6c77271706f', '8da875d033c01dd463b244a1770f4a22', + '1e1f20212324252628292a2b2d2e2f30323334353738393a', + 'ecb-tbl-192: I=116'), + ('5c5d5e5f1013121539383736e2e5e4e7', '8ba0dcf3a186844f026d022f8839d696', + '3c3d3e3f41424344464748494b4c4d4e5051525355565758', + 'ecb-tbl-192: I=117'), + ('43424544ead5d4d72e2f2c2d64676661', 'e9691ff9a6cc6970e51670a0fd5b88c1', + '5a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-192: I=118'), + ('55545756989b9a65f8f9feff18171615', 'f2baec06faeed30f88ee63ba081a6e5b', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394', + 'ecb-tbl-192: I=119'), + ('05040b0a525554573c3d3e3f4a494847', '9c39d4c459ae5753394d6094adc21e78', + '969798999b9c9d9ea0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-192: I=120'), + ('14151617595a5b5c8584fbfa8e89888b', '6345b532a11904502ea43ba99c6bd2b2', + 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6c8c9cacbcdcecfd0', + 'ecb-tbl-192: I=121'), + ('7c7d7a7bfdf2f3f029282b2a51525354', '5ffae3061a95172e4070cedce1e428c8', + 'd2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-192: I=122'), + ('38393a3b1e1d1c1341404746c23d3c3e', '0a4566be4cdf9adce5dec865b5ab34cd', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c', + 'ecb-tbl-192: I=123'), + ('8d8c939240474645818083827c7f7e41', 'ca17fcce79b7404f2559b22928f126fb', + '0e0f10111314151618191a1b1d1e1f20222324252728292a', + 'ecb-tbl-192: I=124'), + ('3b3a39381a19181f32333c3d45424340', '97ca39b849ed73a6470a97c821d82f58', + '2c2d2e2f31323334363738393b3c3d3e4041424345464748', + 'ecb-tbl-192: I=125'), + ('f0f1f6f738272625828380817f7c7d7a', '8198cb06bc684c6d3e9b7989428dcf7a', + '4a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-192: I=126'), + ('89888b8a0407061966676061141b1a19', 'f53c464c705ee0f28d9a4c59374928bd', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384', + 'ecb-tbl-192: I=127'), + ('d3d2dddcaaadacaf9c9d9e9fe8ebeae5', '9adb3d4cca559bb98c3e2ed73dbf1154', + '868788898b8c8d8e90919293959697989a9b9c9d9fa0a1a2', + 'ecb-tbl-192: I=128'), + + # ecb_tbl.txt, KEYSIZE=256 + ('834eadfccac7e1b30664b1aba44815ab', '1946dabf6a03a2a2c3d0b05080aed6fc', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=1'), + ('d9dc4dba3021b05d67c0518f72b62bf1', '5ed301d747d3cc715445ebdec62f2fb4', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=2'), + ('a291d86301a4a739f7392173aa3c604c', '6585c8f43d13a6beab6419fc5935b9d0', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=3'), + ('4264b2696498de4df79788a9f83e9390', '2a5b56a596680fcc0e05f5e0f151ecae', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-256: I=4'), + ('ee9932b3721804d5a83ef5949245b6f6', 'f5d6ff414fd2c6181494d20c37f2b8c4', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-256: I=5'), + ('e6248f55c5fdcbca9cbbb01c88a2ea77', 
'85399c01f59fffb5204f19f8482f00b8', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-256: I=6'), + ('b8358e41b9dff65fd461d55a99266247', '92097b4c88a041ddf98144bc8d22e8e7', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-256: I=7'), + ('f0e2d72260af58e21e015ab3a4c0d906', '89bd5b73b356ab412aef9f76cea2d65c', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-256: I=8'), + ('475b8b823ce8893db3c44a9f2a379ff7', '2536969093c55ff9454692f2fac2f530', + '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-256: I=9'), + ('688f5281945812862f5f3076cf80412f', '07fc76a872843f3f6e0081ee9396d637', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-256: I=10'), + ('08d1d2bc750af553365d35e75afaceaa', 'e38ba8ec2aa741358dcc93e8f141c491', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-256: I=11'), + ('8707121f47cc3efceca5f9a8474950a1', 'd028ee23e4a89075d0b03e868d7d3a42', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-256: I=12'), + ('e51aa0b135dba566939c3b6359a980c5', '8cd9423dfc459e547155c5d1d522e540', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-256: I=13'), + ('069a007fc76a459f98baf917fedf9521', '080e9517eb1677719acf728086040ae3', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-256: I=14'), + ('726165c1723fbcf6c026d7d00b091027', '7c1700211a3991fc0ecded0ab3e576b0', + '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-256: I=15'), + ('d7c544de91d55cfcde1f84ca382200ce', 'dabcbcc855839251db51e224fbe87435', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-256: I=16'), + ('fed3c9a161b9b5b2bd611b41dc9da357', '68d56fad0406947a4dd27a7448c10f1d', + '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-256: I=17'), + ('4f634cdc6551043409f30b635832cf82', 'da9a11479844d1ffee24bbf3719a9925', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-256: I=18'), + ('109ce98db0dfb36734d9f3394711b4e6', '5e4ba572f8d23e738da9b05ba24b8d81', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-256: I=19'), + ('4ea6dfaba2d8a02ffdffa89835987242', 'a115a2065d667e3f0b883837a6e903f8', + '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-256: I=20'), + ('5ae094f54af58e6e3cdbf976dac6d9ef', '3e9e90dc33eac2437d86ad30b137e66e', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-256: I=21'), + ('764d8e8e0f29926dbe5122e66354fdbe', '01ce82d8fbcdae824cb3c48e495c3692', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-256: I=22'), + ('3f0418f888cdf29a982bf6b75410d6a9', '0c9cff163ce936faaf083cfd3dea3117', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-256: I=23'), + ('e4a3e7cb12cdd56aa4a75197a9530220', '5131ba9bd48f2bba85560680df504b52', + '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-256: I=24'), + ('211677684aac1ec1a160f44c4ebf3f26', '9dc503bbf09823aec8a977a5ad26ccb2', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-256: I=25'), + ('d21e439ff749ac8f18d6d4b105e03895', '9a6db0c0862e506a9e397225884041d7', + '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-256: I=26'), + ('d9f6ff44646c4725bd4c0103ff5552a7', 
'430bf9570804185e1ab6365fc6a6860c', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-256: I=27'), + ('0b1256c2a00b976250cfc5b0c37ed382', '3525ebc02f4886e6a5a3762813e8ce8a', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-256: I=28'), + ('b056447ffc6dc4523a36cc2e972a3a79', '07fa265c763779cce224c7bad671027b', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-256: I=29'), + ('5e25ca78f0de55802524d38da3fe4456', 'e8b72b4e8be243438c9fff1f0e205872', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=30'), + ('a5bcf4728fa5eaad8567c0dc24675f83', '109d4f999a0e11ace1f05e6b22cbcb50', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=31'), + ('814e59f97ed84646b78b2ca022e9ca43', '45a5e8d4c3ed58403ff08d68a0cc4029', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=32'), + ('15478beec58f4775c7a7f5d4395514d7', '196865964db3d417b6bd4d586bcb7634', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-256: I=33'), + ('253548ffca461c67c8cbc78cd59f4756', '60436ad45ac7d30d99195f815d98d2ae', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-256: I=34'), + ('fd7ad8d73b9b0f8cc41600640f503d65', 'bb07a23f0b61014b197620c185e2cd75', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-256: I=35'), + ('06199de52c6cbf8af954cd65830bcd56', '5bc0b2850129c854423aff0751fe343b', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-256: I=36'), + ('f17c4ffe48e44c61bd891e257e725794', '7541a78f96738e6417d2a24bd2beca40', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-256: I=37'), + ('9a5b4a402a3e8a59be6bf5cd8154f029', 'b0a303054412882e464591f1546c5b9e', + '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-256: I=38'), + ('79bd40b91a7e07dc939d441782ae6b17', '778c06d8a355eeee214fcea14b4e0eef', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-256: I=39'), + ('d8ceaaf8976e5fbe1012d8c84f323799', '09614206d15cbace63227d06db6beebb', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-256: I=40'), + ('3316e2751e2e388b083da23dd6ac3fbe', '41b97fb20e427a9fdbbb358d9262255d', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-256: I=41'), + ('8b7cfbe37de7dca793521819242c5816', 'c1940f703d845f957652c2d64abd7adf', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-256: I=42'), + ('f23f033c0eebf8ec55752662fd58ce68', 'd2d44fcdae5332343366db297efcf21b', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-256: I=43'), + ('59eb34f6c8bdbacc5fc6ad73a59a1301', 'ea8196b79dbe167b6aa9896e287eed2b', + '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-256: I=44'), + ('dcde8b6bd5cf7cc22d9505e3ce81261a', 'd6b0b0c4ba6c7dbe5ed467a1e3f06c2d', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-256: I=45'), + ('e33cf7e524fed781e7042ff9f4b35dc7', 'ec51eb295250c22c2fb01816fb72bcae', + '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-256: I=46'), + ('27963c8facdf73062867d164df6d064c', 'aded6630a07ce9c7408a155d3bd0d36f', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-256: I=47'), + ('77b1ce386b551b995f2f2a1da994eef8', 
'697c9245b9937f32f5d1c82319f0363a', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-256: I=48'), + ('f083388b013679efcf0bb9b15d52ae5c', 'aad5ad50c6262aaec30541a1b7b5b19c', + 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-256: I=49'), + ('c5009e0dab55db0abdb636f2600290c8', '7d34b893855341ec625bd6875ac18c0d', + '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', + 'ecb-tbl-256: I=50'), + ('7804881e26cd532d8514d3683f00f1b9', '7ef05105440f83862f5d780e88f02b41', + '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-256: I=51'), + ('46cddcd73d1eb53e675ca012870a92a3', 'c377c06403382061af2c9c93a8e70df6', + '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-256: I=52'), + ('a9fb44062bb07fe130a8e8299eacb1ab', '1dbdb3ffdc052dacc83318853abc6de5', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-256: I=53'), + ('2b6ff8d7a5cc3a28a22d5a6f221af26b', '69a6eab00432517d0bf483c91c0963c7', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-256: I=54'), + ('1a9527c29b8add4b0e3e656dbb2af8b4', '0797f41dc217c80446e1d514bd6ab197', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-256: I=55'), + ('7f99cf2c75244df015eb4b0c1050aeae', '9dfd76575902a637c01343c58e011a03', + '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-256: I=56'), + ('e84ff85b0d9454071909c1381646c4ed', 'acf4328ae78f34b9fa9b459747cc2658', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-256: I=57'), + ('89afd40f99521280d5399b12404f6db4', 'b0479aea12bac4fe2384cf98995150c6', + '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-256: I=58'), + ('a09ef32dbc5119a35ab7fa38656f0329', '9dd52789efe3ffb99f33b3da5030109a', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-256: I=59'), + ('61773457f068c376c7829b93e696e716', 'abbb755e4621ef8f1214c19f649fb9fd', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-256: I=60'), + ('a34f0cae726cce41dd498747d891b967', 'da27fb8174357bce2bed0e7354f380f9', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-256: I=61'), + ('856f59496c7388ee2d2b1a27b7697847', 'c59a0663f0993838f6e5856593bdc5ef', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=62'), + ('cb090c593ef7720bd95908fb93b49df4', 'ed60b264b5213e831607a99c0ce5e57e', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=63'), + ('a0ac75cd2f1923d460fc4d457ad95baf', 'e50548746846f3eb77b8c520640884ed', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=64'), + ('2a2b282974777689e8e9eeef525d5c5f', '28282cc7d21d6a2923641e52d188ef0c', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-256: I=65'), + ('909192939390919e0f0e09089788898a', '0dfa5b02abb18e5a815305216d6d4f8e', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-256: I=66'), + ('777675748d8e8f907170777649464744', '7359635c0eecefe31d673395fb46fb99', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-256: I=67'), + ('717073720605040b2d2c2b2a05fafbf9', '73c679f7d5aef2745c9737bb4c47fb36', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-256: I=68'), + ('64656667fefdfcc31b1a1d1ca5aaaba8', 
'b192bd472a4d2eafb786e97458967626', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-256: I=69'), + ('dbdad9d86a696867b5b4b3b2c8d7d6d5', '0ec327f6c8a2b147598ca3fde61dc6a4', + '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-256: I=70'), + ('5c5d5e5fe3e0e1fe31303736333c3d3e', 'fc418eb3c41b859b38d4b6f646629729', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-256: I=71'), + ('545556574b48494673727574546b6a69', '30249e5ac282b1c981ea64b609f3a154', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-256: I=72'), + ('ecedeeefc6c5c4bb56575051f5fafbf8', '5e6e08646d12150776bb43c2d78a9703', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-256: I=73'), + ('464744452724252ac9c8cfced2cdcccf', 'faeb3d5de652cd3447dceb343f30394a', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-256: I=74'), + ('e6e7e4e54142435c878681801c131211', 'a8e88706823f6993ef80d05c1c7b2cf0', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-256: I=75'), + ('72737071cfcccdc2f9f8fffe710e0f0c', '8ced86677e6e00a1a1b15968f2d3cce6', + '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-256: I=76'), + ('505152537370714ec3c2c5c4010e0f0c', '9fc7c23858be03bdebb84e90db6786a9', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-256: I=77'), + ('a8a9aaab5c5f5e51aeafa8a93d222320', 'b4fbd65b33f70d8cf7f1111ac4649c36', + '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-256: I=78'), + ('dedfdcddf6f5f4eb10111617fef1f0f3', 'c5c32d5ed03c4b53cc8c1bd0ef0dbbf6', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-256: I=79'), + ('bdbcbfbe5e5d5c530b0a0d0cfac5c4c7', 'd1a7f03b773e5c212464b63709c6a891', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-256: I=80'), + ('8a8b8889050606f8f4f5f2f3636c6d6e', '6b7161d8745947ac6950438ea138d028', + 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-256: I=81'), + ('a6a7a4a54d4e4f40b2b3b4b539262724', 'fd47a9f7e366ee7a09bc508b00460661', + '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', + 'ecb-tbl-256: I=82'), + ('9c9d9e9fe9eaebf40e0f08099b949596', '00d40b003dc3a0d9310b659b98c7e416', + '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-256: I=83'), + ('2d2c2f2e1013121dcccdcacbed121310', 'eea4c79dcc8e2bda691f20ac48be0717', + '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-256: I=84'), + ('f4f5f6f7edeeefd0eaebecedf7f8f9fa', 'e78f43b11c204403e5751f89d05a2509', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-256: I=85'), + ('3d3c3f3e282b2a2573727574150a0b08', 'd0f0e3d1f1244bb979931e38dd1786ef', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-256: I=86'), + ('b6b7b4b5f8fbfae5b4b5b2b3a0afaead', '042e639dc4e1e4dde7b75b749ea6f765', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-256: I=87'), + ('b7b6b5b4989b9a95878681809ba4a5a6', 'bc032fdd0efe29503a980a7d07ab46a8', + '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-256: I=88'), + ('a8a9aaabe5e6e798e9e8efee4748494a', '0c93ac949c0da6446effb86183b6c910', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-256: I=89'), + ('ecedeeefd9dadbd4b9b8bfbe657a7b78', 
'e0d343e14da75c917b4a5cec4810d7c2', + '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-256: I=90'), + ('7f7e7d7c696a6b74cacbcccd929d9c9f', '0eafb821748408279b937b626792e619', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-256: I=91'), + ('08090a0b0605040bfffef9f8b9c6c7c4', 'fa1ac6e02d23b106a1fef18b274a553f', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-256: I=92'), + ('08090a0bf1f2f3ccfcfdfafb68676665', '0dadfe019cd12368075507df33c1a1e9', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-256: I=93'), + ('cacbc8c93a393837050403020d121310', '3a0879b414465d9ffbaf86b33a63a1b9', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=94'), + ('e9e8ebea8281809f8f8e8988343b3a39', '62199fadc76d0be1805d3ba0b7d914bf', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=95'), + ('515053524645444bd0d1d6d7340b0a09', '1b06d6c5d333e742730130cf78e719b4', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=96'), + ('42434041ecefee1193929594c6c9c8cb', 'f1f848824c32e9dcdcbf21580f069329', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-256: I=97'), + ('efeeedecc2c1c0cf76777071455a5b58', '1a09050cbd684f784d8e965e0782f28a', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-256: I=98'), + ('5f5e5d5c3f3c3d221d1c1b1a19161714', '79c2969e7ded2ba7d088f3f320692360', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-256: I=99'), + ('000102034142434c1c1d1a1b8d727371', '091a658a2f7444c16accb669450c7b63', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-256: I=100'), + ('8e8f8c8db1b2b38c56575051050a0b08', '97c1e3a72cca65fa977d5ed0e8a7bbfc', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-256: I=101'), + ('a7a6a5a4e8ebeae57f7e7978cad5d4d7', '70c430c6db9a17828937305a2df91a2a', + '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-256: I=102'), + ('8a8b888994979689454443429f909192', '629553457fbe2479098571c7c903fde8', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-256: I=103'), + ('8c8d8e8fe0e3e2ed45444342f1cecfcc', 'a25b25a61f612669e7d91265c7d476ba', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-256: I=104'), + ('fffefdfc4c4f4e31d8d9dedfb6b9b8bb', 'eb7e4e49b8ae0f024570dda293254fed', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-256: I=105'), + ('fdfcfffecccfcec12f2e29286679787b', '38fe15d61cca84516e924adce5014f67', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-256: I=106'), + ('67666564bab9b8a77071767719161714', '3ad208492249108c9f3ebeb167ad0583', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-256: I=107'), + ('9a9b98992d2e2f2084858283245b5a59', '299ba9f9bf5ab05c3580fc26edd1ed12', + '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-256: I=108'), + ('a4a5a6a70b0809365c5d5a5b2c232221', '19dc705b857a60fb07717b2ea5717781', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-256: I=109'), + ('464744455754555af3f2f5f4afb0b1b2', 'ffc8aeb885b5efcad06b6dbebf92e76b', + '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-256: I=110'), + 
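The same pattern applies to the ecb-tbl-256 vectors that follow: the 64-hex-digit key strings decode to 32 bytes, and pycryptodome selects the AES variant from the key length. A sketch of the decrypt direction for the I=110 entry above, again assuming only calls that already appear in this file:

from binascii import unhexlify
from Crypto.Cipher import AES

key = unhexlify('80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6')  # 32 bytes selects AES-256
pt = AES.new(key, AES.MODE_ECB).decrypt(unhexlify('ffc8aeb885b5efcad06b6dbebf92e76b'))
assert pt.hex() == '464744455754555af3f2f5f4afb0b1b2'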
('323330317675746b7273747549464744', 'f58900c5e0b385253ff2546250a0142b', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-256: I=111'), + ('a8a9aaab181b1a15808186872b141516', '2ee67b56280bc462429cee6e3370cbc1', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-256: I=112'), + ('e7e6e5e4202323ddaaabacad343b3a39', '20db650a9c8e9a84ab4d25f7edc8f03f', + 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-256: I=113'), + ('a8a9aaab2221202fedecebea1e010003', '3c36da169525cf818843805f25b78ae5', + '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', + 'ecb-tbl-256: I=114'), + ('f9f8fbfa5f5c5d42424344450e010003', '9a781d960db9e45e37779042fea51922', + '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-256: I=115'), + ('57565554f5f6f7f89697909120dfdedd', '6560395ec269c672a3c288226efdba77', + '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-256: I=116'), + ('f8f9fafbcccfcef1dddcdbda0e010003', '8c772b7a189ac544453d5916ebb27b9a', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-256: I=117'), + ('d9d8dbda7073727d80818687c2dddcdf', '77ca5468cc48e843d05f78eed9d6578f', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-256: I=118'), + ('c5c4c7c6080b0a1588898e8f68676665', '72cdcc71dc82c60d4429c9e2d8195baa', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-256: I=119'), + ('83828180dcdfded186878081f0cfcecd', '8080d68ce60e94b40b5b8b69eeb35afa', + '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-256: I=120'), + ('98999a9bdddedfa079787f7e0a050407', '44222d3cde299c04369d58ac0eba1e8e', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-256: I=121'), + ('cecfcccd4f4c4d429f9e9998dfc0c1c2', '9b8721b0a8dfc691c5bc5885dbfcb27a', + '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-256: I=122'), + ('404142436665647b29282f2eaba4a5a6', '0dc015ce9a3a3414b5e62ec643384183', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-256: I=123'), + ('33323130e6e5e4eb23222524dea1a0a3', '705715448a8da412025ce38345c2a148', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-256: I=124'), + ('cfcecdccf6f5f4cbe6e7e0e199969794', 'c32b5b0b6fbae165266c569f4b6ecf0b', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-256: I=125'), + ('babbb8b97271707fdcdddadb29363734', '4dca6c75192a01ddca9476af2a521e87', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=126'), + ('c9c8cbca4447465926272021545b5a59', '058691e627ecbc36ac07b6db423bd698', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=127'), + ('050407067477767956575051221d1c1f', '7444527095838fe080fc2bcdd30847eb', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=128'), + + # FIPS PUB 800-38A test vectors, 2001 edition. Annex F. 
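The block that follows switches from single-block vectors to the worked examples in Appendix F of NIST SP 800-38A (2001): F.1.1, F.1.3 and F.1.5 give ECB encryption under AES-128, AES-192 and AES-256 respectively, each over the same four 16-byte plaintext blocks. Because ECB encrypts each block independently, the whole 64-byte message can be checked in one call. A minimal sketch for the AES-128 case (F.1.1), using the key and data quoted below:

from binascii import unhexlify
from Crypto.Cipher import AES

key = unhexlify('2b7e151628aed2a6abf7158809cf4f3c')
pt = unhexlify('6bc1bee22e409f96e93d7e117393172a'
               'ae2d8a571e03ac9c9eb76fac45af8e51'
               '30c81c46a35ce411e5fbc1191a0a52ef'
               'f69f2445df4f9b17ad2b417be66c3710')
ct = AES.new(key, AES.MODE_ECB).encrypt(pt)
assert ct == unhexlify('3ad77bb40d7a3660a89ecaf32466ef97'
                       'f5d3d58503b9699de785895a96fdbaaf'
                       '43b1cd7f598ece23881b00e3ed030688'
                       '7b0c785e27e8ad3f8223207104725dd4')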
+ + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + '3ad77bb40d7a3660a89ecaf32466ef97'+'f5d3d58503b9699de785895a96fdbaaf'+ + '43b1cd7f598ece23881b00e3ed030688'+'7b0c785e27e8ad3f8223207104725dd4', + '2b7e151628aed2a6abf7158809cf4f3c', + 'NIST 800-38A, F.1.1, ECB and AES-128'), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'bd334f1d6e45f25ff712a214571fa5cc'+'974104846d0ad3ad7734ecb3ecee4eef'+ + 'ef7afd2270e2e60adce0ba2face6444e'+'9a4b41ba738d6c72fb16691603c18e0e', + '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', + 'NIST 800-38A, F.1.3, ECB and AES-192'), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'f3eed1bdb5d2a03c064b5a7e3db181f8'+'591ccb10d410ed26dc5ba74a31362870'+ + 'b6ed21b99ca6f4f9f153e7b1beafed1d'+'23304b7a39f9f3ff067d8d8f9e24ecc7', + '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', + 'NIST 800-38A, F.1.3, ECB and AES-256'), + +] + +test_data_8_lanes = [] +for td in test_data: + test_data_8_lanes.append((td[0] * 8, td[1] * 8, td[2], td[3])) +test_data += test_data_8_lanes + +class TestMultipleBlocks(unittest.TestCase): + + def __init__(self, use_aesni): + unittest.TestCase.__init__(self) + self.use_aesni = use_aesni + + def runTest(self): + # Encrypt data which is 8*2+4 bytes long, so as to trigger (for the + # AESNI variant) both the path that parallelizes 8 lanes and the one + # that processes data serially + + tvs = [ + (b'a' * 16, 'c0b27011eb15bf144d2fc9fae80ea16d4c231cb230416c5fac02e6835ad9d7d0'), + (b'a' * 24, 'df8435ce361a78c535b41dcb57da952abbf9ee5954dc6fbcd75fd00fa626915d'), + (b'a' * 32, '211402de6c80db1f92ba255881178e1f70783b8cfd3b37808205e48b80486cd8') + ] + + for key, expected in tvs: + + cipher = AES.new(key, AES.MODE_ECB, use_aesni=self.use_aesni) + h = SHA256.new() + + pt = b"".join([ tobytes('{0:016x}'.format(x)) for x in range(20) ]) + ct = cipher.encrypt(pt) + self.assertEqual(SHA256.new(ct).hexdigest(), expected) + + +class TestIncompleteBlocks(unittest.TestCase): + + def __init__(self, use_aesni): + unittest.TestCase.__init__(self) + self.use_aesni = use_aesni + + def runTest(self): + # Encrypt data with length not multiple of 16 bytes + + cipher = AES.new(b'4'*16, AES.MODE_ECB, use_aesni=self.use_aesni) + + for msg_len in range(1, 16): + self.assertRaises(ValueError, cipher.encrypt, b'1' * msg_len) + self.assertRaises(ValueError, cipher.encrypt, b'1' * (msg_len+16)) + self.assertRaises(ValueError, cipher.decrypt, b'1' * msg_len) + self.assertRaises(ValueError, cipher.decrypt, b'1' * (msg_len+16)) + + self.assertEqual(cipher.encrypt(b''), b'') + self.assertEqual(cipher.decrypt(b''), b'') + + +class TestOutput(unittest.TestCase): + + def __init__(self, use_aesni): + unittest.TestCase.__init__(self) + self.use_aesni = use_aesni + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + cipher = AES.new(b'4'*16, AES.MODE_ECB, use_aesni=self.use_aesni) + + pt = b'5' * 16 + ct = cipher.encrypt(pt) + + output = bytearray(16) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(16)) + cipher.encrypt(pt, output=output) + 
self.assertEqual(ct, output) + + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(15) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + from Crypto.Util import _cpu_features + from .common import make_block_tests + + tests = make_block_tests(AES, "AES", test_data, {'use_aesni': False}) + tests += [ TestMultipleBlocks(False) ] + tests += [ TestIncompleteBlocks(False) ] + if _cpu_features.have_aes_ni(): + # Run tests with AES-NI instructions if they are available. + tests += make_block_tests(AES, "AESNI", test_data, {'use_aesni': True}) + tests += [ TestMultipleBlocks(True) ] + tests += [ TestIncompleteBlocks(True) ] + tests += [ TestOutput(True) ] + else: + print("Skipping AESNI tests") + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py new file mode 100644 index 000000000..fd9448c1d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/ARC2.py: Self-test for the Alleged-RC2 cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.ARC2""" + +import unittest + +from Crypto.Util.py3compat import b, bchr + +from Crypto.Cipher import ARC2 + +# This is a list of (plaintext, ciphertext, key[, description[, extra_params]]) tuples. 
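When a tuple carries a fifth element, that dictionary supplies extra keyword arguments for the cipher constructor; this is how the RFC 2268 vectors pin the effective key length independently of the key size. A sketch of what the RFC2268-1 entry below amounts to, assuming the extra parameters are passed straight through to ARC2.new() (the forwarding itself lives in common.py, which is not shown in this diff):

from binascii import unhexlify
from Crypto.Cipher import ARC2

cipher = ARC2.new(unhexlify('0000000000000000'), ARC2.MODE_ECB, effective_keylen=63)
assert cipher.encrypt(unhexlify('0000000000000000')).hex() == 'ebb773f993278eff'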
+test_data = [ + # Test vectors from RFC 2268 + + # 63-bit effective key length + ('0000000000000000', 'ebb773f993278eff', '0000000000000000', + 'RFC2268-1', dict(effective_keylen=63)), + + # 64-bit effective key length + ('ffffffffffffffff', '278b27e42e2f0d49', 'ffffffffffffffff', + 'RFC2268-2', dict(effective_keylen=64)), + ('1000000000000001', '30649edf9be7d2c2', '3000000000000000', + 'RFC2268-3', dict(effective_keylen=64)), + #('0000000000000000', '61a8a244adacccf0', '88', + # 'RFC2268-4', dict(effective_keylen=64)), + ('0000000000000000', '6ccf4308974c267f', '88bca90e90875a', + 'RFC2268-5', dict(effective_keylen=64)), + ('0000000000000000', '1a807d272bbe5db1', '88bca90e90875a7f0f79c384627bafb2', + 'RFC2268-6', dict(effective_keylen=64)), + + # 128-bit effective key length + ('0000000000000000', '2269552ab0f85ca6', '88bca90e90875a7f0f79c384627bafb2', + "RFC2268-7", dict(effective_keylen=128)), + ('0000000000000000', '5b78d3a43dfff1f1', + '88bca90e90875a7f0f79c384627bafb216f80a6f85920584c42fceb0be255daf1e', + "RFC2268-8", dict(effective_keylen=129)), + + # Test vectors from PyCrypto 2.0.1's testdata.py + # 1024-bit effective key length + ('0000000000000000', '624fb3e887419e48', '5068696c6970476c617373', + 'PCTv201-0'), + ('ffffffffffffffff', '79cadef44c4a5a85', '5068696c6970476c617373', + 'PCTv201-1'), + ('0001020304050607', '90411525b34e4c2c', '5068696c6970476c617373', + 'PCTv201-2'), + ('0011223344556677', '078656aaba61cbfb', '5068696c6970476c617373', + 'PCTv201-3'), + ('0000000000000000', 'd7bcc5dbb4d6e56a', 'ffffffffffffffff', + 'PCTv201-4'), + ('ffffffffffffffff', '7259018ec557b357', 'ffffffffffffffff', + 'PCTv201-5'), + ('0001020304050607', '93d20a497f2ccb62', 'ffffffffffffffff', + 'PCTv201-6'), + ('0011223344556677', 'cb15a7f819c0014d', 'ffffffffffffffff', + 'PCTv201-7'), + ('0000000000000000', '63ac98cdf3843a7a', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', + 'PCTv201-8'), + ('ffffffffffffffff', '3fb49e2fa12371dd', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', + 'PCTv201-9'), + ('0001020304050607', '46414781ab387d5f', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', + 'PCTv201-10'), + ('0011223344556677', 'be09dc81feaca271', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', + 'PCTv201-11'), + ('0000000000000000', 'e64221e608be30ab', '53e5ffe553', + 'PCTv201-12'), + ('ffffffffffffffff', '862bc60fdcd4d9a9', '53e5ffe553', + 'PCTv201-13'), + ('0001020304050607', '6a34da50fa5e47de', '53e5ffe553', + 'PCTv201-14'), + ('0011223344556677', '584644c34503122c', '53e5ffe553', + 'PCTv201-15'), +] + +class BufferOverflowTest(unittest.TestCase): + # Test a buffer overflow found in older versions of PyCrypto + + def runTest(self): + """ARC2 with keylength > 128""" + key = b("x") * 16384 + self.assertRaises(ValueError, ARC2.new, key, ARC2.MODE_ECB) + +class KeyLength(unittest.TestCase): + + def runTest(self): + ARC2.new(b'\x00' * 16, ARC2.MODE_ECB, effective_keylen=40) + self.assertRaises(ValueError, ARC2.new, bchr(0) * 4, ARC2.MODE_ECB) + self.assertRaises(ValueError, ARC2.new, bchr(0) * 129, ARC2.MODE_ECB) + + self.assertRaises(ValueError, ARC2.new, bchr(0) * 16, ARC2.MODE_ECB, + effective_keylen=39) + self.assertRaises(ValueError, ARC2.new, bchr(0) * 16, ARC2.MODE_ECB, + effective_keylen=1025) + + +class TestOutput(unittest.TestCase): + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + cipher = ARC2.new(b'4'*16, ARC2.MODE_ECB) + + pt = b'5' * 16 + ct = cipher.encrypt(pt) + + output = bytearray(16) + res = cipher.encrypt(pt, 
output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(16)) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(7) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + from Crypto.Cipher import ARC2 + from .common import make_block_tests + + tests = make_block_tests(ARC2, "ARC2", test_data) + tests.append(BufferOverflowTest()) + tests.append(KeyLength()) + tests += [TestOutput()] + + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py new file mode 100644 index 000000000..753df4cb6 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py @@ -0,0 +1,471 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/ARC4.py: Self-test for the Alleged-RC4 cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.ARC4""" + +import unittest + +from Crypto.Util.py3compat import b +from Crypto.SelfTest.st_common import list_test_cases +from binascii import unhexlify + +from Crypto.Cipher import ARC4 + +# This is a list of (plaintext, ciphertext, key[, description]) tuples. +test_data = [ + # Test vectors from Eric Rescorla's message with the subject + # "RC4 compatibility testing", sent to the cipherpunks mailing list on + # September 13, 1994. 
+ # http://cypherpunks.venona.com/date/1994/09/msg00420.html + + ('0123456789abcdef', '75b7878099e0c596', '0123456789abcdef', + 'Test vector 0'), + + ('0000000000000000', '7494c2e7104b0879', '0123456789abcdef', + 'Test vector 1'), + + ('0000000000000000', 'de188941a3375d3a', '0000000000000000', + 'Test vector 2'), + + ('00000000000000000000', 'd6a141a7ec3c38dfbd61', 'ef012345', + 'Test vector 3'), + + ('01' * 512, + '7595c3e6114a09780c4ad452338e1ffd9a1be9498f813d76533449b6778dcad8' + + 'c78a8d2ba9ac66085d0e53d59c26c2d1c490c1ebbe0ce66d1b6b1b13b6b919b8' + + '47c25a91447a95e75e4ef16779cde8bf0a95850e32af9689444fd377108f98fd' + + 'cbd4e726567500990bcc7e0ca3c4aaa304a387d20f3b8fbbcd42a1bd311d7a43' + + '03dda5ab078896ae80c18b0af66dff319616eb784e495ad2ce90d7f772a81747' + + 'b65f62093b1e0db9e5ba532fafec47508323e671327df9444432cb7367cec82f' + + '5d44c0d00b67d650a075cd4b70dedd77eb9b10231b6b5b741347396d62897421' + + 'd43df9b42e446e358e9c11a9b2184ecbef0cd8e7a877ef968f1390ec9b3d35a5' + + '585cb009290e2fcde7b5ec66d9084be44055a619d9dd7fc3166f9487f7cb2729' + + '12426445998514c15d53a18c864ce3a2b7555793988126520eacf2e3066e230c' + + '91bee4dd5304f5fd0405b35bd99c73135d3d9bc335ee049ef69b3867bf2d7bd1' + + 'eaa595d8bfc0066ff8d31509eb0c6caa006c807a623ef84c3d33c195d23ee320' + + 'c40de0558157c822d4b8c569d849aed59d4e0fd7f379586b4b7ff684ed6a189f' + + '7486d49b9c4bad9ba24b96abf924372c8a8fffb10d55354900a77a3db5f205e1' + + 'b99fcd8660863a159ad4abe40fa48934163ddde542a6585540fd683cbfd8c00f' + + '12129a284deacc4cdefe58be7137541c047126c8d49e2755ab181ab7e940b0c0', + '0123456789abcdef', + "Test vector 4"), + # shortest key - generated with arc4 package + ('7468697320697320616e206578616d706c65', + '7260677d38495a09585d69321e17eaf3cdd0', + '01'), +] + + +class RFC6229_Tests(unittest.TestCase): + # Test vectors from RFC 6229. Each test vector is a tuple with two items: + # the ARC4 key and a dictionary. The dictionary has keystream offsets as keys + # and the 16-byte keystream starting at the relevant offset as value. 
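Because RC4 is a stream cipher, encrypting a run of zero bytes returns the raw keystream, so each dictionary value below is simply the 16 keystream bytes produced at the given offset. A minimal sketch for the offset-0 entry of the first (40-bit) key, using the same ARC4.new()/encrypt() calls as test_keystream further down:

from binascii import unhexlify
from Crypto.Cipher import ARC4

cipher = ARC4.new(unhexlify('0102030405'))
assert cipher.encrypt(b'\x00' * 16) == unhexlify('b2396305f03dc027ccc3524a0a1118a8')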
+ rfc6229_data = [ + # Page 3 + ( + '0102030405', + { + 0: 'b2 39 63 05 f0 3d c0 27 cc c3 52 4a 0a 11 18 a8', + 16: '69 82 94 4f 18 fc 82 d5 89 c4 03 a4 7a 0d 09 19', + 240: '28 cb 11 32 c9 6c e2 86 42 1d ca ad b8 b6 9e ae', + 256: '1c fc f6 2b 03 ed db 64 1d 77 df cf 7f 8d 8c 93', + 496: '42 b7 d0 cd d9 18 a8 a3 3d d5 17 81 c8 1f 40 41', + 512: '64 59 84 44 32 a7 da 92 3c fb 3e b4 98 06 61 f6', + 752: 'ec 10 32 7b de 2b ee fd 18 f9 27 76 80 45 7e 22', + 768: 'eb 62 63 8d 4f 0b a1 fe 9f ca 20 e0 5b f8 ff 2b', + 1008: '45 12 90 48 e6 a0 ed 0b 56 b4 90 33 8f 07 8d a5', + 1024: '30 ab bc c7 c2 0b 01 60 9f 23 ee 2d 5f 6b b7 df', + 1520: '32 94 f7 44 d8 f9 79 05 07 e7 0f 62 e5 bb ce ea', + 1536: 'd8 72 9d b4 18 82 25 9b ee 4f 82 53 25 f5 a1 30', + 2032: '1e b1 4a 0c 13 b3 bf 47 fa 2a 0b a9 3a d4 5b 8b', + 2048: 'cc 58 2f 8b a9 f2 65 e2 b1 be 91 12 e9 75 d2 d7', + 3056: 'f2 e3 0f 9b d1 02 ec bf 75 aa ad e9 bc 35 c4 3c', + 3072: 'ec 0e 11 c4 79 dc 32 9d c8 da 79 68 fe 96 56 81', + 4080: '06 83 26 a2 11 84 16 d2 1f 9d 04 b2 cd 1c a0 50', + 4096: 'ff 25 b5 89 95 99 67 07 e5 1f bd f0 8b 34 d8 75' + } + ), + # Page 4 + ( + '01020304050607', + { + 0: '29 3f 02 d4 7f 37 c9 b6 33 f2 af 52 85 fe b4 6b', + 16: 'e6 20 f1 39 0d 19 bd 84 e2 e0 fd 75 20 31 af c1', + 240: '91 4f 02 53 1c 92 18 81 0d f6 0f 67 e3 38 15 4c', + 256: 'd0 fd b5 83 07 3c e8 5a b8 39 17 74 0e c0 11 d5', + 496: '75 f8 14 11 e8 71 cf fa 70 b9 0c 74 c5 92 e4 54', + 512: '0b b8 72 02 93 8d ad 60 9e 87 a5 a1 b0 79 e5 e4', + 752: 'c2 91 12 46 b6 12 e7 e7 b9 03 df ed a1 da d8 66', + 768: '32 82 8f 91 50 2b 62 91 36 8d e8 08 1d e3 6f c2', + 1008: 'f3 b9 a7 e3 b2 97 bf 9a d8 04 51 2f 90 63 ef f1', + 1024: '8e cb 67 a9 ba 1f 55 a5 a0 67 e2 b0 26 a3 67 6f', + 1520: 'd2 aa 90 2b d4 2d 0d 7c fd 34 0c d4 58 10 52 9f', + 1536: '78 b2 72 c9 6e 42 ea b4 c6 0b d9 14 e3 9d 06 e3', + 2032: 'f4 33 2f d3 1a 07 93 96 ee 3c ee 3f 2a 4f f0 49', + 2048: '05 45 97 81 d4 1f da 7f 30 c1 be 7e 12 46 c6 23', + 3056: 'ad fd 38 68 b8 e5 14 85 d5 e6 10 01 7e 3d d6 09', + 3072: 'ad 26 58 1c 0c 5b e4 5f 4c ea 01 db 2f 38 05 d5', + 4080: 'f3 17 2c ef fc 3b 3d 99 7c 85 cc d5 af 1a 95 0c', + 4096: 'e7 4b 0b 97 31 22 7f d3 7c 0e c0 8a 47 dd d8 b8' + } + ), + ( + '0102030405060708', + { + 0: '97 ab 8a 1b f0 af b9 61 32 f2 f6 72 58 da 15 a8', + 16: '82 63 ef db 45 c4 a1 86 84 ef 87 e6 b1 9e 5b 09', + 240: '96 36 eb c9 84 19 26 f4 f7 d1 f3 62 bd df 6e 18', + 256: 'd0 a9 90 ff 2c 05 fe f5 b9 03 73 c9 ff 4b 87 0a', + 496: '73 23 9f 1d b7 f4 1d 80 b6 43 c0 c5 25 18 ec 63', + 512: '16 3b 31 99 23 a6 bd b4 52 7c 62 61 26 70 3c 0f', + 752: '49 d6 c8 af 0f 97 14 4a 87 df 21 d9 14 72 f9 66', + 768: '44 17 3a 10 3b 66 16 c5 d5 ad 1c ee 40 c8 63 d0', + 1008: '27 3c 9c 4b 27 f3 22 e4 e7 16 ef 53 a4 7d e7 a4', + 1024: 'c6 d0 e7 b2 26 25 9f a9 02 34 90 b2 61 67 ad 1d', + 1520: '1f e8 98 67 13 f0 7c 3d 9a e1 c1 63 ff 8c f9 d3', + 1536: '83 69 e1 a9 65 61 0b e8 87 fb d0 c7 91 62 aa fb', + 2032: '0a 01 27 ab b4 44 84 b9 fb ef 5a bc ae 1b 57 9f', + 2048: 'c2 cd ad c6 40 2e 8e e8 66 e1 f3 7b db 47 e4 2c', + 3056: '26 b5 1e a3 7d f8 e1 d6 f7 6f c3 b6 6a 74 29 b3', + 3072: 'bc 76 83 20 5d 4f 44 3d c1 f2 9d da 33 15 c8 7b', + 4080: 'd5 fa 5a 34 69 d2 9a aa f8 3d 23 58 9d b8 c8 5b', + 4096: '3f b4 6e 2c 8f 0f 06 8e dc e8 cd cd 7d fc 58 62' + } + ), + # Page 5 + ( + '0102030405060708090a', + { + 0: 'ed e3 b0 46 43 e5 86 cc 90 7d c2 18 51 70 99 02', + 16: '03 51 6b a7 8f 41 3b eb 22 3a a5 d4 d2 df 67 11', + 240: '3c fd 6c b5 8e e0 fd de 64 01 76 ad 00 00 04 4d', + 256: '48 53 2b 21 fb 60 79 c9 11 4c 
0f fd 9c 04 a1 ad', + 496: '3e 8c ea 98 01 71 09 97 90 84 b1 ef 92 f9 9d 86', + 512: 'e2 0f b4 9b db 33 7e e4 8b 8d 8d c0 f4 af ef fe', + 752: '5c 25 21 ea cd 79 66 f1 5e 05 65 44 be a0 d3 15', + 768: 'e0 67 a7 03 19 31 a2 46 a6 c3 87 5d 2f 67 8a cb', + 1008: 'a6 4f 70 af 88 ae 56 b6 f8 75 81 c0 e2 3e 6b 08', + 1024: 'f4 49 03 1d e3 12 81 4e c6 f3 19 29 1f 4a 05 16', + 1520: 'bd ae 85 92 4b 3c b1 d0 a2 e3 3a 30 c6 d7 95 99', + 1536: '8a 0f ed db ac 86 5a 09 bc d1 27 fb 56 2e d6 0a', + 2032: 'b5 5a 0a 5b 51 a1 2a 8b e3 48 99 c3 e0 47 51 1a', + 2048: 'd9 a0 9c ea 3c e7 5f e3 96 98 07 03 17 a7 13 39', + 3056: '55 22 25 ed 11 77 f4 45 84 ac 8c fa 6c 4e b5 fc', + 3072: '7e 82 cb ab fc 95 38 1b 08 09 98 44 21 29 c2 f8', + 4080: '1f 13 5e d1 4c e6 0a 91 36 9d 23 22 be f2 5e 3c', + 4096: '08 b6 be 45 12 4a 43 e2 eb 77 95 3f 84 dc 85 53' + } + ), + ( + '0102030405060708090a0b0c0d0e0f10', + { + 0: '9a c7 cc 9a 60 9d 1e f7 b2 93 28 99 cd e4 1b 97', + 16: '52 48 c4 95 90 14 12 6a 6e 8a 84 f1 1d 1a 9e 1c', + 240: '06 59 02 e4 b6 20 f6 cc 36 c8 58 9f 66 43 2f 2b', + 256: 'd3 9d 56 6b c6 bc e3 01 07 68 15 15 49 f3 87 3f', + 496: 'b6 d1 e6 c4 a5 e4 77 1c ad 79 53 8d f2 95 fb 11', + 512: 'c6 8c 1d 5c 55 9a 97 41 23 df 1d bc 52 a4 3b 89', + 752: 'c5 ec f8 8d e8 97 fd 57 fe d3 01 70 1b 82 a2 59', + 768: 'ec cb e1 3d e1 fc c9 1c 11 a0 b2 6c 0b c8 fa 4d', + 1008: 'e7 a7 25 74 f8 78 2a e2 6a ab cf 9e bc d6 60 65', + 1024: 'bd f0 32 4e 60 83 dc c6 d3 ce dd 3c a8 c5 3c 16', + 1520: 'b4 01 10 c4 19 0b 56 22 a9 61 16 b0 01 7e d2 97', + 1536: 'ff a0 b5 14 64 7e c0 4f 63 06 b8 92 ae 66 11 81', + 2032: 'd0 3d 1b c0 3c d3 3d 70 df f9 fa 5d 71 96 3e bd', + 2048: '8a 44 12 64 11 ea a7 8b d5 1e 8d 87 a8 87 9b f5', + 3056: 'fa be b7 60 28 ad e2 d0 e4 87 22 e4 6c 46 15 a3', + 3072: 'c0 5d 88 ab d5 03 57 f9 35 a6 3c 59 ee 53 76 23', + 4080: 'ff 38 26 5c 16 42 c1 ab e8 d3 c2 fe 5e 57 2b f8', + 4096: 'a3 6a 4c 30 1a e8 ac 13 61 0c cb c1 22 56 ca cc' + } + ), + # Page 6 + ( + '0102030405060708090a0b0c0d0e0f101112131415161718', + { + 0: '05 95 e5 7f e5 f0 bb 3c 70 6e da c8 a4 b2 db 11', + 16: 'df de 31 34 4a 1a f7 69 c7 4f 07 0a ee 9e 23 26', + 240: 'b0 6b 9b 1e 19 5d 13 d8 f4 a7 99 5c 45 53 ac 05', + 256: '6b d2 37 8e c3 41 c9 a4 2f 37 ba 79 f8 8a 32 ff', + 496: 'e7 0b ce 1d f7 64 5a db 5d 2c 41 30 21 5c 35 22', + 512: '9a 57 30 c7 fc b4 c9 af 51 ff da 89 c7 f1 ad 22', + 752: '04 85 05 5f d4 f6 f0 d9 63 ef 5a b9 a5 47 69 82', + 768: '59 1f c6 6b cd a1 0e 45 2b 03 d4 55 1f 6b 62 ac', + 1008: '27 53 cc 83 98 8a fa 3e 16 88 a1 d3 b4 2c 9a 02', + 1024: '93 61 0d 52 3d 1d 3f 00 62 b3 c2 a3 bb c7 c7 f0', + 1520: '96 c2 48 61 0a ad ed fe af 89 78 c0 3d e8 20 5a', + 1536: '0e 31 7b 3d 1c 73 b9 e9 a4 68 8f 29 6d 13 3a 19', + 2032: 'bd f0 e6 c3 cc a5 b5 b9 d5 33 b6 9c 56 ad a1 20', + 2048: '88 a2 18 b6 e2 ec e1 e6 24 6d 44 c7 59 d1 9b 10', + 3056: '68 66 39 7e 95 c1 40 53 4f 94 26 34 21 00 6e 40', + 3072: '32 cb 0a 1e 95 42 c6 b3 b8 b3 98 ab c3 b0 f1 d5', + 4080: '29 a0 b8 ae d5 4a 13 23 24 c6 2e 42 3f 54 b4 c8', + 4096: '3c b0 f3 b5 02 0a 98 b8 2a f9 fe 15 44 84 a1 68' + } + ), + ( + '0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20', + { + 0: 'ea a6 bd 25 88 0b f9 3d 3f 5d 1e 4c a2 61 1d 91', + 16: 'cf a4 5c 9f 7e 71 4b 54 bd fa 80 02 7c b1 43 80', + 240: '11 4a e3 44 de d7 1b 35 f2 e6 0f eb ad 72 7f d8', + 256: '02 e1 e7 05 6b 0f 62 39 00 49 64 22 94 3e 97 b6', + 496: '91 cb 93 c7 87 96 4e 10 d9 52 7d 99 9c 6f 93 6b', + 512: '49 b1 8b 42 f8 e8 36 7c be b5 ef 10 4b a1 c7 cd', + 752: '87 08 4b 3b a7 00 ba de 95 56 10 67 
27 45 b3 74', + 768: 'e7 a7 b9 e9 ec 54 0d 5f f4 3b db 12 79 2d 1b 35', + 1008: 'c7 99 b5 96 73 8f 6b 01 8c 76 c7 4b 17 59 bd 90', + 1024: '7f ec 5b fd 9f 9b 89 ce 65 48 30 90 92 d7 e9 58', + 1520: '40 f2 50 b2 6d 1f 09 6a 4a fd 4c 34 0a 58 88 15', + 1536: '3e 34 13 5c 79 db 01 02 00 76 76 51 cf 26 30 73', + 2032: 'f6 56 ab cc f8 8d d8 27 02 7b 2c e9 17 d4 64 ec', + 2048: '18 b6 25 03 bf bc 07 7f ba bb 98 f2 0d 98 ab 34', + 3056: '8a ed 95 ee 5b 0d cb fb ef 4e b2 1d 3a 3f 52 f9', + 3072: '62 5a 1a b0 0e e3 9a 53 27 34 6b dd b0 1a 9c 18', + 4080: 'a1 3a 7c 79 c7 e1 19 b5 ab 02 96 ab 28 c3 00 b9', + 4096: 'f3 e4 c0 a2 e0 2d 1d 01 f7 f0 a7 46 18 af 2b 48' + } + ), + # Page 7 + ( + '833222772a', + { + 0: '80 ad 97 bd c9 73 df 8a 2e 87 9e 92 a4 97 ef da', + 16: '20 f0 60 c2 f2 e5 12 65 01 d3 d4 fe a1 0d 5f c0', + 240: 'fa a1 48 e9 90 46 18 1f ec 6b 20 85 f3 b2 0e d9', + 256: 'f0 da f5 ba b3 d5 96 83 98 57 84 6f 73 fb fe 5a', + 496: '1c 7e 2f c4 63 92 32 fe 29 75 84 b2 96 99 6b c8', + 512: '3d b9 b2 49 40 6c c8 ed ff ac 55 cc d3 22 ba 12', + 752: 'e4 f9 f7 e0 06 61 54 bb d1 25 b7 45 56 9b c8 97', + 768: '75 d5 ef 26 2b 44 c4 1a 9c f6 3a e1 45 68 e1 b9', + 1008: '6d a4 53 db f8 1e 82 33 4a 3d 88 66 cb 50 a1 e3', + 1024: '78 28 d0 74 11 9c ab 5c 22 b2 94 d7 a9 bf a0 bb', + 1520: 'ad b8 9c ea 9a 15 fb e6 17 29 5b d0 4b 8c a0 5c', + 1536: '62 51 d8 7f d4 aa ae 9a 7e 4a d5 c2 17 d3 f3 00', + 2032: 'e7 11 9b d6 dd 9b 22 af e8 f8 95 85 43 28 81 e2', + 2048: '78 5b 60 fd 7e c4 e9 fc b6 54 5f 35 0d 66 0f ab', + 3056: 'af ec c0 37 fd b7 b0 83 8e b3 d7 0b cd 26 83 82', + 3072: 'db c1 a7 b4 9d 57 35 8c c9 fa 6d 61 d7 3b 7c f0', + 4080: '63 49 d1 26 a3 7a fc ba 89 79 4f 98 04 91 4f dc', + 4096: 'bf 42 c3 01 8c 2f 7c 66 bf de 52 49 75 76 81 15' + } + ), + ( + '1910833222772a', + { + 0: 'bc 92 22 db d3 27 4d 8f c6 6d 14 cc bd a6 69 0b', + 16: '7a e6 27 41 0c 9a 2b e6 93 df 5b b7 48 5a 63 e3', + 240: '3f 09 31 aa 03 de fb 30 0f 06 01 03 82 6f 2a 64', + 256: 'be aa 9e c8 d5 9b b6 81 29 f3 02 7c 96 36 11 81', + 496: '74 e0 4d b4 6d 28 64 8d 7d ee 8a 00 64 b0 6c fe', + 512: '9b 5e 81 c6 2f e0 23 c5 5b e4 2f 87 bb f9 32 b8', + 752: 'ce 17 8f c1 82 6e fe cb c1 82 f5 79 99 a4 61 40', + 768: '8b df 55 cd 55 06 1c 06 db a6 be 11 de 4a 57 8a', + 1008: '62 6f 5f 4d ce 65 25 01 f3 08 7d 39 c9 2c c3 49', + 1024: '42 da ac 6a 8f 9a b9 a7 fd 13 7c 60 37 82 56 82', + 1520: 'cc 03 fd b7 91 92 a2 07 31 2f 53 f5 d4 dc 33 d9', + 1536: 'f7 0f 14 12 2a 1c 98 a3 15 5d 28 b8 a0 a8 a4 1d', + 2032: '2a 3a 30 7a b2 70 8a 9c 00 fe 0b 42 f9 c2 d6 a1', + 2048: '86 26 17 62 7d 22 61 ea b0 b1 24 65 97 ca 0a e9', + 3056: '55 f8 77 ce 4f 2e 1d db bf 8e 13 e2 cd e0 fd c8', + 3072: '1b 15 56 cb 93 5f 17 33 37 70 5f bb 5d 50 1f c1', + 4080: 'ec d0 e9 66 02 be 7f 8d 50 92 81 6c cc f2 c2 e9', + 4096: '02 78 81 fa b4 99 3a 1c 26 20 24 a9 4f ff 3f 61' + } + ), + # Page 8 + ( + '641910833222772a', + { + 0: 'bb f6 09 de 94 13 17 2d 07 66 0c b6 80 71 69 26', + 16: '46 10 1a 6d ab 43 11 5d 6c 52 2b 4f e9 36 04 a9', + 240: 'cb e1 ff f2 1c 96 f3 ee f6 1e 8f e0 54 2c bd f0', + 256: '34 79 38 bf fa 40 09 c5 12 cf b4 03 4b 0d d1 a7', + 496: '78 67 a7 86 d0 0a 71 47 90 4d 76 dd f1 e5 20 e3', + 512: '8d 3e 9e 1c ae fc cc b3 fb f8 d1 8f 64 12 0b 32', + 752: '94 23 37 f8 fd 76 f0 fa e8 c5 2d 79 54 81 06 72', + 768: 'b8 54 8c 10 f5 16 67 f6 e6 0e 18 2f a1 9b 30 f7', + 1008: '02 11 c7 c6 19 0c 9e fd 12 37 c3 4c 8f 2e 06 c4', + 1024: 'bd a6 4f 65 27 6d 2a ac b8 f9 02 12 20 3a 80 8e', + 1520: 'bd 38 20 f7 32 ff b5 3e c1 93 e7 9d 33 e2 7c 73', + 1536: 'd0 16 86 16 86 
19 07 d4 82 e3 6c da c8 cf 57 49', + 2032: '97 b0 f0 f2 24 b2 d2 31 71 14 80 8f b0 3a f7 a0', + 2048: 'e5 96 16 e4 69 78 79 39 a0 63 ce ea 9a f9 56 d1', + 3056: 'c4 7e 0d c1 66 09 19 c1 11 01 20 8f 9e 69 aa 1f', + 3072: '5a e4 f1 28 96 b8 37 9a 2a ad 89 b5 b5 53 d6 b0', + 4080: '6b 6b 09 8d 0c 29 3b c2 99 3d 80 bf 05 18 b6 d9', + 4096: '81 70 cc 3c cd 92 a6 98 62 1b 93 9d d3 8f e7 b9' + } + ), + ( + '8b37641910833222772a', + { + 0: 'ab 65 c2 6e dd b2 87 60 0d b2 fd a1 0d 1e 60 5c', + 16: 'bb 75 90 10 c2 96 58 f2 c7 2d 93 a2 d1 6d 29 30', + 240: 'b9 01 e8 03 6e d1 c3 83 cd 3c 4c 4d d0 a6 ab 05', + 256: '3d 25 ce 49 22 92 4c 55 f0 64 94 33 53 d7 8a 6c', + 496: '12 c1 aa 44 bb f8 7e 75 e6 11 f6 9b 2c 38 f4 9b', + 512: '28 f2 b3 43 4b 65 c0 98 77 47 00 44 c6 ea 17 0d', + 752: 'bd 9e f8 22 de 52 88 19 61 34 cf 8a f7 83 93 04', + 768: '67 55 9c 23 f0 52 15 84 70 a2 96 f7 25 73 5a 32', + 1008: '8b ab 26 fb c2 c1 2b 0f 13 e2 ab 18 5e ab f2 41', + 1024: '31 18 5a 6d 69 6f 0c fa 9b 42 80 8b 38 e1 32 a2', + 1520: '56 4d 3d ae 18 3c 52 34 c8 af 1e 51 06 1c 44 b5', + 1536: '3c 07 78 a7 b5 f7 2d 3c 23 a3 13 5c 7d 67 b9 f4', + 2032: 'f3 43 69 89 0f cf 16 fb 51 7d ca ae 44 63 b2 dd', + 2048: '02 f3 1c 81 e8 20 07 31 b8 99 b0 28 e7 91 bf a7', + 3056: '72 da 64 62 83 22 8c 14 30 08 53 70 17 95 61 6f', + 3072: '4e 0a 8c 6f 79 34 a7 88 e2 26 5e 81 d6 d0 c8 f4', + 4080: '43 8d d5 ea fe a0 11 1b 6f 36 b4 b9 38 da 2a 68', + 4096: '5f 6b fc 73 81 58 74 d9 71 00 f0 86 97 93 57 d8' + } + ), + # Page 9 + ( + 'ebb46227c6cc8b37641910833222772a', + { + 0: '72 0c 94 b6 3e df 44 e1 31 d9 50 ca 21 1a 5a 30', + 16: 'c3 66 fd ea cf 9c a8 04 36 be 7c 35 84 24 d2 0b', + 240: 'b3 39 4a 40 aa bf 75 cb a4 22 82 ef 25 a0 05 9f', + 256: '48 47 d8 1d a4 94 2d bc 24 9d ef c4 8c 92 2b 9f', + 496: '08 12 8c 46 9f 27 53 42 ad da 20 2b 2b 58 da 95', + 512: '97 0d ac ef 40 ad 98 72 3b ac 5d 69 55 b8 17 61', + 752: '3c b8 99 93 b0 7b 0c ed 93 de 13 d2 a1 10 13 ac', + 768: 'ef 2d 67 6f 15 45 c2 c1 3d c6 80 a0 2f 4a db fe', + 1008: 'b6 05 95 51 4f 24 bc 9f e5 22 a6 ca d7 39 36 44', + 1024: 'b5 15 a8 c5 01 17 54 f5 90 03 05 8b db 81 51 4e', + 1520: '3c 70 04 7e 8c bc 03 8e 3b 98 20 db 60 1d a4 95', + 1536: '11 75 da 6e e7 56 de 46 a5 3e 2b 07 56 60 b7 70', + 2032: '00 a5 42 bb a0 21 11 cc 2c 65 b3 8e bd ba 58 7e', + 2048: '58 65 fd bb 5b 48 06 41 04 e8 30 b3 80 f2 ae de', + 3056: '34 b2 1a d2 ad 44 e9 99 db 2d 7f 08 63 f0 d9 b6', + 3072: '84 a9 21 8f c3 6e 8a 5f 2c cf be ae 53 a2 7d 25', + 4080: 'a2 22 1a 11 b8 33 cc b4 98 a5 95 40 f0 54 5f 4a', + 4096: '5b be b4 78 7d 59 e5 37 3f db ea 6c 6f 75 c2 9b' + } + ), + ( + 'c109163908ebe51debb46227c6cc8b37641910833222772a', + { + 0: '54 b6 4e 6b 5a 20 b5 e2 ec 84 59 3d c7 98 9d a7', + 16: 'c1 35 ee e2 37 a8 54 65 ff 97 dc 03 92 4f 45 ce', + 240: 'cf cc 92 2f b4 a1 4a b4 5d 61 75 aa bb f2 d2 01', + 256: '83 7b 87 e2 a4 46 ad 0e f7 98 ac d0 2b 94 12 4f', + 496: '17 a6 db d6 64 92 6a 06 36 b3 f4 c3 7a 4f 46 94', + 512: '4a 5f 9f 26 ae ee d4 d4 a2 5f 63 2d 30 52 33 d9', + 752: '80 a3 d0 1e f0 0c 8e 9a 42 09 c1 7f 4e eb 35 8c', + 768: 'd1 5e 7d 5f fa aa bc 02 07 bf 20 0a 11 77 93 a2', + 1008: '34 96 82 bf 58 8e aa 52 d0 aa 15 60 34 6a ea fa', + 1024: 'f5 85 4c db 76 c8 89 e3 ad 63 35 4e 5f 72 75 e3', + 1520: '53 2c 7c ec cb 39 df 32 36 31 84 05 a4 b1 27 9c', + 1536: 'ba ef e6 d9 ce b6 51 84 22 60 e0 d1 e0 5e 3b 90', + 2032: 'e8 2d 8c 6d b5 4e 3c 63 3f 58 1c 95 2b a0 42 07', + 2048: '4b 16 e5 0a bd 38 1b d7 09 00 a9 cd 9a 62 cb 23', + 3056: '36 82 ee 33 bd 14 8b d9 f5 86 56 cd 8f 30 d9 fb', + 3072: '1e 
5a 0b 84 75 04 5d 9b 20 b2 62 86 24 ed fd 9e', + 4080: '63 ed d6 84 fb 82 62 82 fe 52 8f 9c 0e 92 37 bc', + 4096: 'e4 dd 2e 98 d6 96 0f ae 0b 43 54 54 56 74 33 91' + } + ), + # Page 10 + ( + '1ada31d5cf688221c109163908ebe51debb46227c6cc8b37641910833222772a', + { + 0: 'dd 5b cb 00 18 e9 22 d4 94 75 9d 7c 39 5d 02 d3', + 16: 'c8 44 6f 8f 77 ab f7 37 68 53 53 eb 89 a1 c9 eb', + 240: 'af 3e 30 f9 c0 95 04 59 38 15 15 75 c3 fb 90 98', + 256: 'f8 cb 62 74 db 99 b8 0b 1d 20 12 a9 8e d4 8f 0e', + 496: '25 c3 00 5a 1c b8 5d e0 76 25 98 39 ab 71 98 ab', + 512: '9d cb c1 83 e8 cb 99 4b 72 7b 75 be 31 80 76 9c', + 752: 'a1 d3 07 8d fa 91 69 50 3e d9 d4 49 1d ee 4e b2', + 768: '85 14 a5 49 58 58 09 6f 59 6e 4b cd 66 b1 06 65', + 1008: '5f 40 d5 9e c1 b0 3b 33 73 8e fa 60 b2 25 5d 31', + 1024: '34 77 c7 f7 64 a4 1b ac ef f9 0b f1 4f 92 b7 cc', + 1520: 'ac 4e 95 36 8d 99 b9 eb 78 b8 da 8f 81 ff a7 95', + 1536: '8c 3c 13 f8 c2 38 8b b7 3f 38 57 6e 65 b7 c4 46', + 2032: '13 c4 b9 c1 df b6 65 79 ed dd 8a 28 0b 9f 73 16', + 2048: 'dd d2 78 20 55 01 26 69 8e fa ad c6 4b 64 f6 6e', + 3056: 'f0 8f 2e 66 d2 8e d1 43 f3 a2 37 cf 9d e7 35 59', + 3072: '9e a3 6c 52 55 31 b8 80 ba 12 43 34 f5 7b 0b 70', + 4080: 'd5 a3 9e 3d fc c5 02 80 ba c4 a6 b5 aa 0d ca 7d', + 4096: '37 0b 1c 1f e6 55 91 6d 97 fd 0d 47 ca 1d 72 b8' + } + ) + ] + + def test_keystream(self): + for tv in self.rfc6229_data: + key = unhexlify(b((tv[0]))) + cipher = ARC4.new(key) + count = 0 + for offset in range(0, 4096+1, 16): + ct = cipher.encrypt(b('\x00')*16) + expected = tv[1].get(offset) + if expected: + expected = unhexlify(b(expected.replace(" ", ''))) + self.assertEqual(ct, expected) + count += 1 + self.assertEqual(count, len(tv[1])) + + +class Drop_Tests(unittest.TestCase): + key = b('\xAA')*16 + data = b('\x00')*5000 + + def setUp(self): + self.cipher = ARC4.new(self.key) + + def test_drop256_encrypt(self): + cipher_drop = ARC4.new(self.key, 256) + ct_drop = cipher_drop.encrypt(self.data[:16]) + ct = self.cipher.encrypt(self.data)[256:256+16] + self.assertEqual(ct_drop, ct) + + def test_drop256_decrypt(self): + cipher_drop = ARC4.new(self.key, 256) + pt_drop = cipher_drop.decrypt(self.data[:16]) + pt = self.cipher.decrypt(self.data)[256:256+16] + self.assertEqual(pt_drop, pt) + + +class KeyLength(unittest.TestCase): + + def runTest(self): + self.assertRaises(ValueError, ARC4.new, b'') + self.assertRaises(ValueError, ARC4.new, b'\x00' * 257) + + +def get_tests(config={}): + from .common import make_stream_tests + tests = make_stream_tests(ARC4, "ARC4", test_data) + tests += list_test_cases(RFC6229_Tests) + tests += list_test_cases(Drop_Tests) + tests.append(KeyLength()) + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py new file mode 100644 index 000000000..4ce3a4155 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py @@ -0,0 +1,160 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/test_Blowfish.py: Self-test for the Blowfish cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.Blowfish""" + +import unittest + +from Crypto.Util.py3compat import bchr + +from Crypto.Cipher import Blowfish + +# This is a list of (plaintext, ciphertext, key) tuples. +test_data = [ + # Test vectors from http://www.schneier.com/code/vectors.txt + ('0000000000000000', '4ef997456198dd78', '0000000000000000'), + ('ffffffffffffffff', '51866fd5b85ecb8a', 'ffffffffffffffff'), + ('1000000000000001', '7d856f9a613063f2', '3000000000000000'), + ('1111111111111111', '2466dd878b963c9d', '1111111111111111'), + ('1111111111111111', '61f9c3802281b096', '0123456789abcdef'), + ('0123456789abcdef', '7d0cc630afda1ec7', '1111111111111111'), + ('0000000000000000', '4ef997456198dd78', '0000000000000000'), + ('0123456789abcdef', '0aceab0fc6a0a28d', 'fedcba9876543210'), + ('01a1d6d039776742', '59c68245eb05282b', '7ca110454a1a6e57'), + ('5cd54ca83def57da', 'b1b8cc0b250f09a0', '0131d9619dc1376e'), + ('0248d43806f67172', '1730e5778bea1da4', '07a1133e4a0b2686'), + ('51454b582ddf440a', 'a25e7856cf2651eb', '3849674c2602319e'), + ('42fd443059577fa2', '353882b109ce8f1a', '04b915ba43feb5b6'), + ('059b5e0851cf143a', '48f4d0884c379918', '0113b970fd34f2ce'), + ('0756d8e0774761d2', '432193b78951fc98', '0170f175468fb5e6'), + ('762514b829bf486a', '13f04154d69d1ae5', '43297fad38e373fe'), + ('3bdd119049372802', '2eedda93ffd39c79', '07a7137045da2a16'), + ('26955f6835af609a', 'd887e0393c2da6e3', '04689104c2fd3b2f'), + ('164d5e404f275232', '5f99d04f5b163969', '37d06bb516cb7546'), + ('6b056e18759f5cca', '4a057a3b24d3977b', '1f08260d1ac2465e'), + ('004bd6ef09176062', '452031c1e4fada8e', '584023641aba6176'), + ('480d39006ee762f2', '7555ae39f59b87bd', '025816164629b007'), + ('437540c8698f3cfa', '53c55f9cb49fc019', '49793ebc79b3258f'), + ('072d43a077075292', '7a8e7bfa937e89a3', '4fb05e1515ab73a7'), + ('02fe55778117f12a', 'cf9c5d7a4986adb5', '49e95d6d4ca229bf'), + ('1d9d5c5018f728c2', 'd1abb290658bc778', '018310dc409b26d6'), + ('305532286d6f295a', '55cb3774d13ef201', '1c587f1c13924fef'), + ('0123456789abcdef', 'fa34ec4847b268b2', '0101010101010101'), + ('0123456789abcdef', 'a790795108ea3cae', '1f1f1f1f0e0e0e0e'), + ('0123456789abcdef', 'c39e072d9fac631d', 'e0fee0fef1fef1fe'), + ('ffffffffffffffff', '014933e0cdaff6e4', '0000000000000000'), + ('0000000000000000', 'f21e9a77b71c49bc', 'ffffffffffffffff'), + ('0000000000000000', '245946885754369a', '0123456789abcdef'), + ('ffffffffffffffff', '6b5c5a9c5d9e0a5a', 'fedcba9876543210'), + #('fedcba9876543210', 'f9ad597c49db005e', 'f0'), + #('fedcba9876543210', 'e91d21c1d961a6d6', 'f0e1'), + #('fedcba9876543210', 'e9c2b70a1bc65cf3', 'f0e1d2'), + ('fedcba9876543210', 'be1e639408640f05', 'f0e1d2c3'), + ('fedcba9876543210', 
'b39e44481bdb1e6e', 'f0e1d2c3b4'), + ('fedcba9876543210', '9457aa83b1928c0d', 'f0e1d2c3b4a5'), + ('fedcba9876543210', '8bb77032f960629d', 'f0e1d2c3b4a596'), + ('fedcba9876543210', 'e87a244e2cc85e82', 'f0e1d2c3b4a59687'), + ('fedcba9876543210', '15750e7a4f4ec577', 'f0e1d2c3b4a5968778'), + ('fedcba9876543210', '122ba70b3ab64ae0', 'f0e1d2c3b4a596877869'), + ('fedcba9876543210', '3a833c9affc537f6', 'f0e1d2c3b4a5968778695a'), + ('fedcba9876543210', '9409da87a90f6bf2', 'f0e1d2c3b4a5968778695a4b'), + ('fedcba9876543210', '884f80625060b8b4', 'f0e1d2c3b4a5968778695a4b3c'), + ('fedcba9876543210', '1f85031c19e11968', 'f0e1d2c3b4a5968778695a4b3c2d'), + ('fedcba9876543210', '79d9373a714ca34f', 'f0e1d2c3b4a5968778695a4b3c2d1e'), + ('fedcba9876543210', '93142887ee3be15c', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f'), + ('fedcba9876543210', '03429e838ce2d14b', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f00'), + ('fedcba9876543210', 'a4299e27469ff67b', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011'), + ('fedcba9876543210', 'afd5aed1c1bc96a8', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f001122'), + ('fedcba9876543210', '10851c0e3858da9f', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f00112233'), + ('fedcba9876543210', 'e6f51ed79b9db21f', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011223344'), + ('fedcba9876543210', '64a6e14afd36b46f', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f001122334455'), + ('fedcba9876543210', '80c7d7d45a5479ad', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f00112233445566'), + ('fedcba9876543210', '05044b62fa52d080', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011223344556677'), +] + + +class KeyLength(unittest.TestCase): + + def runTest(self): + self.assertRaises(ValueError, Blowfish.new, bchr(0) * 3, + Blowfish.MODE_ECB) + self.assertRaises(ValueError, Blowfish.new, bchr(0) * 57, + Blowfish.MODE_ECB) + + +class TestOutput(unittest.TestCase): + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + cipher = Blowfish.new(b'4'*16, Blowfish.MODE_ECB) + + pt = b'5' * 16 + ct = cipher.encrypt(pt) + + output = bytearray(16) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(16)) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(7) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + from .common import make_block_tests + tests = make_block_tests(Blowfish, "Blowfish", test_data) + tests.append(KeyLength()) + tests += [TestOutput()] + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_CAST.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_CAST.py new file mode 100644 index 000000000..ff13bd4a8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_CAST.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/CAST.py: Self-test for the CAST-128 (CAST5) cipher +# +# Written in 2008 by Dwayne C. 
Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.CAST""" + +import unittest + +from Crypto.Util.py3compat import bchr + +from Crypto.Cipher import CAST + +# This is a list of (plaintext, ciphertext, key) tuples. +test_data = [ + # Test vectors from RFC 2144, B.1 + ('0123456789abcdef', '238b4fe5847e44b2', + '0123456712345678234567893456789a', + '128-bit key'), + + ('0123456789abcdef', 'eb6a711a2c02271b', + '01234567123456782345', + '80-bit key'), + + ('0123456789abcdef', '7ac816d16e9b302e', + '0123456712', + '40-bit key'), +] + + +class KeyLength(unittest.TestCase): + + def runTest(self): + self.assertRaises(ValueError, CAST.new, bchr(0) * 4, CAST.MODE_ECB) + self.assertRaises(ValueError, CAST.new, bchr(0) * 17, CAST.MODE_ECB) + + +class TestOutput(unittest.TestCase): + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + cipher = CAST.new(b'4'*16, CAST.MODE_ECB) + + pt = b'5' * 16 + ct = cipher.encrypt(pt) + + output = bytearray(16) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(16)) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(7) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + from .common import make_block_tests + + tests = make_block_tests(CAST, "CAST", test_data) + tests.append(KeyLength()) + tests.append(TestOutput()) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_CBC.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_CBC.py new file mode 100644 index 000000000..374fb5a43 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_CBC.py @@ -0,0 +1,556 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Util.py3compat import tobytes, is_string +from Crypto.Cipher import AES, DES3, DES +from Crypto.Hash import SHAKE128 + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + +class BlockChainingTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + key_192 = get_tag_random("key_192", 24) + iv_128 = get_tag_random("iv_128", 16) + iv_64 = get_tag_random("iv_64", 8) + data_128 = get_tag_random("data_128", 16) + + def test_loopback_128(self): + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + pt = get_tag_random("plaintext", 16 * 100) + ct = cipher.encrypt(pt) + + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_loopback_64(self): + cipher = DES3.new(self.key_192, self.des3_mode, self.iv_64) + pt = get_tag_random("plaintext", 8 * 100) + ct = cipher.encrypt(pt) + + cipher = DES3.new(self.key_192, self.des3_mode, self.iv_64) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_iv(self): + # If not passed, the iv is created randomly + cipher = AES.new(self.key_128, self.aes_mode) + iv1 = cipher.iv + cipher = AES.new(self.key_128, self.aes_mode) + iv2 = cipher.iv + self.assertNotEqual(iv1, iv2) + self.assertEqual(len(iv1), 16) + + # IV can be passed in uppercase or lowercase + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + ct = cipher.encrypt(self.data_128) + + cipher = AES.new(self.key_128, self.aes_mode, iv=self.iv_128) + self.assertEqual(ct, cipher.encrypt(self.data_128)) + + cipher = AES.new(self.key_128, self.aes_mode, IV=self.iv_128) + self.assertEqual(ct, cipher.encrypt(self.data_128)) + + def test_iv_must_be_bytes(self): + self.assertRaises(TypeError, AES.new, self.key_128, self.aes_mode, + iv = u'test1234567890-*') + + def test_only_one_iv(self): + # Only one IV/iv keyword allowed + self.assertRaises(TypeError, AES.new, self.key_128, self.aes_mode, + iv=self.iv_128, IV=self.iv_128) + + def 
test_iv_with_matching_length(self): + self.assertRaises(ValueError, AES.new, self.key_128, self.aes_mode, + b"") + self.assertRaises(ValueError, AES.new, self.key_128, self.aes_mode, + self.iv_128[:15]) + self.assertRaises(ValueError, AES.new, self.key_128, self.aes_mode, + self.iv_128 + b"0") + + def test_block_size_128(self): + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + self.assertEqual(cipher.block_size, AES.block_size) + + def test_block_size_64(self): + cipher = DES3.new(self.key_192, self.des3_mode, self.iv_64) + self.assertEqual(cipher.block_size, DES3.block_size) + + def test_unaligned_data_128(self): + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + for wrong_length in range(1,16): + self.assertRaises(ValueError, cipher.encrypt, b"5" * wrong_length) + + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + for wrong_length in range(1,16): + self.assertRaises(ValueError, cipher.decrypt, b"5" * wrong_length) + + def test_unaligned_data_64(self): + cipher = DES3.new(self.key_192, self.des3_mode, self.iv_64) + for wrong_length in range(1,8): + self.assertRaises(ValueError, cipher.encrypt, b"5" * wrong_length) + + cipher = DES3.new(self.key_192, self.des3_mode, self.iv_64) + for wrong_length in range(1,8): + self.assertRaises(ValueError, cipher.decrypt, b"5" * wrong_length) + + def test_IV_iv_attributes(self): + data = get_tag_random("data", 16 * 100) + for func in "encrypt", "decrypt": + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + getattr(cipher, func)(data) + self.assertEqual(cipher.iv, self.iv_128) + self.assertEqual(cipher.IV, self.iv_128) + + def test_unknown_parameters(self): + self.assertRaises(TypeError, AES.new, self.key_128, self.aes_mode, + self.iv_128, 7) + self.assertRaises(TypeError, AES.new, self.key_128, self.aes_mode, + iv=self.iv_128, unknown=7) + # But some are only known by the base cipher (e.g. 
use_aesni consumed by the AES module) + AES.new(self.key_128, self.aes_mode, iv=self.iv_128, use_aesni=False) + + def test_null_encryption_decryption(self): + for func in "encrypt", "decrypt": + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + result = getattr(cipher, func)(b"") + self.assertEqual(result, b"") + + def test_either_encrypt_or_decrypt(self): + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + cipher.encrypt(b"") + self.assertRaises(TypeError, cipher.decrypt, b"") + + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + cipher.decrypt(b"") + self.assertRaises(TypeError, cipher.encrypt, b"") + + def test_data_must_be_bytes(self): + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') + + cipher = AES.new(self.key_128, self.aes_mode, self.iv_128) + self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') + + def test_bytearray(self): + data = b"1" * 128 + data_ba = bytearray(data) + + # Encrypt + key_ba = bytearray(self.key_128) + iv_ba = bytearray(self.iv_128) + + cipher1 = AES.new(self.key_128, self.aes_mode, self.iv_128) + ref1 = cipher1.encrypt(data) + + cipher2 = AES.new(key_ba, self.aes_mode, iv_ba) + key_ba[:3] = b'\xFF\xFF\xFF' + iv_ba[:3] = b'\xFF\xFF\xFF' + ref2 = cipher2.encrypt(data_ba) + + self.assertEqual(ref1, ref2) + self.assertEqual(cipher1.iv, cipher2.iv) + + # Decrypt + key_ba = bytearray(self.key_128) + iv_ba = bytearray(self.iv_128) + + cipher3 = AES.new(self.key_128, self.aes_mode, self.iv_128) + ref3 = cipher3.decrypt(data) + + cipher4 = AES.new(key_ba, self.aes_mode, iv_ba) + key_ba[:3] = b'\xFF\xFF\xFF' + iv_ba[:3] = b'\xFF\xFF\xFF' + ref4 = cipher4.decrypt(data_ba) + + self.assertEqual(ref3, ref4) + + def test_memoryview(self): + data = b"1" * 128 + data_mv = memoryview(bytearray(data)) + + # Encrypt + key_mv = memoryview(bytearray(self.key_128)) + iv_mv = memoryview(bytearray(self.iv_128)) + + cipher1 = AES.new(self.key_128, self.aes_mode, self.iv_128) + ref1 = cipher1.encrypt(data) + + cipher2 = AES.new(key_mv, self.aes_mode, iv_mv) + key_mv[:3] = b'\xFF\xFF\xFF' + iv_mv[:3] = b'\xFF\xFF\xFF' + ref2 = cipher2.encrypt(data_mv) + + self.assertEqual(ref1, ref2) + self.assertEqual(cipher1.iv, cipher2.iv) + + # Decrypt + key_mv = memoryview(bytearray(self.key_128)) + iv_mv = memoryview(bytearray(self.iv_128)) + + cipher3 = AES.new(self.key_128, self.aes_mode, self.iv_128) + ref3 = cipher3.decrypt(data) + + cipher4 = AES.new(key_mv, self.aes_mode, iv_mv) + key_mv[:3] = b'\xFF\xFF\xFF' + iv_mv[:3] = b'\xFF\xFF\xFF' + ref4 = cipher4.decrypt(data_mv) + + self.assertEqual(ref3, ref4) + + def test_output_param(self): + + pt = b'5' * 128 + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + ct = cipher.encrypt(pt) + + output = bytearray(128) + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + + def test_output_param_same_buffer(self): + + pt = b'5' * 128 + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + ct = cipher.encrypt(pt) + + pt_ba = bytearray(pt) + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + res = cipher.encrypt(pt_ba, output=pt_ba) + self.assertEqual(ct, pt_ba) + self.assertEqual(res, None) + + ct_ba = bytearray(ct) + cipher = 
AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + res = cipher.decrypt(ct_ba, output=ct_ba) + self.assertEqual(pt, ct_ba) + self.assertEqual(res, None) + + + def test_output_param_memoryview(self): + + pt = b'5' * 128 + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + ct = cipher.encrypt(pt) + + output = memoryview(bytearray(128)) + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + def test_output_param_neg(self): + LEN_PT = 128 + + pt = b'5' * LEN_PT + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + ct = cipher.encrypt(pt) + + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0' * LEN_PT) + + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0' * LEN_PT) + + shorter_output = bytearray(LEN_PT - 1) + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + cipher = AES.new(b'4'*16, self.aes_mode, iv=self.iv_128) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +class CbcTests(BlockChainingTests): + aes_mode = AES.MODE_CBC + des3_mode = DES3.MODE_CBC + + +class NistBlockChainingVectors(unittest.TestCase): + + def _do_kat_aes_test(self, file_name): + + test_vectors = load_test_vectors(("Cipher", "AES"), + file_name, + "AES CBC KAT", + { "count" : lambda x: int(x) } ) + if test_vectors is None: + return + + direction = None + for tv in test_vectors: + + # The test vector file contains some directive lines + if is_string(tv): + direction = tv + continue + + self.description = tv.desc + + cipher = AES.new(tv.key, self.aes_mode, tv.iv) + if direction == "[ENCRYPT]": + self.assertEqual(cipher.encrypt(tv.plaintext), tv.ciphertext) + elif direction == "[DECRYPT]": + self.assertEqual(cipher.decrypt(tv.ciphertext), tv.plaintext) + else: + assert False + + # See Section 6.4.2 in AESAVS + def _do_mct_aes_test(self, file_name): + + test_vectors = load_test_vectors(("Cipher", "AES"), + file_name, + "AES CBC Montecarlo", + { "count" : lambda x: int(x) } ) + if test_vectors is None: + return + + direction = None + for tv in test_vectors: + + # The test vector file contains some directive lines + if is_string(tv): + direction = tv + continue + + self.description = tv.desc + cipher = AES.new(tv.key, self.aes_mode, tv.iv) + + if direction == '[ENCRYPT]': + cts = [ tv.iv ] + for count in range(1000): + cts.append(cipher.encrypt(tv.plaintext)) + tv.plaintext = cts[-2] + self.assertEqual(cts[-1], tv.ciphertext) + elif direction == '[DECRYPT]': + pts = [ tv.iv] + for count in range(1000): + pts.append(cipher.decrypt(tv.ciphertext)) + tv.ciphertext = pts[-2] + self.assertEqual(pts[-1], tv.plaintext) + else: + assert False + + def _do_tdes_test(self, file_name): + + test_vectors = load_test_vectors(("Cipher", "TDES"), + file_name, + "TDES CBC KAT", + { "count" : lambda x: int(x) } ) + if test_vectors is None: + return + + direction = None + for tv in test_vectors: + + # The test vector file contains some directive lines + if is_string(tv): + direction = tv + continue + + self.description = tv.desc + if hasattr(tv, "keys"): + cipher = DES.new(tv.keys, self.des_mode, tv.iv) + else: + if tv.key1 != tv.key3: + key = tv.key1 + tv.key2 + tv.key3 # Option 3 + 
else: + key = tv.key1 + tv.key2 # Option 2 + cipher = DES3.new(key, self.des3_mode, tv.iv) + + if direction == "[ENCRYPT]": + self.assertEqual(cipher.encrypt(tv.plaintext), tv.ciphertext) + elif direction == "[DECRYPT]": + self.assertEqual(cipher.decrypt(tv.ciphertext), tv.plaintext) + else: + assert False + + +class NistCbcVectors(NistBlockChainingVectors): + aes_mode = AES.MODE_CBC + des_mode = DES.MODE_CBC + des3_mode = DES3.MODE_CBC + + +# Create one test method per file +nist_aes_kat_mmt_files = ( + # KAT + "CBCGFSbox128.rsp", + "CBCGFSbox192.rsp", + "CBCGFSbox256.rsp", + "CBCKeySbox128.rsp", + "CBCKeySbox192.rsp", + "CBCKeySbox256.rsp", + "CBCVarKey128.rsp", + "CBCVarKey192.rsp", + "CBCVarKey256.rsp", + "CBCVarTxt128.rsp", + "CBCVarTxt192.rsp", + "CBCVarTxt256.rsp", + # MMT + "CBCMMT128.rsp", + "CBCMMT192.rsp", + "CBCMMT256.rsp", + ) +nist_aes_mct_files = ( + "CBCMCT128.rsp", + "CBCMCT192.rsp", + "CBCMCT256.rsp", + ) + +for file_name in nist_aes_kat_mmt_files: + def new_func(self, file_name=file_name): + self._do_kat_aes_test(file_name) + setattr(NistCbcVectors, "test_AES_" + file_name, new_func) + +for file_name in nist_aes_mct_files: + def new_func(self, file_name=file_name): + self._do_mct_aes_test(file_name) + setattr(NistCbcVectors, "test_AES_" + file_name, new_func) +del file_name, new_func + +nist_tdes_files = ( + "TCBCMMT2.rsp", # 2TDES + "TCBCMMT3.rsp", # 3TDES + "TCBCinvperm.rsp", # Single DES + "TCBCpermop.rsp", + "TCBCsubtab.rsp", + "TCBCvarkey.rsp", + "TCBCvartext.rsp", + ) + +for file_name in nist_tdes_files: + def new_func(self, file_name=file_name): + self._do_tdes_test(file_name) + setattr(NistCbcVectors, "test_TDES_" + file_name, new_func) + +# END OF NIST CBC TEST VECTORS + + +class SP800TestVectors(unittest.TestCase): + """Class exercising the CBC test vectors found in Section F.2 + of NIST SP 800-3A""" + + def test_aes_128(self): + key = '2b7e151628aed2a6abf7158809cf4f3c' + iv = '000102030405060708090a0b0c0d0e0f' + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = '7649abac8119b246cee98e9b12e9197d' +\ + '5086cb9b507219ee95db113a917678b2' +\ + '73bed6b8e3c1743b7116e69e22229516' +\ + '3ff1caa1681fac09120eca307586e1a7' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CBC, iv) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CBC, iv) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_192(self): + key = '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b' + iv = '000102030405060708090a0b0c0d0e0f' + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = '4f021db243bc633d7178183a9fa071e8' +\ + 'b4d9ada9ad7dedf4e5e738763f69145a' +\ + '571b242012fb7ae07fa9baac3df102e0' +\ + '08b0e27988598881d920a9e64f5615cd' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CBC, iv) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CBC, iv) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_256(self): + key = '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4' + iv = 
'000102030405060708090a0b0c0d0e0f' + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = 'f58c4c04d6e5f1ba779eabfb5f7bfbd6' +\ + '9cfc4e967edb808d679f777bc6702c7d' +\ + '39f23369a9d9bacfa530e26304231461' +\ + 'b2eb05e2c39be9fcda6c19078c6a9d1b' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CBC, iv) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CBC, iv) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(CbcTests) + if config.get('slow_tests'): + tests += list_test_cases(NistCbcVectors) + tests += list_test_cases(SP800TestVectors) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_CCM.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_CCM.py new file mode 100644 index 000000000..e8ebc0b1e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_CCM.py @@ -0,0 +1,936 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors_wycheproof +from Crypto.Util.py3compat import tobytes, bchr +from Crypto.Cipher import AES +from Crypto.Hash import SHAKE128 + +from Crypto.Util.strxor import strxor + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +class CcmTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + nonce_96 = get_tag_random("nonce_128", 12) + data = get_tag_random("data", 128) + + def test_loopback_128(self): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + pt = get_tag_random("plaintext", 16 * 100) + ct = cipher.encrypt(pt) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_nonce(self): + # If not passed, the nonce is created randomly + cipher = AES.new(self.key_128, AES.MODE_CCM) + nonce1 = cipher.nonce + cipher = AES.new(self.key_128, AES.MODE_CCM) + nonce2 = cipher.nonce + self.assertEqual(len(nonce1), 11) + self.assertNotEqual(nonce1, nonce2) + + cipher = AES.new(self.key_128, AES.MODE_CCM, self.nonce_96) + ct = cipher.encrypt(self.data) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertEqual(ct, cipher.encrypt(self.data)) + + def test_nonce_must_be_bytes(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CCM, + nonce=u'test12345678') + + def test_nonce_length(self): + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CCM, + nonce=b"") + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CCM, + nonce=bchr(1) * 6) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CCM, + nonce=bchr(1) * 14) + for x in range(7, 13 + 1): + AES.new(self.key_128, AES.MODE_CCM, nonce=bchr(1) * x) + + def test_block_size(self): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertEqual(cipher.block_size, AES.block_size) + + def test_nonce_attribute(self): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertEqual(cipher.nonce, self.nonce_96) + + # By default, a 11 bytes long nonce is randomly generated + nonce1 = AES.new(self.key_128, AES.MODE_CCM).nonce + nonce2 = AES.new(self.key_128, AES.MODE_CCM).nonce + self.assertEqual(len(nonce1), 11) + self.assertNotEqual(nonce1, nonce2) + + def test_unknown_parameters(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CCM, + self.nonce_96, 7) + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, unknown=7) + + # But some are only known by the base cipher + # (e.g. 
use_aesni consumed by the AES module) + AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + use_aesni=False) + + def test_null_encryption_decryption(self): + for func in "encrypt", "decrypt": + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + result = getattr(cipher, func)(b"") + self.assertEqual(result, b"") + + def test_either_encrypt_or_decrypt(self): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.encrypt(b"") + self.assertRaises(TypeError, cipher.decrypt, b"") + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.decrypt(b"") + self.assertRaises(TypeError, cipher.encrypt, b"") + + def test_data_must_be_bytes(self): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') + + def test_mac_len(self): + # Invalid MAC length + for mac_len in range(3, 17 + 1, 2): + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, mac_len=mac_len) + + # Valid MAC length + for mac_len in range(4, 16 + 1, 2): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + mac_len=mac_len) + _, mac = cipher.encrypt_and_digest(self.data) + self.assertEqual(len(mac), mac_len) + + # Default MAC length + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + _, mac = cipher.encrypt_and_digest(self.data) + self.assertEqual(len(mac), 16) + + def test_invalid_mac(self): + from Crypto.Util.strxor import strxor_c + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + ct, mac = cipher.encrypt_and_digest(self.data) + + invalid_mac = strxor_c(mac, 0x01) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, + invalid_mac) + + def test_hex_mac(self): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + mac_hex = cipher.hexdigest() + self.assertEqual(cipher.digest(), unhexlify(mac_hex)) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.hexverify(mac_hex) + + def test_longer_assoc_data_than_declared(self): + # More than zero + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + assoc_len=0) + self.assertRaises(ValueError, cipher.update, b"1") + + # Too large + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + assoc_len=15) + self.assertRaises(ValueError, cipher.update, self.data) + + def test_shorter_assoc_data_than_expected(self): + DATA_LEN = len(self.data) + + # With plaintext + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + assoc_len=DATA_LEN + 1) + cipher.update(self.data) + self.assertRaises(ValueError, cipher.encrypt, self.data) + + # With empty plaintext + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + assoc_len=DATA_LEN + 1) + cipher.update(self.data) + self.assertRaises(ValueError, cipher.digest) + + # With ciphertext + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + assoc_len=DATA_LEN + 1) + cipher.update(self.data) + self.assertRaises(ValueError, cipher.decrypt, self.data) + + # With empty ciphertext + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.update(self.data) + mac = cipher.digest() + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + assoc_len=DATA_LEN + 1) + 
cipher.update(self.data) + self.assertRaises(ValueError, cipher.verify, mac) + + def test_shorter_and_longer_plaintext_than_declared(self): + DATA_LEN = len(self.data) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + msg_len=DATA_LEN + 1) + cipher.encrypt(self.data) + self.assertRaises(ValueError, cipher.digest) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + msg_len=DATA_LEN - 1) + self.assertRaises(ValueError, cipher.encrypt, self.data) + + def test_shorter_ciphertext_than_declared(self): + DATA_LEN = len(self.data) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + ct, mac = cipher.encrypt_and_digest(self.data) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + msg_len=DATA_LEN + 1) + cipher.decrypt(ct) + self.assertRaises(ValueError, cipher.verify, mac) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + msg_len=DATA_LEN - 1) + self.assertRaises(ValueError, cipher.decrypt, ct) + + def test_message_chunks(self): + # Validate that both associated data and plaintext/ciphertext + # can be broken up in chunks of arbitrary length + + auth_data = get_tag_random("authenticated data", 127) + plaintext = get_tag_random("plaintext", 127) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.update(auth_data) + ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext) + + def break_up(data, chunk_length): + return [data[i:i+chunk_length] for i in range(0, len(data), + chunk_length)] + + # Encryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + msg_len=127, assoc_len=127) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + pt2 = b"" + for chunk in break_up(ciphertext, chunk_length): + pt2 += cipher.decrypt(chunk) + self.assertEqual(plaintext, pt2) + cipher.verify(ref_mac) + + # Decryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96, + msg_len=127, assoc_len=127) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + ct2 = b"" + for chunk in break_up(plaintext, chunk_length): + ct2 += cipher.encrypt(chunk) + self.assertEqual(ciphertext, ct2) + self.assertEqual(cipher.digest(), ref_mac) + + def test_bytearray(self): + + # Encrypt + key_ba = bytearray(self.key_128) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data) + data_ba = bytearray(self.data) + + cipher1 = AES.new(self.key_128, + AES.MODE_CCM, + nonce=self.nonce_96) + cipher1.update(self.data) + ct = cipher1.encrypt(self.data) + tag = cipher1.digest() + + cipher2 = AES.new(key_ba, + AES.MODE_CCM, + nonce=nonce_ba) + key_ba[:3] = b"\xFF\xFF\xFF" + nonce_ba[:3] = b"\xFF\xFF\xFF" + cipher2.update(header_ba) + header_ba[:3] = b"\xFF\xFF\xFF" + ct_test = cipher2.encrypt(data_ba) + data_ba[:3] = b"\xFF\xFF\xFF" + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_ba = bytearray(self.key_128) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data) + del data_ba + + cipher4 = AES.new(key_ba, + AES.MODE_CCM, + nonce=nonce_ba) + key_ba[:3] = b"\xFF\xFF\xFF" + nonce_ba[:3] = b"\xFF\xFF\xFF" + cipher4.update(header_ba) + header_ba[:3] = b"\xFF\xFF\xFF" + pt_test = cipher4.decrypt_and_verify(bytearray(ct_test), bytearray(tag_test)) + + self.assertEqual(self.data, 
pt_test) + + def test_memoryview(self): + + # Encrypt + key_mv = memoryview(bytearray(self.key_128)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data)) + data_mv = memoryview(bytearray(self.data)) + + cipher1 = AES.new(self.key_128, + AES.MODE_CCM, + nonce=self.nonce_96) + cipher1.update(self.data) + ct = cipher1.encrypt(self.data) + tag = cipher1.digest() + + cipher2 = AES.new(key_mv, + AES.MODE_CCM, + nonce=nonce_mv) + key_mv[:3] = b"\xFF\xFF\xFF" + nonce_mv[:3] = b"\xFF\xFF\xFF" + cipher2.update(header_mv) + header_mv[:3] = b"\xFF\xFF\xFF" + ct_test = cipher2.encrypt(data_mv) + data_mv[:3] = b"\xFF\xFF\xFF" + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_mv = memoryview(bytearray(self.key_128)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data)) + del data_mv + + cipher4 = AES.new(key_mv, + AES.MODE_CCM, + nonce=nonce_mv) + key_mv[:3] = b"\xFF\xFF\xFF" + nonce_mv[:3] = b"\xFF\xFF\xFF" + cipher4.update(header_mv) + header_mv[:3] = b"\xFF\xFF\xFF" + pt_test = cipher4.decrypt_and_verify(memoryview(ct_test), memoryview(tag_test)) + + self.assertEqual(self.data, pt_test) + + def test_output_param(self): + + pt = b'5' * 128 + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + tag = cipher.digest() + + output = bytearray(128) + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + res, tag_out = cipher.encrypt_and_digest(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + self.assertEqual(tag, tag_out) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + res = cipher.decrypt_and_verify(ct, tag, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + def test_output_param_memoryview(self): + + pt = b'5' * 128 + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + + output = memoryview(bytearray(128)) + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + def test_output_param_neg(self): + + pt = b'5' * 16 + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(15) + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +class CcmFSMTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 
16) + nonce_96 = get_tag_random("nonce_128", 12) + data = get_tag_random("data", 16) + + def test_valid_init_encrypt_decrypt_digest_verify(self): + # No authenticated data, fixed plaintext + for assoc_len in (None, 0): + for msg_len in (None, len(self.data)): + # Verify path INIT->ENCRYPT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, + assoc_len=assoc_len, + msg_len=msg_len) + ct = cipher.encrypt(self.data) + mac = cipher.digest() + + # Verify path INIT->DECRYPT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, + assoc_len=assoc_len, + msg_len=msg_len) + cipher.decrypt(ct) + cipher.verify(mac) + + def test_valid_init_update_digest_verify(self): + # No plaintext, fixed authenticated data + for assoc_len in (None, len(self.data)): + for msg_len in (None, 0): + # Verify path INIT->UPDATE->DIGEST + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, + assoc_len=assoc_len, + msg_len=msg_len) + cipher.update(self.data) + mac = cipher.digest() + + # Verify path INIT->UPDATE->VERIFY + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, + assoc_len=assoc_len, + msg_len=msg_len) + cipher.update(self.data) + cipher.verify(mac) + + def test_valid_full_path(self): + # Fixed authenticated data, fixed plaintext + for assoc_len in (None, len(self.data)): + for msg_len in (None, len(self.data)): + # Verify path INIT->UPDATE->ENCRYPT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, + assoc_len=assoc_len, + msg_len=msg_len) + cipher.update(self.data) + ct = cipher.encrypt(self.data) + mac = cipher.digest() + + # Verify path INIT->UPDATE->DECRYPT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, + assoc_len=assoc_len, + msg_len=msg_len) + cipher.update(self.data) + cipher.decrypt(ct) + cipher.verify(mac) + + def test_valid_init_digest(self): + # Verify path INIT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.digest() + + def test_valid_init_verify(self): + # Verify path INIT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + mac = cipher.digest() + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.verify(mac) + + def test_valid_multiple_encrypt_or_decrypt(self): + # Only possible if msg_len is declared in advance + for method_name in "encrypt", "decrypt": + for auth_data in (None, b"333", self.data, + self.data + b"3"): + if auth_data is None: + assoc_len = None + else: + assoc_len = len(auth_data) + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, + msg_len=64, + assoc_len=assoc_len) + if auth_data is not None: + cipher.update(auth_data) + method = getattr(cipher, method_name) + method(self.data) + method(self.data) + method(self.data) + method(self.data) + + def test_valid_multiple_digest_or_verify(self): + # Multiple calls to digest + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.update(self.data) + first_mac = cipher.digest() + for x in range(4): + self.assertEqual(first_mac, cipher.digest()) + + # Multiple calls to verify + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.update(self.data) + for x in range(5): + cipher.verify(first_mac) + + def test_valid_encrypt_and_digest_decrypt_and_verify(self): + # encrypt_and_digest + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.update(self.data) + ct, mac = cipher.encrypt_and_digest(self.data) + + # decrypt_and_verify + 
cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.update(self.data) + pt = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(self.data, pt) + + def test_invalid_multiple_encrypt_decrypt_without_msg_len(self): + # Once per method, with or without assoc. data + for method_name in "encrypt", "decrypt": + for assoc_data_present in (True, False): + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96) + if assoc_data_present: + cipher.update(self.data) + method = getattr(cipher, method_name) + method(self.data) + self.assertRaises(TypeError, method, self.data) + + def test_invalid_mixing_encrypt_decrypt(self): + # Once per method, with or without assoc. data + for method1_name, method2_name in (("encrypt", "decrypt"), + ("decrypt", "encrypt")): + for assoc_data_present in (True, False): + cipher = AES.new(self.key_128, AES.MODE_CCM, + nonce=self.nonce_96, + msg_len=32) + if assoc_data_present: + cipher.update(self.data) + getattr(cipher, method1_name)(self.data) + self.assertRaises(TypeError, getattr(cipher, method2_name), + self.data) + + def test_invalid_encrypt_or_update_after_digest(self): + for method_name in "encrypt", "update": + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.encrypt(self.data) + cipher.digest() + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.encrypt_and_digest(self.data) + + def test_invalid_decrypt_or_update_after_verify(self): + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + ct = cipher.encrypt(self.data) + mac = cipher.digest() + + for method_name in "decrypt", "update": + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.verify(mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data) + + cipher = AES.new(self.key_128, AES.MODE_CCM, nonce=self.nonce_96) + cipher.decrypt_and_verify(ct, mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data) + + +class TestVectors(unittest.TestCase): + """Class exercising the CCM test vectors found in Appendix C + of NIST SP 800-38C and in RFC 3610""" + + # List of test vectors, each made up of: + # - authenticated data + # - plaintext + # - ciphertext + # - MAC + # - AES key + # - nonce + test_vectors_hex = [ + # NIST SP 800 38C + ( '0001020304050607', + '20212223', + '7162015b', + '4dac255d', + '404142434445464748494a4b4c4d4e4f', + '10111213141516'), + ( '000102030405060708090a0b0c0d0e0f', + '202122232425262728292a2b2c2d2e2f', + 'd2a1f0e051ea5f62081a7792073d593d', + '1fc64fbfaccd', + '404142434445464748494a4b4c4d4e4f', + '1011121314151617'), + ( '000102030405060708090a0b0c0d0e0f10111213', + '202122232425262728292a2b2c2d2e2f3031323334353637', + 'e3b201a9f5b71a7a9b1ceaeccd97e70b6176aad9a4428aa5', + '484392fbc1b09951', + '404142434445464748494a4b4c4d4e4f', + '101112131415161718191a1b'), + ( (''.join(["%02X" % (x*16+y) for x in range(0,16) for y in range(0,16)]))*256, + '202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f', + '69915dad1e84c6376a68c2967e4dab615ae0fd1faec44cc484828529463ccf72', + 'b4ac6bec93e8598e7f0dadbcea5b', + '404142434445464748494a4b4c4d4e4f', + '101112131415161718191a1b1c'), + # RFC3610 + ( '0001020304050607', + '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e', + '588c979a61c663d2f066d0c2c0f989806d5f6b61dac384', + '17e8d12cfdf926e0', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '00000003020100a0a1a2a3a4a5'), + ( + 
'0001020304050607', + '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + '72c91a36e135f8cf291ca894085c87e3cc15c439c9e43a3b', + 'a091d56e10400916', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '00000004030201a0a1a2a3a4a5'), + ( '0001020304050607', + '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20', + '51b1e5f44a197d1da46b0f8e2d282ae871e838bb64da859657', + '4adaa76fbd9fb0c5', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '00000005040302A0A1A2A3A4A5'), + ( '000102030405060708090a0b', + '0c0d0e0f101112131415161718191a1b1c1d1e', + 'a28c6865939a9a79faaa5c4c2a9d4a91cdac8c', + '96c861b9c9e61ef1', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '00000006050403a0a1a2a3a4a5'), + ( '000102030405060708090a0b', + '0c0d0e0f101112131415161718191a1b1c1d1e1f', + 'dcf1fb7b5d9e23fb9d4e131253658ad86ebdca3e', + '51e83f077d9c2d93', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '00000007060504a0a1a2a3a4a5'), + ( '000102030405060708090a0b', + '0c0d0e0f101112131415161718191a1b1c1d1e1f20', + '6fc1b011f006568b5171a42d953d469b2570a4bd87', + '405a0443ac91cb94', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '00000008070605a0a1a2a3a4a5'), + ( '0001020304050607', + '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e', + '0135d1b2c95f41d5d1d4fec185d166b8094e999dfed96c', + '048c56602c97acbb7490', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '00000009080706a0a1a2a3a4a5'), + ( '0001020304050607', + '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + '7b75399ac0831dd2f0bbd75879a2fd8f6cae6b6cd9b7db24', + 'c17b4433f434963f34b4', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '0000000a090807a0a1a2a3a4a5'), + ( '0001020304050607', + '08090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20', + '82531a60cc24945a4b8279181ab5c84df21ce7f9b73f42e197', + 'ea9c07e56b5eb17e5f4e', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '0000000b0a0908a0a1a2a3a4a5'), + ( '000102030405060708090a0b', + '0c0d0e0f101112131415161718191a1b1c1d1e', + '07342594157785152b074098330abb141b947b', + '566aa9406b4d999988dd', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '0000000c0b0a09a0a1a2a3a4a5'), + ( '000102030405060708090a0b', + '0c0d0e0f101112131415161718191a1b1c1d1e1f', + '676bb20380b0e301e8ab79590a396da78b834934', + 'f53aa2e9107a8b6c022c', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '0000000d0c0b0aa0a1a2a3a4a5'), + ( '000102030405060708090a0b', + '0c0d0e0f101112131415161718191a1b1c1d1e1f20', + 'c0ffa0d6f05bdb67f24d43a4338d2aa4bed7b20e43', + 'cd1aa31662e7ad65d6db', + 'c0c1c2c3c4c5c6c7c8c9cacbcccdcecf', + '0000000e0d0c0ba0a1a2a3a4a5'), + ( '0be1a88bace018b1', + '08e8cf97d820ea258460e96ad9cf5289054d895ceac47c', + '4cb97f86a2a4689a877947ab8091ef5386a6ffbdd080f8', + 'e78cf7cb0cddd7b3', + 'd7828d13b2b0bdc325a76236df93cc6b', + '00412b4ea9cdbe3c9696766cfa'), + ( '63018f76dc8a1bcb', + '9020ea6f91bdd85afa0039ba4baff9bfb79c7028949cd0ec', + '4ccb1e7ca981befaa0726c55d378061298c85c92814abc33', + 'c52ee81d7d77c08a', + 'd7828d13b2b0bdc325a76236df93cc6b', + '0033568ef7b2633c9696766cfa'), + ( 'aa6cfa36cae86b40', + 'b916e0eacc1c00d7dcec68ec0b3bbb1a02de8a2d1aa346132e', + 'b1d23a2220ddc0ac900d9aa03c61fcf4a559a4417767089708', + 'a776796edb723506', + 'd7828d13b2b0bdc325a76236df93cc6b', + '00103fe41336713c9696766cfa'), + ( 'd0d0735c531e1becf049c244', + '12daac5630efa5396f770ce1a66b21f7b2101c', + '14d253c3967b70609b7cbb7c49916028324526', + '9a6f49975bcadeaf', + 'd7828d13b2b0bdc325a76236df93cc6b', + '00764c63b8058e3c9696766cfa'), + ( '77b60f011c03e1525899bcae', + 'e88b6a46c78d63e52eb8c546efb5de6f75e9cc0d', + '5545ff1a085ee2efbf52b2e04bee1e2336c73e3f', + '762c0c7744fe7e3c', + 'd7828d13b2b0bdc325a76236df93cc6b', + 
'00f8b678094e3b3c9696766cfa'), + ( 'cd9044d2b71fdb8120ea60c0', + '6435acbafb11a82e2f071d7ca4a5ebd93a803ba87f', + '009769ecabdf48625594c59251e6035722675e04c8', + '47099e5ae0704551', + 'd7828d13b2b0bdc325a76236df93cc6b', + '00d560912d3f703c9696766cfa'), + ( 'd85bc7e69f944fb8', + '8a19b950bcf71a018e5e6701c91787659809d67dbedd18', + 'bc218daa947427b6db386a99ac1aef23ade0b52939cb6a', + '637cf9bec2408897c6ba', + 'd7828d13b2b0bdc325a76236df93cc6b', + '0042fff8f1951c3c9696766cfa'), + ( '74a0ebc9069f5b37', + '1761433c37c5a35fc1f39f406302eb907c6163be38c98437', + '5810e6fd25874022e80361a478e3e9cf484ab04f447efff6', + 'f0a477cc2fc9bf548944', + 'd7828d13b2b0bdc325a76236df93cc6b', + '00920f40e56cdc3c9696766cfa'), + ( '44a3aa3aae6475ca', + 'a434a8e58500c6e41530538862d686ea9e81301b5ae4226bfa', + 'f2beed7bc5098e83feb5b31608f8e29c38819a89c8e776f154', + '4d4151a4ed3a8b87b9ce', + 'd7828d13b2b0bdc325a76236df93cc6b', + '0027ca0c7120bc3c9696766cfa'), + ( 'ec46bb63b02520c33c49fd70', + 'b96b49e21d621741632875db7f6c9243d2d7c2', + '31d750a09da3ed7fddd49a2032aabf17ec8ebf', + '7d22c8088c666be5c197', + 'd7828d13b2b0bdc325a76236df93cc6b', + '005b8ccbcd9af83c9696766cfa'), + ( '47a65ac78b3d594227e85e71', + 'e2fcfbb880442c731bf95167c8ffd7895e337076', + 'e882f1dbd38ce3eda7c23f04dd65071eb41342ac', + 'df7e00dccec7ae52987d', + 'd7828d13b2b0bdc325a76236df93cc6b', + '003ebe94044b9a3c9696766cfa'), + ( '6e37a6ef546d955d34ab6059', + 'abf21c0b02feb88f856df4a37381bce3cc128517d4', + 'f32905b88a641b04b9c9ffb58cc390900f3da12ab1', + '6dce9e82efa16da62059', + 'd7828d13b2b0bdc325a76236df93cc6b', + '008d493b30ae8b3c9696766cfa'), + ] + + test_vectors = [[unhexlify(x) for x in tv] for tv in test_vectors_hex] + + def runTest(self): + for assoc_data, pt, ct, mac, key, nonce in self.test_vectors: + # Encrypt + cipher = AES.new(key, AES.MODE_CCM, nonce, mac_len=len(mac)) + cipher.update(assoc_data) + ct2, mac2 = cipher.encrypt_and_digest(pt) + self.assertEqual(ct, ct2) + self.assertEqual(mac, mac2) + + # Decrypt + cipher = AES.new(key, AES.MODE_CCM, nonce, mac_len=len(mac)) + cipher.update(assoc_data) + pt2 = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(pt, pt2) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings, **extra_params): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._extra_params = extra_params + self._id = "None" + + def setUp(self): + + def filter_tag(group): + return group['tagSize'] // 8 + + self.tv = load_test_vectors_wycheproof(("Cipher", "wycheproof"), + "aes_ccm_test.json", + "Wycheproof AES CCM", + group_tag={'tag_size': filter_tag}) + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_encrypt(self, tv): + self._id = "Wycheproof Encrypt CCM Test #" + str(tv.id) + + try: + cipher = AES.new(tv.key, AES.MODE_CCM, tv.iv, mac_len=tv.tag_size, + **self._extra_params) + except ValueError as e: + if len(tv.iv) not in range(7, 13 + 1, 2) and "Length of parameter 'nonce'" in str(e): + assert not tv.valid + return + if tv.tag_size not in range(4, 16 + 1, 2) and "Parameter 'mac_len'" in str(e): + assert not tv.valid + return + raise e + + cipher.update(tv.aad) + ct, tag = cipher.encrypt_and_digest(tv.msg) + if tv.valid: + self.assertEqual(ct, tv.ct) + self.assertEqual(tag, tv.tag) + self.warn(tv) + + def test_decrypt(self, tv): + self._id = "Wycheproof Decrypt CCM Test #" + 
str(tv.id) + + try: + cipher = AES.new(tv.key, AES.MODE_CCM, tv.iv, mac_len=tv.tag_size, + **self._extra_params) + except ValueError as e: + if len(tv.iv) not in range(7, 13 + 1, 2) and "Length of parameter 'nonce'" in str(e): + assert not tv.valid + return + if tv.tag_size not in range(4, 16 + 1, 2) and "Parameter 'mac_len'" in str(e): + assert not tv.valid + return + raise e + + cipher.update(tv.aad) + try: + pt = cipher.decrypt_and_verify(tv.ct, tv.tag) + except ValueError: + assert not tv.valid + else: + assert tv.valid + self.assertEqual(pt, tv.msg) + self.warn(tv) + + def test_corrupt_decrypt(self, tv): + self._id = "Wycheproof Corrupt Decrypt CCM Test #" + str(tv.id) + if len(tv.iv) not in range(7, 13 + 1, 2) or len(tv.ct) == 0: + return + cipher = AES.new(tv.key, AES.MODE_CCM, tv.iv, mac_len=tv.tag_size, + **self._extra_params) + cipher.update(tv.aad) + ct_corrupt = strxor(tv.ct, b"\x00" * (len(tv.ct) - 1) + b"\x01") + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct_corrupt, tv.tag) + + def runTest(self): + + for tv in self.tv: + self.test_encrypt(tv) + self.test_decrypt(tv) + self.test_corrupt_decrypt(tv) + + +def get_tests(config={}): + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(CcmTests) + tests += list_test_cases(CcmFSMTests) + tests += [TestVectors()] + tests += [TestVectorsWycheproof(wycheproof_warnings)] + + return tests + + +if __name__ == '__main__': + def suite(): + unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_CFB.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_CFB.py new file mode 100644 index 000000000..cb0c35295 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_CFB.py @@ -0,0 +1,411 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Util.py3compat import tobytes, is_string +from Crypto.Cipher import AES, DES3, DES +from Crypto.Hash import SHAKE128 + +from Crypto.SelfTest.Cipher.test_CBC import BlockChainingTests + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +class CfbTests(BlockChainingTests): + + aes_mode = AES.MODE_CFB + des3_mode = DES3.MODE_CFB + + # Redefine test_unaligned_data_128/64 + + def test_unaligned_data_128(self): + plaintexts = [ b"7777777" ] * 100 + + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=8) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=8) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=128) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=128) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + def test_unaligned_data_64(self): + plaintexts = [ b"7777777" ] * 100 + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=8) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=8) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=64) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=64) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + # Extra + + def test_segment_size_128(self): + for bits in range(8, 129, 8): + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, + segment_size=bits) + + for bits in 0, 7, 9, 127, 129: + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CFB, + self.iv_128, + segment_size=bits) + + def test_segment_size_64(self): + for bits in range(8, 65, 8): + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, + segment_size=bits) + + for bits in 0, 7, 9, 63, 65: + self.assertRaises(ValueError, DES3.new, self.key_192, AES.MODE_CFB, + self.iv_64, + segment_size=bits) + + +class NistCfbVectors(unittest.TestCase): + + def _do_kat_aes_test(self, file_name, segment_size): + + test_vectors = load_test_vectors(("Cipher", "AES"), + file_name, + "AES CFB%d KAT" % segment_size, + { "count" : lambda x: int(x) } ) + if test_vectors is None: + return + + direction = None + for tv in test_vectors: + + # The test vector file contains some directive lines + if is_string(tv): + direction = tv + continue + + self.description = tv.desc + cipher = AES.new(tv.key, AES.MODE_CFB, tv.iv, + segment_size=segment_size) + if direction == "[ENCRYPT]": + self.assertEqual(cipher.encrypt(tv.plaintext), tv.ciphertext) + elif direction == "[DECRYPT]": + self.assertEqual(cipher.decrypt(tv.ciphertext), tv.plaintext) + else: + assert False + + # See Section 6.4.5 in AESAVS + def _do_mct_aes_test(self, file_name, segment_size): + + test_vectors = load_test_vectors(("Cipher", "AES"), + file_name, + "AES CFB%d Montecarlo" % segment_size, + { "count" : lambda x: 
int(x) } ) + if test_vectors is None: + return + + assert(segment_size in (8, 128)) + + direction = None + for tv in test_vectors: + + # The test vector file contains some directive lines + if is_string(tv): + direction = tv + continue + + self.description = tv.desc + cipher = AES.new(tv.key, AES.MODE_CFB, tv.iv, + segment_size=segment_size) + + def get_input(input_text, output_seq, j): + # CFB128 + if segment_size == 128: + if j >= 2: + return output_seq[-2] + return [input_text, tv.iv][j] + # CFB8 + if j == 0: + return input_text + elif j <= 16: + return tv.iv[j - 1:j] + return output_seq[j - 17] + + if direction == '[ENCRYPT]': + cts = [] + for j in range(1000): + plaintext = get_input(tv.plaintext, cts, j) + cts.append(cipher.encrypt(plaintext)) + self.assertEqual(cts[-1], tv.ciphertext) + elif direction == '[DECRYPT]': + pts = [] + for j in range(1000): + ciphertext = get_input(tv.ciphertext, pts, j) + pts.append(cipher.decrypt(ciphertext)) + self.assertEqual(pts[-1], tv.plaintext) + else: + assert False + + def _do_tdes_test(self, file_name, segment_size): + + test_vectors = load_test_vectors(("Cipher", "TDES"), + file_name, + "TDES CFB%d KAT" % segment_size, + { "count" : lambda x: int(x) } ) + if test_vectors is None: + return + + direction = None + for tv in test_vectors: + + # The test vector file contains some directive lines + if is_string(tv): + direction = tv + continue + + self.description = tv.desc + if hasattr(tv, "keys"): + cipher = DES.new(tv.keys, DES.MODE_CFB, tv.iv, + segment_size=segment_size) + else: + if tv.key1 != tv.key3: + key = tv.key1 + tv.key2 + tv.key3 # Option 3 + else: + key = tv.key1 + tv.key2 # Option 2 + cipher = DES3.new(key, DES3.MODE_CFB, tv.iv, + segment_size=segment_size) + if direction == "[ENCRYPT]": + self.assertEqual(cipher.encrypt(tv.plaintext), tv.ciphertext) + elif direction == "[DECRYPT]": + self.assertEqual(cipher.decrypt(tv.ciphertext), tv.plaintext) + else: + assert False + + +# Create one test method per file +nist_aes_kat_mmt_files = ( + # KAT + "CFB?GFSbox128.rsp", + "CFB?GFSbox192.rsp", + "CFB?GFSbox256.rsp", + "CFB?KeySbox128.rsp", + "CFB?KeySbox192.rsp", + "CFB?KeySbox256.rsp", + "CFB?VarKey128.rsp", + "CFB?VarKey192.rsp", + "CFB?VarKey256.rsp", + "CFB?VarTxt128.rsp", + "CFB?VarTxt192.rsp", + "CFB?VarTxt256.rsp", + # MMT + "CFB?MMT128.rsp", + "CFB?MMT192.rsp", + "CFB?MMT256.rsp", + ) +nist_aes_mct_files = ( + "CFB?MCT128.rsp", + "CFB?MCT192.rsp", + "CFB?MCT256.rsp", + ) + +for file_gen_name in nist_aes_kat_mmt_files: + for bits in "8", "128": + file_name = file_gen_name.replace("?", bits) + def new_func(self, file_name=file_name, bits=bits): + self._do_kat_aes_test(file_name, int(bits)) + setattr(NistCfbVectors, "test_AES_" + file_name, new_func) + +for file_gen_name in nist_aes_mct_files: + for bits in "8", "128": + file_name = file_gen_name.replace("?", bits) + def new_func(self, file_name=file_name, bits=bits): + self._do_mct_aes_test(file_name, int(bits)) + setattr(NistCfbVectors, "test_AES_" + file_name, new_func) +del file_name, new_func + +nist_tdes_files = ( + "TCFB?MMT2.rsp", # 2TDES + "TCFB?MMT3.rsp", # 3TDES + "TCFB?invperm.rsp", # Single DES + "TCFB?permop.rsp", + "TCFB?subtab.rsp", + "TCFB?varkey.rsp", + "TCFB?vartext.rsp", + ) + +for file_gen_name in nist_tdes_files: + for bits in "8", "64": + file_name = file_gen_name.replace("?", bits) + def new_func(self, file_name=file_name, bits=bits): + self._do_tdes_test(file_name, int(bits)) + setattr(NistCfbVectors, "test_TDES_" + file_name, new_func) + +# END OF NIST CBC 
TEST VECTORS + + +class SP800TestVectors(unittest.TestCase): + """Class exercising the CFB test vectors found in Section F.3 + of NIST SP 800-3A""" + + def test_aes_128_cfb8(self): + plaintext = '6bc1bee22e409f96e93d7e117393172aae2d' + ciphertext = '3b79424c9c0dd436bace9e0ed4586a4f32b9' + key = '2b7e151628aed2a6abf7158809cf4f3c' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_192_cfb8(self): + plaintext = '6bc1bee22e409f96e93d7e117393172aae2d' + ciphertext = 'cda2521ef0a905ca44cd057cbf0d47a0678a' + key = '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_256_cfb8(self): + plaintext = '6bc1bee22e409f96e93d7e117393172aae2d' + ciphertext = 'dc1f1a8520a64db55fcc8ac554844e889700' + key = '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=8) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_128_cfb128(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = '3b3fd92eb72dad20333449f8e83cfb4a' +\ + 'c8a64537a0b3a93fcde3cdad9f1ce58b' +\ + '26751f67a3cbb140b1808cf187a4f4df' +\ + 'c04b05357c5d1c0eeac4c66f9ff7f2e6' + key = '2b7e151628aed2a6abf7158809cf4f3c' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_192_cfb128(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = 'cdc80d6fddf18cab34c25909c99a4174' +\ + '67ce7f7f81173621961a2b70171d3d7a' +\ + '2e1e8a1dd59b88b1c8e60fed1efac4c9' +\ + 'c05f9f9ca9834fa042ae8fba584b09ff' + key = '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) + 
self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_256_cfb128(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + + ciphertext = 'dc7e84bfda79164b7ecd8486985d3860' +\ + '39ffed143b28b1c832113c6331e5407b' +\ + 'df10132415e54b92a13ed0a8267ae2f9' +\ + '75a385741ab9cef82031623d55b1e471' + key = '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CFB, iv, segment_size=128) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(CfbTests) + if config.get('slow_tests'): + tests += list_test_cases(NistCfbVectors) + tests += list_test_cases(SP800TestVectors) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_CTR.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_CTR.py new file mode 100644 index 000000000..6fc43ef72 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_CTR.py @@ -0,0 +1,472 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
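Editor's note: the CFB tests above revolve around the segment_size keyword — it is expressed in bits, must be a multiple of 8 no larger than the cipher's block size (128 for AES, 64 for TDES), and the same value has to be used for encryption and decryption; the NIST KAT/MCT files and the SP 800-38A vectors are exercised for both CFB8 and full-block CFB. The CTR tests that follow likewise pin down how nonce, initial_value and explicit Counter objects relate. A small sketch of both points, assuming pycryptodome's documented API (illustrative only; keys and data here are arbitrary):

    from Crypto.Cipher import AES
    from Crypto.Util import Counter
    from Crypto.Random import get_random_bytes

    key = get_random_bytes(16)
    iv = get_random_bytes(16)
    msg = b"CFB needs no padding, any length works"

    # CFB: segment_size in bits, multiple of 8, at most the block size.
    enc = AES.new(key, AES.MODE_CFB, iv, segment_size=8)    # CFB8 (the default)
    dec = AES.new(key, AES.MODE_CFB, iv, segment_size=8)
    assert dec.decrypt(enc.encrypt(msg)) == msg

    enc = AES.new(key, AES.MODE_CFB, iv, segment_size=128)  # full-block CFB
    dec = AES.new(key, AES.MODE_CFB, iv, segment_size=128)
    assert dec.decrypt(enc.encrypt(msg)) == msg

    # CTR: an 8-byte nonce plus the default initial_value of 0 is equivalent to
    # an explicit 64-bit Counter with the same prefix, as test_nonce_parameter verifies.
    nonce = get_random_bytes(8)
    c1 = AES.new(key, AES.MODE_CTR, nonce=nonce)
    c2 = AES.new(key, AES.MODE_CTR,
                 counter=Counter.new(64, prefix=nonce, initial_value=0))
    assert c1.encrypt(msg) == c2.encrypt(msg)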
+# =================================================================== + +import unittest +from binascii import hexlify, unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Util.py3compat import tobytes, bchr +from Crypto.Cipher import AES, DES3 +from Crypto.Hash import SHAKE128, SHA256 +from Crypto.Util import Counter + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + +class CtrTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + key_192 = get_tag_random("key_192", 24) + nonce_32 = get_tag_random("nonce_32", 4) + nonce_64 = get_tag_random("nonce_64", 8) + ctr_64 = Counter.new(32, prefix=nonce_32) + ctr_128 = Counter.new(64, prefix=nonce_64) + + def test_loopback_128(self): + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + pt = get_tag_random("plaintext", 16 * 100) + ct = cipher.encrypt(pt) + + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_loopback_64(self): + cipher = DES3.new(self.key_192, DES3.MODE_CTR, counter=self.ctr_64) + pt = get_tag_random("plaintext", 8 * 100) + ct = cipher.encrypt(pt) + + cipher = DES3.new(self.key_192, DES3.MODE_CTR, counter=self.ctr_64) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_invalid_counter_parameter(self): + # Counter object is required for ciphers with short block size + self.assertRaises(TypeError, DES3.new, self.key_192, AES.MODE_CTR) + # Positional arguments are not allowed (Counter must be passed as + # keyword) + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CTR, self.ctr_128) + + def test_nonce_attribute(self): + # Nonce attribute is the prefix passed to Counter (DES3) + cipher = DES3.new(self.key_192, DES3.MODE_CTR, counter=self.ctr_64) + self.assertEqual(cipher.nonce, self.nonce_32) + + # Nonce attribute is the prefix passed to Counter (AES) + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + self.assertEqual(cipher.nonce, self.nonce_64) + + # Nonce attribute is not defined if suffix is used in Counter + counter = Counter.new(64, prefix=self.nonce_32, suffix=self.nonce_32) + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=counter) + self.assertFalse(hasattr(cipher, "nonce")) + + def test_nonce_parameter(self): + # Nonce parameter becomes nonce attribute + cipher1 = AES.new(self.key_128, AES.MODE_CTR, nonce=self.nonce_64) + self.assertEqual(cipher1.nonce, self.nonce_64) + + counter = Counter.new(64, prefix=self.nonce_64, initial_value=0) + cipher2 = AES.new(self.key_128, AES.MODE_CTR, counter=counter) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + pt = get_tag_random("plaintext", 65536) + self.assertEqual(cipher1.encrypt(pt), cipher2.encrypt(pt)) + + # Nonce is implicitly created (for AES) when no parameters are passed + nonce1 = AES.new(self.key_128, AES.MODE_CTR).nonce + nonce2 = AES.new(self.key_128, AES.MODE_CTR).nonce + self.assertNotEqual(nonce1, nonce2) + self.assertEqual(len(nonce1), 8) + + # Nonce can be zero-length + cipher = AES.new(self.key_128, AES.MODE_CTR, nonce=b"") + self.assertEqual(b"", cipher.nonce) + cipher.encrypt(b'0'*300) + + # Nonce and Counter are mutually exclusive + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CTR, + counter=self.ctr_128, nonce=self.nonce_64) + + def test_initial_value_parameter(self): + # Test with nonce parameter + cipher1 = AES.new(self.key_128, AES.MODE_CTR, + nonce=self.nonce_64, initial_value=0xFFFF) + counter = 
Counter.new(64, prefix=self.nonce_64, initial_value=0xFFFF) + cipher2 = AES.new(self.key_128, AES.MODE_CTR, counter=counter) + pt = get_tag_random("plaintext", 65536) + self.assertEqual(cipher1.encrypt(pt), cipher2.encrypt(pt)) + + # Test without nonce parameter + cipher1 = AES.new(self.key_128, AES.MODE_CTR, + initial_value=0xFFFF) + counter = Counter.new(64, prefix=cipher1.nonce, initial_value=0xFFFF) + cipher2 = AES.new(self.key_128, AES.MODE_CTR, counter=counter) + pt = get_tag_random("plaintext", 65536) + self.assertEqual(cipher1.encrypt(pt), cipher2.encrypt(pt)) + + # Initial_value and Counter are mutually exclusive + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CTR, + counter=self.ctr_128, initial_value=0) + + def test_initial_value_bytes_parameter(self): + # Same result as when passing an integer + cipher1 = AES.new(self.key_128, AES.MODE_CTR, + nonce=self.nonce_64, + initial_value=b"\x00"*6+b"\xFF\xFF") + cipher2 = AES.new(self.key_128, AES.MODE_CTR, + nonce=self.nonce_64, initial_value=0xFFFF) + pt = get_tag_random("plaintext", 65536) + self.assertEqual(cipher1.encrypt(pt), cipher2.encrypt(pt)) + + # Fail if the iv is too large + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, + initial_value=b"5"*17) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, + nonce=self.nonce_64, initial_value=b"5"*9) + + # Fail if the iv is too short + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, + initial_value=b"5"*15) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, + nonce=self.nonce_64, initial_value=b"5"*7) + + def test_iv_with_matching_length(self): + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, + counter=Counter.new(120)) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_CTR, + counter=Counter.new(136)) + + def test_block_size_128(self): + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + self.assertEqual(cipher.block_size, AES.block_size) + + def test_block_size_64(self): + cipher = DES3.new(self.key_192, DES3.MODE_CTR, counter=self.ctr_64) + self.assertEqual(cipher.block_size, DES3.block_size) + + def test_unaligned_data_128(self): + plaintexts = [ b"7777777" ] * 100 + + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + def test_unaligned_data_64(self): + plaintexts = [ b"7777777" ] * 100 + cipher = DES3.new(self.key_192, AES.MODE_CTR, counter=self.ctr_64) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = DES3.new(self.key_192, AES.MODE_CTR, counter=self.ctr_64) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + cipher = DES3.new(self.key_192, AES.MODE_CTR, counter=self.ctr_64) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = DES3.new(self.key_192, AES.MODE_CTR, counter=self.ctr_64) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + def test_unknown_parameters(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CTR, + 7, 
counter=self.ctr_128) + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_CTR, + counter=self.ctr_128, unknown=7) + # But some are only known by the base cipher (e.g. use_aesni consumed by the AES module) + AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128, use_aesni=False) + + def test_null_encryption_decryption(self): + for func in "encrypt", "decrypt": + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + result = getattr(cipher, func)(b"") + self.assertEqual(result, b"") + + def test_either_encrypt_or_decrypt(self): + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + cipher.encrypt(b"") + self.assertRaises(TypeError, cipher.decrypt, b"") + + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=self.ctr_128) + cipher.decrypt(b"") + self.assertRaises(TypeError, cipher.encrypt, b"") + + def test_wrap_around(self): + # Counter is only 8 bits, so we can only encrypt/decrypt 256 blocks (=4096 bytes) + counter = Counter.new(8, prefix=bchr(9) * 15) + max_bytes = 4096 + + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=counter) + cipher.encrypt(b'9' * max_bytes) + self.assertRaises(OverflowError, cipher.encrypt, b'9') + + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=counter) + self.assertRaises(OverflowError, cipher.encrypt, b'9' * (max_bytes + 1)) + + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=counter) + cipher.decrypt(b'9' * max_bytes) + self.assertRaises(OverflowError, cipher.decrypt, b'9') + + cipher = AES.new(self.key_128, AES.MODE_CTR, counter=counter) + self.assertRaises(OverflowError, cipher.decrypt, b'9' * (max_bytes + 1)) + + def test_bytearray(self): + data = b"1" * 16 + iv = b"\x00" * 6 + b"\xFF\xFF" + + # Encrypt + cipher1 = AES.new(self.key_128, AES.MODE_CTR, + nonce=self.nonce_64, + initial_value=iv) + ref1 = cipher1.encrypt(data) + + cipher2 = AES.new(self.key_128, AES.MODE_CTR, + nonce=bytearray(self.nonce_64), + initial_value=bytearray(iv)) + ref2 = cipher2.encrypt(bytearray(data)) + + self.assertEqual(ref1, ref2) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + cipher3 = AES.new(self.key_128, AES.MODE_CTR, + nonce=self.nonce_64, + initial_value=iv) + ref3 = cipher3.decrypt(data) + + cipher4 = AES.new(self.key_128, AES.MODE_CTR, + nonce=bytearray(self.nonce_64), + initial_value=bytearray(iv)) + ref4 = cipher4.decrypt(bytearray(data)) + + self.assertEqual(ref3, ref4) + + def test_very_long_data(self): + cipher = AES.new(b'A' * 32, AES.MODE_CTR, nonce=b'') + ct = cipher.encrypt(b'B' * 1000000) + digest = SHA256.new(ct).hexdigest() + self.assertEqual(digest, "96204fc470476561a3a8f3b6fe6d24be85c87510b638142d1d0fb90989f8a6a6") + + def test_output_param(self): + + pt = b'5' * 128 + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + ct = cipher.encrypt(pt) + + output = bytearray(128) + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + def test_output_param_memoryview(self): + + pt = b'5' * 128 + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + ct = cipher.encrypt(pt) + + output = memoryview(bytearray(128)) + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher = AES.new(b'4'*16, AES.MODE_CTR, 
nonce=self.nonce_64) + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + def test_output_param_neg(self): + LEN_PT = 128 + + pt = b'5' * LEN_PT + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + ct = cipher.encrypt(pt) + + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0' * LEN_PT) + + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0' * LEN_PT) + + shorter_output = bytearray(LEN_PT - 1) + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + cipher = AES.new(b'4'*16, AES.MODE_CTR, nonce=self.nonce_64) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +class SP800TestVectors(unittest.TestCase): + """Class exercising the CTR test vectors found in Section F.5 + of NIST SP 800-38A""" + + def test_aes_128(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = '874d6191b620e3261bef6864990db6ce' +\ + '9806f66b7970fdff8617187bb9fffdff' +\ + '5ae4df3edbd5d35e5b4f09020db03eab' +\ + '1e031dda2fbe03d1792170a0f3009cee' + key = '2b7e151628aed2a6abf7158809cf4f3c' + counter = Counter.new(nbits=16, + prefix=unhexlify('f0f1f2f3f4f5f6f7f8f9fafbfcfd'), + initial_value=0xfeff) + + key = unhexlify(key) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CTR, counter=counter) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CTR, counter=counter) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_192(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = '1abc932417521ca24f2b0459fe7e6e0b' +\ + '090339ec0aa6faefd5ccc2c6f4ce8e94' +\ + '1e36b26bd1ebc670d1bd1d665620abf7' +\ + '4f78a7f6d29809585a97daec58c6b050' + key = '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b' + counter = Counter.new(nbits=16, + prefix=unhexlify('f0f1f2f3f4f5f6f7f8f9fafbfcfd'), + initial_value=0xfeff) + + key = unhexlify(key) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CTR, counter=counter) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CTR, counter=counter) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + def test_aes_256(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = '601ec313775789a5b7a7f504bbf3d228' +\ + 'f443e3ca4d62b59aca84e990cacaf5c5' +\ + '2b0930daa23de94ce87017ba2d84988d' +\ + 'dfc9c58db67aada613c2dd08457941a6' + key = '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4' + counter = Counter.new(nbits=16, + prefix=unhexlify('f0f1f2f3f4f5f6f7f8f9fafbfcfd'), + initial_value=0xfeff) + key = unhexlify(key) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_CTR, counter=counter) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_CTR, counter=counter) + self.assertEqual(cipher.decrypt(ciphertext), 
plaintext) + + +class RFC3686TestVectors(unittest.TestCase): + + # Each item is a test vector with: + # - plaintext + # - ciphertext + # - key (AES 128, 192 or 256 bits) + # - counter prefix (4 byte nonce + 8 byte nonce) + data = ( + ('53696e676c6520626c6f636b206d7367', + 'e4095d4fb7a7b3792d6175a3261311b8', + 'ae6852f8121067cc4bf7a5765577f39e', + '000000300000000000000000'), + ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + '5104a106168a72d9790d41ee8edad388eb2e1efc46da57c8fce630df9141be28', + '7e24067817fae0d743d6ce1f32539163', + '006cb6dbc0543b59da48d90b'), + ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223', + 'c1cf48a89f2ffdd9cf4652e9efdb72d74540a42bde6d7836d59a5ceaaef3105325b2072f', + '7691be035e5020a8ac6e618529f9a0dc', + '00e0017b27777f3f4a1786f0'), + ('53696e676c6520626c6f636b206d7367', + '4b55384fe259c9c84e7935a003cbe928', + '16af5b145fc9f579c175f93e3bfb0eed863d06ccfdb78515', + '0000004836733c147d6d93cb'), + ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + '453243fc609b23327edfaafa7131cd9f8490701c5ad4a79cfc1fe0ff42f4fb00', + '7c5cb2401b3dc33c19e7340819e0f69c678c3db8e6f6a91a', + '0096b03b020c6eadc2cb500d'), + ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223', + '96893fc55e5c722f540b7dd1ddf7e758d288bc95c69165884536c811662f2188abee0935', + '02bf391ee8ecb159b959617b0965279bf59b60a786d3e0fe', + '0007bdfd5cbd60278dcc0912'), + ('53696e676c6520626c6f636b206d7367', + '145ad01dbf824ec7560863dc71e3e0c0', + '776beff2851db06f4c8a0542c8696f6c6a81af1eec96b4d37fc1d689e6c1c104', + '00000060db5672c97aa8f0b2'), + ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + 'f05e231b3894612c49ee000b804eb2a9b8306b508f839d6a5530831d9344af1c', + 'f6d66d6bd52d59bb0796365879eff886c66dd51a5b6a99744b50590c87a23884', + '00faac24c1585ef15a43d875'), + ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223', + 'eb6c52821d0bbbf7ce7594462aca4faab407df866569fd07f48cc0b583d6071f1ec0e6b8', + 'ff7a617ce69148e4f1726e2f43581de2aa62d9f805532edff1eed687fb54153d', + '001cc5b751a51d70a1c11148') + ) + + bindata = [] + for tv in data: + bindata.append([unhexlify(x) for x in tv]) + + def runTest(self): + for pt, ct, key, prefix in self.bindata: + counter = Counter.new(32, prefix=prefix) + cipher = AES.new(key, AES.MODE_CTR, counter=counter) + result = cipher.encrypt(pt) + self.assertEqual(hexlify(ct), hexlify(result)) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(CtrTests) + tests += list_test_cases(SP800TestVectors) + tests += [ RFC3686TestVectors() ] + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_ChaCha20.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_ChaCha20.py new file mode 100644 index 000000000..4396ac289 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_ChaCha20.py @@ -0,0 +1,529 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. 
Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import os +import re +import unittest +from binascii import hexlify, unhexlify + +from Crypto.Util.py3compat import b, tobytes, bchr +from Crypto.Util.strxor import strxor_c +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Cipher import ChaCha20 + + +class ChaCha20Test(unittest.TestCase): + + def test_new_positive(self): + cipher = ChaCha20.new(key=b("0")*32, nonce=b"0"*8) + self.assertEqual(cipher.nonce, b"0" * 8) + cipher = ChaCha20.new(key=b("0")*32, nonce=b"0"*12) + self.assertEqual(cipher.nonce, b"0" * 12) + + def test_new_negative(self): + new = ChaCha20.new + self.assertRaises(TypeError, new) + self.assertRaises(TypeError, new, nonce=b("0")) + self.assertRaises(ValueError, new, nonce=b("0")*8, key=b("0")) + self.assertRaises(ValueError, new, nonce=b("0"), key=b("0")*32) + + def test_default_nonce(self): + cipher1 = ChaCha20.new(key=bchr(1) * 32) + cipher2 = ChaCha20.new(key=bchr(1) * 32) + self.assertEqual(len(cipher1.nonce), 8) + self.assertNotEqual(cipher1.nonce, cipher2.nonce) + + def test_nonce(self): + key = b'A' * 32 + + nonce1 = b'P' * 8 + cipher1 = ChaCha20.new(key=key, nonce=nonce1) + self.assertEqual(nonce1, cipher1.nonce) + + nonce2 = b'Q' * 12 + cipher2 = ChaCha20.new(key=key, nonce=nonce2) + self.assertEqual(nonce2, cipher2.nonce) + + def test_eiter_encrypt_or_decrypt(self): + """Verify that a cipher cannot be used for both decrypting and encrypting""" + + c1 = ChaCha20.new(key=b("5") * 32, nonce=b("6") * 8) + c1.encrypt(b("8")) + self.assertRaises(TypeError, c1.decrypt, b("9")) + + c2 = ChaCha20.new(key=b("5") * 32, nonce=b("6") * 8) + c2.decrypt(b("8")) + self.assertRaises(TypeError, c2.encrypt, b("9")) + + def test_round_trip(self): + pt = b("A") * 1024 + c1 = ChaCha20.new(key=b("5") * 32, nonce=b("6") * 8) + c2 = ChaCha20.new(key=b("5") * 32, nonce=b("6") * 8) + ct = c1.encrypt(pt) + self.assertEqual(c2.decrypt(ct), pt) + + self.assertEqual(c1.encrypt(b("")), b("")) + self.assertEqual(c2.decrypt(b("")), b("")) + + def test_streaming(self): + """Verify that an arbitrary number of bytes can be encrypted/decrypted""" + from Crypto.Hash import SHA1 + + segments = (1, 3, 5, 7, 11, 17, 23) + total = sum(segments) + + pt = b("") + while len(pt) < total: + pt += SHA1.new(pt).digest() + + cipher1 = ChaCha20.new(key=b("7") * 32, nonce=b("t") * 8) + ct = cipher1.encrypt(pt) + + cipher2 = ChaCha20.new(key=b("7") * 32, nonce=b("t") * 8) + cipher3 = ChaCha20.new(key=b("7") * 32, nonce=b("t") * 8) + 
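+        # Stream-cipher property: processing the same data in arbitrary-sized
+        # chunks must yield the same bytes as the one-shot operation above.
+        # cipher2 decrypts and cipher3 re-encrypts segment by segment, and each
+        # segment is compared against the corresponding slice of cipher1's output.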
idx = 0 + for segment in segments: + self.assertEqual(cipher2.decrypt(ct[idx:idx+segment]), pt[idx:idx+segment]) + self.assertEqual(cipher3.encrypt(pt[idx:idx+segment]), ct[idx:idx+segment]) + idx += segment + + def test_seek(self): + cipher1 = ChaCha20.new(key=b("9") * 32, nonce=b("e") * 8) + + offset = 64 * 900 + 7 + pt = b("1") * 64 + + cipher1.encrypt(b("0") * offset) + ct1 = cipher1.encrypt(pt) + + cipher2 = ChaCha20.new(key=b("9") * 32, nonce=b("e") * 8) + cipher2.seek(offset) + ct2 = cipher2.encrypt(pt) + + self.assertEqual(ct1, ct2) + + def test_seek_tv(self): + # Test Vector #4, A.1 from + # http://tools.ietf.org/html/draft-nir-cfrg-chacha20-poly1305-04 + key = bchr(0) + bchr(255) + bchr(0) * 30 + nonce = bchr(0) * 8 + cipher = ChaCha20.new(key=key, nonce=nonce) + cipher.seek(64 * 2) + expected_key_stream = unhexlify(b( + "72d54dfbf12ec44b362692df94137f32" + "8fea8da73990265ec1bbbea1ae9af0ca" + "13b25aa26cb4a648cb9b9d1be65b2c09" + "24a66c54d545ec1b7374f4872e99f096" + )) + ct = cipher.encrypt(bchr(0) * len(expected_key_stream)) + self.assertEqual(expected_key_stream, ct) + + def test_rfc7539(self): + # from https://tools.ietf.org/html/rfc7539 Annex A.1 + # Each item is: key, nonce, block #, plaintext, ciphertext + tvs = [ + # Test Vector #1 + ( + "00"*32, + "00"*12, + 0, + "00"*16*4, + "76b8e0ada0f13d90405d6ae55386bd28" + "bdd219b8a08ded1aa836efcc8b770dc7" + "da41597c5157488d7724e03fb8d84a37" + "6a43b8f41518a11cc387b669b2ee6586" + ), + # Test Vector #2 + ( + "00"*31 + "01", + "00"*11 + "02", + 1, + "416e79207375626d697373696f6e2074" + "6f20746865204945544620696e74656e" + "6465642062792074686520436f6e7472" + "696275746f7220666f72207075626c69" + "636174696f6e20617320616c6c206f72" + "2070617274206f6620616e2049455446" + "20496e7465726e65742d447261667420" + "6f722052464320616e6420616e792073" + "746174656d656e74206d616465207769" + "7468696e2074686520636f6e74657874" + "206f6620616e20494554462061637469" + "7669747920697320636f6e7369646572" + "656420616e20224945544620436f6e74" + "7269627574696f6e222e205375636820" + "73746174656d656e747320696e636c75" + "6465206f72616c2073746174656d656e" + "747320696e2049455446207365737369" + "6f6e732c2061732077656c6c20617320" + "7772697474656e20616e6420656c6563" + "74726f6e696320636f6d6d756e696361" + "74696f6e73206d61646520617420616e" + "792074696d65206f7220706c6163652c" + "20776869636820617265206164647265" + "7373656420746f", + "a3fbf07df3fa2fde4f376ca23e827370" + "41605d9f4f4f57bd8cff2c1d4b7955ec" + "2a97948bd3722915c8f3d337f7d37005" + "0e9e96d647b7c39f56e031ca5eb6250d" + "4042e02785ececfa4b4bb5e8ead0440e" + "20b6e8db09d881a7c6132f420e527950" + "42bdfa7773d8a9051447b3291ce1411c" + "680465552aa6c405b7764d5e87bea85a" + "d00f8449ed8f72d0d662ab052691ca66" + "424bc86d2df80ea41f43abf937d3259d" + "c4b2d0dfb48a6c9139ddd7f76966e928" + "e635553ba76c5c879d7b35d49eb2e62b" + "0871cdac638939e25e8a1e0ef9d5280f" + "a8ca328b351c3c765989cbcf3daa8b6c" + "cc3aaf9f3979c92b3720fc88dc95ed84" + "a1be059c6499b9fda236e7e818b04b0b" + "c39c1e876b193bfe5569753f88128cc0" + "8aaa9b63d1a16f80ef2554d7189c411f" + "5869ca52c5b83fa36ff216b9c1d30062" + "bebcfd2dc5bce0911934fda79a86f6e6" + "98ced759c3ff9b6477338f3da4f9cd85" + "14ea9982ccafb341b2384dd902f3d1ab" + "7ac61dd29c6f21ba5b862f3730e37cfd" + "c4fd806c22f221" + ), + # Test Vector #3 + ( + "1c9240a5eb55d38af333888604f6b5f0" + "473917c1402b80099dca5cbc207075c0", + "00"*11 + "02", + 42, + "2754776173206272696c6c69672c2061" + "6e642074686520736c6974687920746f" + "7665730a446964206779726520616e64" + "2067696d626c6520696e207468652077" + 
"6162653a0a416c6c206d696d73792077" + "6572652074686520626f726f676f7665" + "732c0a416e6420746865206d6f6d6520" + "7261746873206f757467726162652e", + "62e6347f95ed87a45ffae7426f27a1df" + "5fb69110044c0d73118effa95b01e5cf" + "166d3df2d721caf9b21e5fb14c616871" + "fd84c54f9d65b283196c7fe4f60553eb" + "f39c6402c42234e32a356b3e764312a6" + "1a5532055716ead6962568f87d3f3f77" + "04c6a8d1bcd1bf4d50d6154b6da731b1" + "87b58dfd728afa36757a797ac188d1" + ) + ] + + for tv in tvs: + key = unhexlify(tv[0]) + nonce = unhexlify(tv[1]) + offset = tv[2] * 64 + pt = unhexlify(tv[3]) + ct_expect = unhexlify(tv[4]) + + cipher = ChaCha20.new(key=key, nonce=nonce) + if offset != 0: + cipher.seek(offset) + ct = cipher.encrypt(pt) + assert(ct == ct_expect) + + +class XChaCha20Test(unittest.TestCase): + + # From https://tools.ietf.org/html/draft-arciszewski-xchacha-03 + + def test_hchacha20(self): + # Section 2.2.1 + + from Crypto.Cipher.ChaCha20 import _HChaCha20 + + key = b"00:01:02:03:04:05:06:07:08:09:0a:0b:0c:0d:0e:0f:10:11:12:13:14:15:16:17:18:19:1a:1b:1c:1d:1e:1f" + key = unhexlify(key.replace(b":", b"")) + + nonce = b"00:00:00:09:00:00:00:4a:00:00:00:00:31:41:59:27" + nonce = unhexlify(nonce.replace(b":", b"")) + + subkey = _HChaCha20(key, nonce) + + expected = b"82413b42 27b27bfe d30e4250 8a877d73 a0f9e4d5 8a74a853 c12ec413 26d3ecdc" + expected = unhexlify(expected.replace(b" ", b"")) + + self.assertEqual(subkey, expected) + + def test_nonce(self): + key = b'A' * 32 + nonce = b'P' * 24 + cipher = ChaCha20.new(key=key, nonce=nonce) + self.assertEqual(nonce, cipher.nonce) + + def test_encrypt(self): + # Section A.3.2 + + pt = b""" + 5468652064686f6c65202870726f6e6f756e6365642022646f6c652229206973 + 20616c736f206b6e6f776e2061732074686520417369617469632077696c6420 + 646f672c2072656420646f672c20616e642077686973746c696e6720646f672e + 2049742069732061626f7574207468652073697a65206f662061204765726d61 + 6e20736865706865726420627574206c6f6f6b73206d6f7265206c696b652061 + 206c6f6e672d6c656767656420666f782e205468697320686967686c7920656c + 757369766520616e6420736b696c6c6564206a756d70657220697320636c6173 + 736966696564207769746820776f6c7665732c20636f796f7465732c206a6163 + 6b616c732c20616e6420666f78657320696e20746865207461786f6e6f6d6963 + 2066616d696c792043616e696461652e""" + pt = unhexlify(pt.replace(b"\n", b"").replace(b" ", b"")) + + key = unhexlify(b"808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9f") + iv = unhexlify(b"404142434445464748494a4b4c4d4e4f5051525354555658") + + ct = b""" + 7d0a2e6b7f7c65a236542630294e063b7ab9b555a5d5149aa21e4ae1e4fbce87 + ecc8e08a8b5e350abe622b2ffa617b202cfad72032a3037e76ffdcdc4376ee05 + 3a190d7e46ca1de04144850381b9cb29f051915386b8a710b8ac4d027b8b050f + 7cba5854e028d564e453b8a968824173fc16488b8970cac828f11ae53cabd201 + 12f87107df24ee6183d2274fe4c8b1485534ef2c5fbc1ec24bfc3663efaa08bc + 047d29d25043532db8391a8a3d776bf4372a6955827ccb0cdd4af403a7ce4c63 + d595c75a43e045f0cce1f29c8b93bd65afc5974922f214a40b7c402cdb91ae73 + c0b63615cdad0480680f16515a7ace9d39236464328a37743ffc28f4ddb324f4 + d0f5bbdc270c65b1749a6efff1fbaa09536175ccd29fb9e6057b307320d31683 + 8a9c71f70b5b5907a66f7ea49aadc409""" + ct = unhexlify(ct.replace(b"\n", b"").replace(b" ", b"")) + + cipher = ChaCha20.new(key=key, nonce=iv) + cipher.seek(64) # Counter = 1 + ct_test = cipher.encrypt(pt) + self.assertEqual(ct, ct_test) + + +class ByteArrayTest(unittest.TestCase): + """Verify we can encrypt or decrypt bytearrays""" + + def runTest(self): + + data = b"0123" + key = b"9" * 32 + nonce = b"t" * 8 + + # Encryption + data_ba = 
bytearray(data) + key_ba = bytearray(key) + nonce_ba = bytearray(nonce) + + cipher1 = ChaCha20.new(key=key, nonce=nonce) + ct = cipher1.encrypt(data) + + cipher2 = ChaCha20.new(key=key_ba, nonce=nonce_ba) + key_ba[:1] = b'\xFF' + nonce_ba[:1] = b'\xFF' + ct_test = cipher2.encrypt(data_ba) + + self.assertEqual(ct, ct_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decryption + key_ba = bytearray(key) + nonce_ba = bytearray(nonce) + ct_ba = bytearray(ct) + + cipher3 = ChaCha20.new(key=key_ba, nonce=nonce_ba) + key_ba[:1] = b'\xFF' + nonce_ba[:1] = b'\xFF' + pt_test = cipher3.decrypt(ct_ba) + + self.assertEqual(data, pt_test) + + +class MemoryviewTest(unittest.TestCase): + """Verify we can encrypt or decrypt bytearrays""" + + def runTest(self): + + data = b"0123" + key = b"9" * 32 + nonce = b"t" * 8 + + # Encryption + data_mv = memoryview(bytearray(data)) + key_mv = memoryview(bytearray(key)) + nonce_mv = memoryview(bytearray(nonce)) + + cipher1 = ChaCha20.new(key=key, nonce=nonce) + ct = cipher1.encrypt(data) + + cipher2 = ChaCha20.new(key=key_mv, nonce=nonce_mv) + key_mv[:1] = b'\xFF' + nonce_mv[:1] = b'\xFF' + ct_test = cipher2.encrypt(data_mv) + + self.assertEqual(ct, ct_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decryption + key_mv = memoryview(bytearray(key)) + nonce_mv = memoryview(bytearray(nonce)) + ct_mv = memoryview(bytearray(ct)) + + cipher3 = ChaCha20.new(key=key_mv, nonce=nonce_mv) + key_mv[:1] = b'\xFF' + nonce_mv[:1] = b'\xFF' + pt_test = cipher3.decrypt(ct_mv) + + self.assertEqual(data, pt_test) + + +class ChaCha20_AGL_NIR(unittest.TestCase): + + # From http://tools.ietf.org/html/draft-agl-tls-chacha20poly1305-04 + # and http://tools.ietf.org/html/draft-nir-cfrg-chacha20-poly1305-04 + tv = [ + ( "00" * 32, + "00" * 8, + "76b8e0ada0f13d90405d6ae55386bd28bdd219b8a08ded1aa836efcc" + "8b770dc7da41597c5157488d7724e03fb8d84a376a43b8f41518a11c" + "c387b669b2ee6586" + "9f07e7be5551387a98ba977c732d080d" + "cb0f29a048e3656912c6533e32ee7aed" + "29b721769ce64e43d57133b074d839d5" + "31ed1f28510afb45ace10a1f4b794d6f" + ), + ( "00" * 31 + "01", + "00" * 8, + "4540f05a9f1fb296d7736e7b208e3c96eb4fe1834688d2604f450952" + "ed432d41bbe2a0b6ea7566d2a5d1e7e20d42af2c53d792b1c43fea81" + "7e9ad275ae546963" + "3aeb5224ecf849929b9d828db1ced4dd" + "832025e8018b8160b82284f3c949aa5a" + "8eca00bbb4a73bdad192b5c42f73f2fd" + "4e273644c8b36125a64addeb006c13a0" + ), + ( "00" * 32, + "00" * 7 + "01", + "de9cba7bf3d69ef5e786dc63973f653a0b49e015adbff7134fcb7df1" + "37821031e85a050278a7084527214f73efc7fa5b5277062eb7a0433e" + "445f41e3" + ), + ( "00" * 32, + "01" + "00" * 7, + "ef3fdfd6c61578fbf5cf35bd3dd33b8009631634d21e42ac33960bd1" + "38e50d32111e4caf237ee53ca8ad6426194a88545ddc497a0b466e7d" + "6bbdb0041b2f586b" + ), + ( "000102030405060708090a0b0c0d0e0f101112131415161718191a1b" + "1c1d1e1f", + "0001020304050607", + "f798a189f195e66982105ffb640bb7757f579da31602fc93ec01ac56" + "f85ac3c134a4547b733b46413042c9440049176905d3be59ea1c53f1" + "5916155c2be8241a38008b9a26bc35941e2444177c8ade6689de9526" + "4986d95889fb60e84629c9bd9a5acb1cc118be563eb9b3a4a472f82e" + "09a7e778492b562ef7130e88dfe031c79db9d4f7c7a899151b9a4750" + "32b63fc385245fe054e3dd5a97a5f576fe064025d3ce042c566ab2c5" + "07b138db853e3d6959660996546cc9c4a6eafdc777c040d70eaf46f7" + "6dad3979e5c5360c3317166a1c894c94a371876a94df7628fe4eaaf2" + "ccb27d5aaae0ad7ad0f9d4b6ad3b54098746d4524d38407a6deb3ab7" + "8fab78c9" + ), + ( "00" * 32, + "00" * 7 + "02", + "c2c64d378cd536374ae204b9ef933fcd" + "1a8b2288b3dfa49672ab765b54ee27c7" + 
"8a970e0e955c14f3a88e741b97c286f7" + "5f8fc299e8148362fa198a39531bed6d" + ), + ] + + def runTest(self): + for (key, nonce, stream) in self.tv: + c = ChaCha20.new(key=unhexlify(b(key)), nonce=unhexlify(b(nonce))) + ct = unhexlify(b(stream)) + pt = b("\x00") * len(ct) + self.assertEqual(c.encrypt(pt), ct) + + +class TestOutput(unittest.TestCase): + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + key = b'4' * 32 + nonce = b'5' * 8 + cipher = ChaCha20.new(key=key, nonce=nonce) + + pt = b'5' * 300 + ct = cipher.encrypt(pt) + + output = bytearray(len(pt)) + cipher = ChaCha20.new(key=key, nonce=nonce) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + cipher = ChaCha20.new(key=key, nonce=nonce) + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(len(pt))) + cipher = ChaCha20.new(key=key, nonce=nonce) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher = ChaCha20.new(key=key, nonce=nonce) + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + cipher = ChaCha20.new(key=key, nonce=nonce) + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*len(pt)) + + cipher = ChaCha20.new(key=key, nonce=nonce) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*len(pt)) + + shorter_output = bytearray(len(pt) - 1) + + cipher = ChaCha20.new(key=key, nonce=nonce) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + + cipher = ChaCha20.new(key=key, nonce=nonce) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(ChaCha20Test) + tests += list_test_cases(XChaCha20Test) + tests.append(ChaCha20_AGL_NIR()) + tests.append(ByteArrayTest()) + tests.append(MemoryviewTest()) + tests.append(TestOutput()) + + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_ChaCha20_Poly1305.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_ChaCha20_Poly1305.py new file mode 100644 index 000000000..9129c68de --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_ChaCha20_Poly1305.py @@ -0,0 +1,776 @@ +# =================================================================== +# +# Copyright (c) 2018, Helder Eijs +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors_wycheproof +from Crypto.Util.py3compat import tobytes +from Crypto.Cipher import ChaCha20_Poly1305 +from Crypto.Hash import SHAKE128 + +from Crypto.Util.strxor import strxor + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +class ChaCha20Poly1305Tests(unittest.TestCase): + + key_256 = get_tag_random("key_256", 32) + nonce_96 = get_tag_random("nonce_96", 12) + data_128 = get_tag_random("data_128", 16) + + def test_loopback(self): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + pt = get_tag_random("plaintext", 16 * 100) + ct = cipher.encrypt(pt) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_nonce(self): + # Nonce can only be 8 or 12 bytes + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=b'H' * 8) + self.assertEqual(len(cipher.nonce), 8) + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=b'H' * 12) + self.assertEqual(len(cipher.nonce), 12) + + # If not passed, the nonce is created randomly + cipher = ChaCha20_Poly1305.new(key=self.key_256) + nonce1 = cipher.nonce + cipher = ChaCha20_Poly1305.new(key=self.key_256) + nonce2 = cipher.nonce + self.assertEqual(len(nonce1), 12) + self.assertNotEqual(nonce1, nonce2) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + ct = cipher.encrypt(self.data_128) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + self.assertEqual(ct, cipher.encrypt(self.data_128)) + + def test_nonce_must_be_bytes(self): + self.assertRaises(TypeError, + ChaCha20_Poly1305.new, + key=self.key_256, + nonce=u'test12345678') + + def test_nonce_length(self): + # nonce can only be 8 or 12 bytes long + self.assertRaises(ValueError, + ChaCha20_Poly1305.new, + key=self.key_256, + nonce=b'0' * 7) + self.assertRaises(ValueError, + ChaCha20_Poly1305.new, + key=self.key_256, + nonce=b'') + + def test_block_size(self): + # Not based on block ciphers + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + self.assertFalse(hasattr(cipher, 'block_size')) + + def test_nonce_attribute(self): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + self.assertEqual(cipher.nonce, self.nonce_96) + + # By default, a 12 bytes long nonce is randomly generated + nonce1 = ChaCha20_Poly1305.new(key=self.key_256).nonce + nonce2 = ChaCha20_Poly1305.new(key=self.key_256).nonce + self.assertEqual(len(nonce1), 12) + self.assertNotEqual(nonce1, nonce2) + + def test_unknown_parameters(self): + self.assertRaises(TypeError, + ChaCha20_Poly1305.new, + key=self.key_256, + param=9) + + def test_null_encryption_decryption(self): + for func in "encrypt", "decrypt": + cipher = 
ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + result = getattr(cipher, func)(b"") + self.assertEqual(result, b"") + + def test_either_encrypt_or_decrypt(self): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.encrypt(b"") + self.assertRaises(TypeError, cipher.decrypt, b"") + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.decrypt(b"") + self.assertRaises(TypeError, cipher.encrypt, b"") + + def test_data_must_be_bytes(self): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') + + def test_mac_len(self): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + _, mac = cipher.encrypt_and_digest(self.data_128) + self.assertEqual(len(mac), 16) + + def test_invalid_mac(self): + from Crypto.Util.strxor import strxor_c + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + invalid_mac = strxor_c(mac, 0x01) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, + invalid_mac) + + def test_hex_mac(self): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + mac_hex = cipher.hexdigest() + self.assertEqual(cipher.digest(), unhexlify(mac_hex)) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.hexverify(mac_hex) + + def test_message_chunks(self): + # Validate that both associated data and plaintext/ciphertext + # can be broken up in chunks of arbitrary length + + auth_data = get_tag_random("authenticated data", 127) + plaintext = get_tag_random("plaintext", 127) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(auth_data) + ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext) + + def break_up(data, chunk_length): + return [data[i:i+chunk_length] for i in range(0, len(data), + chunk_length)] + + # Encryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + pt2 = b"" + for chunk in break_up(ciphertext, chunk_length): + pt2 += cipher.decrypt(chunk) + self.assertEqual(plaintext, pt2) + cipher.verify(ref_mac) + + # Decryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + ct2 = b"" + for chunk in break_up(plaintext, chunk_length): + ct2 += cipher.encrypt(chunk) + self.assertEqual(ciphertext, ct2) + self.assertEqual(cipher.digest(), ref_mac) + + def test_bytearray(self): + + # Encrypt + key_ba = bytearray(self.key_256) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data_128) + data_ba = bytearray(self.data_128) + + cipher1 = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher1.update(self.data_128) + ct = cipher1.encrypt(self.data_128) + tag = cipher1.digest() + + cipher2 = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + key_ba[:3] = b'\xFF\xFF\xFF' + nonce_ba[:3] = b'\xFF\xFF\xFF' + cipher2.update(header_ba) + header_ba[:3] = 
b'\xFF\xFF\xFF' + ct_test = cipher2.encrypt(data_ba) + data_ba[:3] = b'\x99\x99\x99' + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_ba = bytearray(self.key_256) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data_128) + ct_ba = bytearray(ct) + tag_ba = bytearray(tag) + del data_ba + + cipher3 = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + key_ba[:3] = b'\xFF\xFF\xFF' + nonce_ba[:3] = b'\xFF\xFF\xFF' + cipher3.update(header_ba) + header_ba[:3] = b'\xFF\xFF\xFF' + pt_test = cipher3.decrypt(ct_ba) + ct_ba[:3] = b'\xFF\xFF\xFF' + cipher3.verify(tag_ba) + + self.assertEqual(pt_test, self.data_128) + + def test_memoryview(self): + + # Encrypt + key_mv = memoryview(bytearray(self.key_256)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data_128)) + data_mv = memoryview(bytearray(self.data_128)) + + cipher1 = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher1.update(self.data_128) + ct = cipher1.encrypt(self.data_128) + tag = cipher1.digest() + + cipher2 = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + key_mv[:3] = b'\xFF\xFF\xFF' + nonce_mv[:3] = b'\xFF\xFF\xFF' + cipher2.update(header_mv) + header_mv[:3] = b'\xFF\xFF\xFF' + ct_test = cipher2.encrypt(data_mv) + data_mv[:3] = b'\x99\x99\x99' + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_mv = memoryview(bytearray(self.key_256)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data_128)) + ct_mv = memoryview(bytearray(ct)) + tag_mv = memoryview(bytearray(tag)) + del data_mv + + cipher3 = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + key_mv[:3] = b'\xFF\xFF\xFF' + nonce_mv[:3] = b'\xFF\xFF\xFF' + cipher3.update(header_mv) + header_mv[:3] = b'\xFF\xFF\xFF' + pt_test = cipher3.decrypt(ct_mv) + ct_mv[:3] = b'\x99\x99\x99' + cipher3.verify(tag_mv) + + self.assertEqual(pt_test, self.data_128) + + +class XChaCha20Poly1305Tests(unittest.TestCase): + + def test_nonce(self): + # Nonce can only be 24 bytes + cipher = ChaCha20_Poly1305.new(key=b'Y' * 32, + nonce=b'H' * 24) + self.assertEqual(len(cipher.nonce), 24) + self.assertEqual(cipher.nonce, b'H' * 24) + + def test_encrypt(self): + # From https://tools.ietf.org/html/draft-arciszewski-xchacha-03 + # Section A.3.1 + + pt = b""" + 4c616469657320616e642047656e746c656d656e206f662074686520636c6173 + 73206f66202739393a204966204920636f756c64206f6666657220796f75206f + 6e6c79206f6e652074697020666f7220746865206675747572652c2073756e73 + 637265656e20776f756c642062652069742e""" + pt = unhexlify(pt.replace(b"\n", b"").replace(b" ", b"")) + + aad = unhexlify(b"50515253c0c1c2c3c4c5c6c7") + key = unhexlify(b"808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9f") + iv = unhexlify(b"404142434445464748494a4b4c4d4e4f5051525354555657") + + ct = b""" + bd6d179d3e83d43b9576579493c0e939572a1700252bfaccbed2902c21396cbb + 731c7f1b0b4aa6440bf3a82f4eda7e39ae64c6708c54c216cb96b72e1213b452 + 2f8c9ba40db5d945b11b69b982c1bb9e3f3fac2bc369488f76b2383565d3fff9 + 21f9664c97637da9768812f615c68b13b52e""" + ct = unhexlify(ct.replace(b"\n", b"").replace(b" ", b"")) + + tag = unhexlify(b"c0875924c1c7987947deafd8780acf49") + + cipher = ChaCha20_Poly1305.new(key=key, nonce=iv) + cipher.update(aad) + ct_test, tag_test = 
cipher.encrypt_and_digest(pt) + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + + cipher = ChaCha20_Poly1305.new(key=key, nonce=iv) + cipher.update(aad) + cipher.decrypt_and_verify(ct, tag) + + +class ChaCha20Poly1305FSMTests(unittest.TestCase): + + key_256 = get_tag_random("key_256", 32) + nonce_96 = get_tag_random("nonce_96", 12) + data_128 = get_tag_random("data_128", 16) + + def test_valid_init_encrypt_decrypt_digest_verify(self): + # No authenticated data, fixed plaintext + # Verify path INIT->ENCRYPT->DIGEST + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + # Verify path INIT->DECRYPT->VERIFY + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.verify(mac) + + def test_valid_init_update_digest_verify(self): + # No plaintext, fixed authenticated data + # Verify path INIT->UPDATE->DIGEST + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(self.data_128) + mac = cipher.digest() + + # Verify path INIT->UPDATE->VERIFY + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(self.data_128) + cipher.verify(mac) + + def test_valid_full_path(self): + # Fixed authenticated data, fixed plaintext + # Verify path INIT->UPDATE->ENCRYPT->DIGEST + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(self.data_128) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + # Verify path INIT->UPDATE->DECRYPT->VERIFY + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(self.data_128) + cipher.decrypt(ct) + cipher.verify(mac) + + def test_valid_init_digest(self): + # Verify path INIT->DIGEST + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.digest() + + def test_valid_init_verify(self): + # Verify path INIT->VERIFY + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + mac = cipher.digest() + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.verify(mac) + + def test_valid_multiple_encrypt_or_decrypt(self): + for method_name in "encrypt", "decrypt": + for auth_data in (None, b"333", self.data_128, + self.data_128 + b"3"): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + if auth_data is not None: + cipher.update(auth_data) + method = getattr(cipher, method_name) + method(self.data_128) + method(self.data_128) + method(self.data_128) + method(self.data_128) + + def test_valid_multiple_digest_or_verify(self): + # Multiple calls to digest + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(self.data_128) + first_mac = cipher.digest() + for x in range(4): + self.assertEqual(first_mac, cipher.digest()) + + # Multiple calls to verify + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(self.data_128) + for x in range(5): + cipher.verify(first_mac) + + def test_valid_encrypt_and_digest_decrypt_and_verify(self): + # encrypt_and_digest + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(self.data_128) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + # decrypt_and_verify + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.update(self.data_128) + pt = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(self.data_128, pt) + + def 
test_invalid_mixing_encrypt_decrypt(self): + # Once per method, with or without assoc. data + for method1_name, method2_name in (("encrypt", "decrypt"), + ("decrypt", "encrypt")): + for assoc_data_present in (True, False): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + if assoc_data_present: + cipher.update(self.data_128) + getattr(cipher, method1_name)(self.data_128) + self.assertRaises(TypeError, getattr(cipher, method2_name), + self.data_128) + + def test_invalid_encrypt_or_update_after_digest(self): + for method_name in "encrypt", "update": + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.encrypt(self.data_128) + cipher.digest() + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.encrypt_and_digest(self.data_128) + + def test_invalid_decrypt_or_update_after_verify(self): + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + for method_name in "decrypt", "update": + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.verify(mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.verify(mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + cipher = ChaCha20_Poly1305.new(key=self.key_256, + nonce=self.nonce_96) + cipher.decrypt_and_verify(ct, mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + +def compact(x): + return unhexlify(x.replace(" ", "").replace(":", "")) + + +class TestVectorsRFC(unittest.TestCase): + """Test cases from RFC7539""" + + # AAD, PT, CT, MAC, KEY, NONCE + test_vectors_hex = [ + ( '50 51 52 53 c0 c1 c2 c3 c4 c5 c6 c7', + '4c 61 64 69 65 73 20 61 6e 64 20 47 65 6e 74 6c' + '65 6d 65 6e 20 6f 66 20 74 68 65 20 63 6c 61 73' + '73 20 6f 66 20 27 39 39 3a 20 49 66 20 49 20 63' + '6f 75 6c 64 20 6f 66 66 65 72 20 79 6f 75 20 6f' + '6e 6c 79 20 6f 6e 65 20 74 69 70 20 66 6f 72 20' + '74 68 65 20 66 75 74 75 72 65 2c 20 73 75 6e 73' + '63 72 65 65 6e 20 77 6f 75 6c 64 20 62 65 20 69' + '74 2e', + 'd3 1a 8d 34 64 8e 60 db 7b 86 af bc 53 ef 7e c2' + 'a4 ad ed 51 29 6e 08 fe a9 e2 b5 a7 36 ee 62 d6' + '3d be a4 5e 8c a9 67 12 82 fa fb 69 da 92 72 8b' + '1a 71 de 0a 9e 06 0b 29 05 d6 a5 b6 7e cd 3b 36' + '92 dd bd 7f 2d 77 8b 8c 98 03 ae e3 28 09 1b 58' + 'fa b3 24 e4 fa d6 75 94 55 85 80 8b 48 31 d7 bc' + '3f f4 de f0 8e 4b 7a 9d e5 76 d2 65 86 ce c6 4b' + '61 16', + '1a:e1:0b:59:4f:09:e2:6a:7e:90:2e:cb:d0:60:06:91', + '80 81 82 83 84 85 86 87 88 89 8a 8b 8c 8d 8e 8f' + '90 91 92 93 94 95 96 97 98 99 9a 9b 9c 9d 9e 9f', + '07 00 00 00' + '40 41 42 43 44 45 46 47', + ), + ( 'f3 33 88 86 00 00 00 00 00 00 4e 91', + '49 6e 74 65 72 6e 65 74 2d 44 72 61 66 74 73 20' + '61 72 65 20 64 72 61 66 74 20 64 6f 63 75 6d 65' + '6e 74 73 20 76 61 6c 69 64 20 66 6f 72 20 61 20' + '6d 61 78 69 6d 75 6d 20 6f 66 20 73 69 78 20 6d' + '6f 6e 74 68 73 20 61 6e 64 20 6d 61 79 20 62 65' + '20 75 70 64 61 74 65 64 2c 20 72 65 70 6c 61 63' + '65 64 2c 20 6f 72 20 6f 62 73 6f 6c 65 74 65 64' + '20 62 79 20 6f 74 68 65 72 20 64 6f 63 75 6d 65' + '6e 74 73 20 61 74 20 61 6e 79 20 74 69 6d 65 2e' + '20 49 74 20 69 73 20 69 6e 61 70 70 72 6f 70 72' + '69 61 74 65 20 74 6f 20 75 73 65 20 49 6e 74 65' + '72 
6e 65 74 2d 44 72 61 66 74 73 20 61 73 20 72' + '65 66 65 72 65 6e 63 65 20 6d 61 74 65 72 69 61' + '6c 20 6f 72 20 74 6f 20 63 69 74 65 20 74 68 65' + '6d 20 6f 74 68 65 72 20 74 68 61 6e 20 61 73 20' + '2f e2 80 9c 77 6f 72 6b 20 69 6e 20 70 72 6f 67' + '72 65 73 73 2e 2f e2 80 9d', + '64 a0 86 15 75 86 1a f4 60 f0 62 c7 9b e6 43 bd' + '5e 80 5c fd 34 5c f3 89 f1 08 67 0a c7 6c 8c b2' + '4c 6c fc 18 75 5d 43 ee a0 9e e9 4e 38 2d 26 b0' + 'bd b7 b7 3c 32 1b 01 00 d4 f0 3b 7f 35 58 94 cf' + '33 2f 83 0e 71 0b 97 ce 98 c8 a8 4a bd 0b 94 81' + '14 ad 17 6e 00 8d 33 bd 60 f9 82 b1 ff 37 c8 55' + '97 97 a0 6e f4 f0 ef 61 c1 86 32 4e 2b 35 06 38' + '36 06 90 7b 6a 7c 02 b0 f9 f6 15 7b 53 c8 67 e4' + 'b9 16 6c 76 7b 80 4d 46 a5 9b 52 16 cd e7 a4 e9' + '90 40 c5 a4 04 33 22 5e e2 82 a1 b0 a0 6c 52 3e' + 'af 45 34 d7 f8 3f a1 15 5b 00 47 71 8c bc 54 6a' + '0d 07 2b 04 b3 56 4e ea 1b 42 22 73 f5 48 27 1a' + '0b b2 31 60 53 fa 76 99 19 55 eb d6 31 59 43 4e' + 'ce bb 4e 46 6d ae 5a 10 73 a6 72 76 27 09 7a 10' + '49 e6 17 d9 1d 36 10 94 fa 68 f0 ff 77 98 71 30' + '30 5b ea ba 2e da 04 df 99 7b 71 4d 6c 6f 2c 29' + 'a6 ad 5c b4 02 2b 02 70 9b', + 'ee ad 9d 67 89 0c bb 22 39 23 36 fe a1 85 1f 38', + '1c 92 40 a5 eb 55 d3 8a f3 33 88 86 04 f6 b5 f0' + '47 39 17 c1 40 2b 80 09 9d ca 5c bc 20 70 75 c0', + '00 00 00 00 01 02 03 04 05 06 07 08', + ) + ] + + test_vectors = [[unhexlify(x.replace(" ", "").replace(":", "")) for x in tv] for tv in test_vectors_hex] + + def runTest(self): + for assoc_data, pt, ct, mac, key, nonce in self.test_vectors: + # Encrypt + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + cipher.update(assoc_data) + ct2, mac2 = cipher.encrypt_and_digest(pt) + self.assertEqual(ct, ct2) + self.assertEqual(mac, mac2) + + # Decrypt + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + cipher.update(assoc_data) + pt2 = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(pt, pt2) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._id = "None" + + def load_tests(self, filename): + + def filter_tag(group): + return group['tagSize'] // 8 + + def filter_algo(root): + return root['algorithm'] + + result = load_test_vectors_wycheproof(("Cipher", "wycheproof"), + filename, + "Wycheproof ChaCha20-Poly1305", + root_tag={'algo': filter_algo}, + group_tag={'tag_size': filter_tag}) + return result + + def setUp(self): + self.tv = [] + self.tv.extend(self.load_tests("chacha20_poly1305_test.json")) + self.tv.extend(self.load_tests("xchacha20_poly1305_test.json")) + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_encrypt(self, tv): + self._id = "Wycheproof Encrypt %s Test #%s" % (tv.algo, tv.id) + + try: + cipher = ChaCha20_Poly1305.new(key=tv.key, nonce=tv.iv) + except ValueError as e: + assert len(tv.iv) not in (8, 12) and "Nonce must be" in str(e) + return + + cipher.update(tv.aad) + ct, tag = cipher.encrypt_and_digest(tv.msg) + if tv.valid: + self.assertEqual(ct, tv.ct) + self.assertEqual(tag, tv.tag) + self.warn(tv) + + def test_decrypt(self, tv): + self._id = "Wycheproof Decrypt %s Test #%s" % (tv.algo, tv.id) + + try: + cipher = ChaCha20_Poly1305.new(key=tv.key, nonce=tv.iv) + except ValueError as e: + assert len(tv.iv) not in (8, 12) and "Nonce must be" in str(e) + return + + 
cipher.update(tv.aad) + try: + pt = cipher.decrypt_and_verify(tv.ct, tv.tag) + except ValueError: + assert not tv.valid + else: + assert tv.valid + self.assertEqual(pt, tv.msg) + self.warn(tv) + + def test_corrupt_decrypt(self, tv): + self._id = "Wycheproof Corrupt Decrypt ChaCha20-Poly1305 Test #" + str(tv.id) + if len(tv.iv) == 0 or len(tv.ct) < 1: + return + cipher = ChaCha20_Poly1305.new(key=tv.key, nonce=tv.iv) + cipher.update(tv.aad) + ct_corrupt = strxor(tv.ct, b"\x00" * (len(tv.ct) - 1) + b"\x01") + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct_corrupt, tv.tag) + + def runTest(self): + + for tv in self.tv: + self.test_encrypt(tv) + self.test_decrypt(tv) + self.test_corrupt_decrypt(tv) + + +class TestOutput(unittest.TestCase): + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + key = b'4' * 32 + nonce = b'5' * 12 + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + + pt = b'5' * 16 + ct = cipher.encrypt(pt) + + output = bytearray(16) + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(16)) + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(7) + + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + + cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(ChaCha20Poly1305Tests) + tests += list_test_cases(XChaCha20Poly1305Tests) + tests += list_test_cases(ChaCha20Poly1305FSMTests) + tests += [TestVectorsRFC()] + tests += [TestVectorsWycheproof(wycheproof_warnings)] + tests += [TestOutput()] + return tests + + +if __name__ == '__main__': + def suite(): + unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_DES.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_DES.py new file mode 100644 index 000000000..ee261bce3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_DES.py @@ -0,0 +1,374 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/DES.py: Self-test for the (Single) DES cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. 
+# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.DES""" + +import unittest + +from Crypto.Cipher import DES + +# This is a list of (plaintext, ciphertext, key, description) tuples. +SP800_17_B1_KEY = '01' * 8 +SP800_17_B2_PT = '00' * 8 +test_data = [ + # Test vectors from Appendix A of NIST SP 800-17 + # "Modes of Operation Validation System (MOVS): Requirements and Procedures" + # http://csrc.nist.gov/publications/nistpubs/800-17/800-17.pdf + + # Appendix A - "Sample Round Outputs for the DES" + ('0000000000000000', '82dcbafbdeab6602', '10316e028c8f3b4a', + "NIST SP800-17 A"), + + # Table B.1 - Variable Plaintext Known Answer Test + ('8000000000000000', '95f8a5e5dd31d900', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #0'), + ('4000000000000000', 'dd7f121ca5015619', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #1'), + ('2000000000000000', '2e8653104f3834ea', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #2'), + ('1000000000000000', '4bd388ff6cd81d4f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #3'), + ('0800000000000000', '20b9e767b2fb1456', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #4'), + ('0400000000000000', '55579380d77138ef', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #5'), + ('0200000000000000', '6cc5defaaf04512f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #6'), + ('0100000000000000', '0d9f279ba5d87260', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #7'), + ('0080000000000000', 'd9031b0271bd5a0a', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #8'), + ('0040000000000000', '424250b37c3dd951', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #9'), + ('0020000000000000', 'b8061b7ecd9a21e5', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #10'), + ('0010000000000000', 'f15d0f286b65bd28', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #11'), + ('0008000000000000', 'add0cc8d6e5deba1', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #12'), + ('0004000000000000', 'e6d5f82752ad63d1', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #13'), + ('0002000000000000', 'ecbfe3bd3f591a5e', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #14'), + ('0001000000000000', 'f356834379d165cd', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #15'), + ('0000800000000000', '2b9f982f20037fa9', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #16'), + ('0000400000000000', '889de068a16f0be6', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #17'), + ('0000200000000000', 'e19e275d846a1298', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #18'), + ('0000100000000000', '329a8ed523d71aec', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #19'), + ('0000080000000000', 'e7fce22557d23c97', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #20'), + ('0000040000000000', '12a9f5817ff2d65d', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #21'), + ('0000020000000000', 'a484c3ad38dc9c19', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #22'), + ('0000010000000000', 'fbe00a8a1ef8ad72', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #23'), + ('0000008000000000', '750d079407521363', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #24'), + ('0000004000000000', '64feed9c724c2faf', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #25'), + ('0000002000000000', 'f02b263b328e2b60', SP800_17_B1_KEY, 
+ 'NIST SP800-17 B.1 #26'), + ('0000001000000000', '9d64555a9a10b852', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #27'), + ('0000000800000000', 'd106ff0bed5255d7', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #28'), + ('0000000400000000', 'e1652c6b138c64a5', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #29'), + ('0000000200000000', 'e428581186ec8f46', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #30'), + ('0000000100000000', 'aeb5f5ede22d1a36', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #31'), + ('0000000080000000', 'e943d7568aec0c5c', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #32'), + ('0000000040000000', 'df98c8276f54b04b', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #33'), + ('0000000020000000', 'b160e4680f6c696f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #34'), + ('0000000010000000', 'fa0752b07d9c4ab8', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #35'), + ('0000000008000000', 'ca3a2b036dbc8502', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #36'), + ('0000000004000000', '5e0905517bb59bcf', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #37'), + ('0000000002000000', '814eeb3b91d90726', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #38'), + ('0000000001000000', '4d49db1532919c9f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #39'), + ('0000000000800000', '25eb5fc3f8cf0621', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #40'), + ('0000000000400000', 'ab6a20c0620d1c6f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #41'), + ('0000000000200000', '79e90dbc98f92cca', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #42'), + ('0000000000100000', '866ecedd8072bb0e', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #43'), + ('0000000000080000', '8b54536f2f3e64a8', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #44'), + ('0000000000040000', 'ea51d3975595b86b', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #45'), + ('0000000000020000', 'caffc6ac4542de31', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #46'), + ('0000000000010000', '8dd45a2ddf90796c', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #47'), + ('0000000000008000', '1029d55e880ec2d0', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #48'), + ('0000000000004000', '5d86cb23639dbea9', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #49'), + ('0000000000002000', '1d1ca853ae7c0c5f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #50'), + ('0000000000001000', 'ce332329248f3228', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #51'), + ('0000000000000800', '8405d1abe24fb942', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #52'), + ('0000000000000400', 'e643d78090ca4207', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #53'), + ('0000000000000200', '48221b9937748a23', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #54'), + ('0000000000000100', 'dd7c0bbd61fafd54', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #55'), + ('0000000000000080', '2fbc291a570db5c4', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #56'), + ('0000000000000040', 'e07c30d7e4e26e12', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #57'), + ('0000000000000020', '0953e2258e8e90a1', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #58'), + ('0000000000000010', '5b711bc4ceebf2ee', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #59'), + ('0000000000000008', 'cc083f1e6d9e85f6', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #60'), + ('0000000000000004', 'd2fd8867d50d2dfe', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #61'), + ('0000000000000002', '06e7ea22ce92708f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #62'), + ('0000000000000001', '166b40b44aba4bd6', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #63'), + + # Table B.2 - Variable Key Known Answer Test + (SP800_17_B2_PT, '95a8d72813daa94d', '8001010101010101', + 'NIST SP800-17 B.2 #0'), + (SP800_17_B2_PT, '0eec1487dd8c26d5', '4001010101010101', + 'NIST SP800-17 B.2 #1'), + (SP800_17_B2_PT, '7ad16ffb79c45926', 
'2001010101010101', + 'NIST SP800-17 B.2 #2'), + (SP800_17_B2_PT, 'd3746294ca6a6cf3', '1001010101010101', + 'NIST SP800-17 B.2 #3'), + (SP800_17_B2_PT, '809f5f873c1fd761', '0801010101010101', + 'NIST SP800-17 B.2 #4'), + (SP800_17_B2_PT, 'c02faffec989d1fc', '0401010101010101', + 'NIST SP800-17 B.2 #5'), + (SP800_17_B2_PT, '4615aa1d33e72f10', '0201010101010101', + 'NIST SP800-17 B.2 #6'), + (SP800_17_B2_PT, '2055123350c00858', '0180010101010101', + 'NIST SP800-17 B.2 #7'), + (SP800_17_B2_PT, 'df3b99d6577397c8', '0140010101010101', + 'NIST SP800-17 B.2 #8'), + (SP800_17_B2_PT, '31fe17369b5288c9', '0120010101010101', + 'NIST SP800-17 B.2 #9'), + (SP800_17_B2_PT, 'dfdd3cc64dae1642', '0110010101010101', + 'NIST SP800-17 B.2 #10'), + (SP800_17_B2_PT, '178c83ce2b399d94', '0108010101010101', + 'NIST SP800-17 B.2 #11'), + (SP800_17_B2_PT, '50f636324a9b7f80', '0104010101010101', + 'NIST SP800-17 B.2 #12'), + (SP800_17_B2_PT, 'a8468ee3bc18f06d', '0102010101010101', + 'NIST SP800-17 B.2 #13'), + (SP800_17_B2_PT, 'a2dc9e92fd3cde92', '0101800101010101', + 'NIST SP800-17 B.2 #14'), + (SP800_17_B2_PT, 'cac09f797d031287', '0101400101010101', + 'NIST SP800-17 B.2 #15'), + (SP800_17_B2_PT, '90ba680b22aeb525', '0101200101010101', + 'NIST SP800-17 B.2 #16'), + (SP800_17_B2_PT, 'ce7a24f350e280b6', '0101100101010101', + 'NIST SP800-17 B.2 #17'), + (SP800_17_B2_PT, '882bff0aa01a0b87', '0101080101010101', + 'NIST SP800-17 B.2 #18'), + (SP800_17_B2_PT, '25610288924511c2', '0101040101010101', + 'NIST SP800-17 B.2 #19'), + (SP800_17_B2_PT, 'c71516c29c75d170', '0101020101010101', + 'NIST SP800-17 B.2 #20'), + (SP800_17_B2_PT, '5199c29a52c9f059', '0101018001010101', + 'NIST SP800-17 B.2 #21'), + (SP800_17_B2_PT, 'c22f0a294a71f29f', '0101014001010101', + 'NIST SP800-17 B.2 #22'), + (SP800_17_B2_PT, 'ee371483714c02ea', '0101012001010101', + 'NIST SP800-17 B.2 #23'), + (SP800_17_B2_PT, 'a81fbd448f9e522f', '0101011001010101', + 'NIST SP800-17 B.2 #24'), + (SP800_17_B2_PT, '4f644c92e192dfed', '0101010801010101', + 'NIST SP800-17 B.2 #25'), + (SP800_17_B2_PT, '1afa9a66a6df92ae', '0101010401010101', + 'NIST SP800-17 B.2 #26'), + (SP800_17_B2_PT, 'b3c1cc715cb879d8', '0101010201010101', + 'NIST SP800-17 B.2 #27'), + (SP800_17_B2_PT, '19d032e64ab0bd8b', '0101010180010101', + 'NIST SP800-17 B.2 #28'), + (SP800_17_B2_PT, '3cfaa7a7dc8720dc', '0101010140010101', + 'NIST SP800-17 B.2 #29'), + (SP800_17_B2_PT, 'b7265f7f447ac6f3', '0101010120010101', + 'NIST SP800-17 B.2 #30'), + (SP800_17_B2_PT, '9db73b3c0d163f54', '0101010110010101', + 'NIST SP800-17 B.2 #31'), + (SP800_17_B2_PT, '8181b65babf4a975', '0101010108010101', + 'NIST SP800-17 B.2 #32'), + (SP800_17_B2_PT, '93c9b64042eaa240', '0101010104010101', + 'NIST SP800-17 B.2 #33'), + (SP800_17_B2_PT, '5570530829705592', '0101010102010101', + 'NIST SP800-17 B.2 #34'), + (SP800_17_B2_PT, '8638809e878787a0', '0101010101800101', + 'NIST SP800-17 B.2 #35'), + (SP800_17_B2_PT, '41b9a79af79ac208', '0101010101400101', + 'NIST SP800-17 B.2 #36'), + (SP800_17_B2_PT, '7a9be42f2009a892', '0101010101200101', + 'NIST SP800-17 B.2 #37'), + (SP800_17_B2_PT, '29038d56ba6d2745', '0101010101100101', + 'NIST SP800-17 B.2 #38'), + (SP800_17_B2_PT, '5495c6abf1e5df51', '0101010101080101', + 'NIST SP800-17 B.2 #39'), + (SP800_17_B2_PT, 'ae13dbd561488933', '0101010101040101', + 'NIST SP800-17 B.2 #40'), + (SP800_17_B2_PT, '024d1ffa8904e389', '0101010101020101', + 'NIST SP800-17 B.2 #41'), + (SP800_17_B2_PT, 'd1399712f99bf02e', '0101010101018001', + 'NIST SP800-17 B.2 #42'), + (SP800_17_B2_PT, 
'14c1d7c1cffec79e', '0101010101014001', + 'NIST SP800-17 B.2 #43'), + (SP800_17_B2_PT, '1de5279dae3bed6f', '0101010101012001', + 'NIST SP800-17 B.2 #44'), + (SP800_17_B2_PT, 'e941a33f85501303', '0101010101011001', + 'NIST SP800-17 B.2 #45'), + (SP800_17_B2_PT, 'da99dbbc9a03f379', '0101010101010801', + 'NIST SP800-17 B.2 #46'), + (SP800_17_B2_PT, 'b7fc92f91d8e92e9', '0101010101010401', + 'NIST SP800-17 B.2 #47'), + (SP800_17_B2_PT, 'ae8e5caa3ca04e85', '0101010101010201', + 'NIST SP800-17 B.2 #48'), + (SP800_17_B2_PT, '9cc62df43b6eed74', '0101010101010180', + 'NIST SP800-17 B.2 #49'), + (SP800_17_B2_PT, 'd863dbb5c59a91a0', '0101010101010140', + 'NIST SP800-17 B.2 #50'), + (SP800_17_B2_PT, 'a1ab2190545b91d7', '0101010101010120', + 'NIST SP800-17 B.2 #51'), + (SP800_17_B2_PT, '0875041e64c570f7', '0101010101010110', + 'NIST SP800-17 B.2 #52'), + (SP800_17_B2_PT, '5a594528bebef1cc', '0101010101010108', + 'NIST SP800-17 B.2 #53'), + (SP800_17_B2_PT, 'fcdb3291de21f0c0', '0101010101010104', + 'NIST SP800-17 B.2 #54'), + (SP800_17_B2_PT, '869efd7f9f265a09', '0101010101010102', + 'NIST SP800-17 B.2 #55'), +] + +class RonRivestTest(unittest.TestCase): + """ Ronald L. Rivest's DES test, see + http://people.csail.mit.edu/rivest/Destest.txt + ABSTRACT + -------- + + We present a simple way to test the correctness of a DES implementation: + Use the recurrence relation: + + X0 = 9474B8E8C73BCA7D (hexadecimal) + + X(i+1) = IF (i is even) THEN E(Xi,Xi) ELSE D(Xi,Xi) + + to compute a sequence of 64-bit values: X0, X1, X2, ..., X16. Here + E(X,K) denotes the DES encryption of X using key K, and D(X,K) denotes + the DES decryption of X using key K. If you obtain + + X16 = 1B1A2DDB4C642438 + + your implementation does not have any of the 36,568 possible single-fault + errors described herein. 
+ """ + def runTest(self): + from binascii import b2a_hex + + X = [] + X[0:] = [b'\x94\x74\xB8\xE8\xC7\x3B\xCA\x7D'] + + for i in range(16): + c = DES.new(X[i],DES.MODE_ECB) + if not (i&1): # (num&1) returns 1 for odd numbers + X[i+1:] = [c.encrypt(X[i])] # even + else: + X[i+1:] = [c.decrypt(X[i])] # odd + + self.assertEqual(b2a_hex(X[16]), + b2a_hex(b'\x1B\x1A\x2D\xDB\x4C\x64\x24\x38')) + + +class TestOutput(unittest.TestCase): + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + cipher = DES.new(b'4'*8, DES.MODE_ECB) + + pt = b'5' * 8 + ct = cipher.encrypt(pt) + + output = bytearray(8) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(8)) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*8) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*8) + + shorter_output = bytearray(7) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + from .common import make_block_tests + tests = make_block_tests(DES, "DES", test_data) + tests += [RonRivestTest()] + tests += [TestOutput()] + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_DES3.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_DES3.py new file mode 100644 index 000000000..8d6a64851 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_DES3.py @@ -0,0 +1,195 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/DES3.py: Self-test for the Triple-DES cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.Cipher.DES3""" + +import unittest +from binascii import hexlify, unhexlify + +from Crypto.Cipher import DES3 + +from Crypto.Util.strxor import strxor_c +from Crypto.Util.py3compat import bchr, tostr +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases + +# This is a list of (plaintext, ciphertext, key, description) tuples. +test_data = [ + # Test vector from Appendix B of NIST SP 800-67 + # "Recommendation for the Triple Data Encryption Algorithm (TDEA) Block + # Cipher" + # http://csrc.nist.gov/publications/nistpubs/800-67/SP800-67.pdf + ('54686520717566636b2062726f776e20666f78206a756d70', + 'a826fd8ce53b855fcce21c8112256fe668d5c05dd9b6b900', + '0123456789abcdef23456789abcdef01456789abcdef0123', + 'NIST SP800-67 B.1'), + + # This test is designed to test the DES3 API, not the correctness of the + # output. + ('21e81b7ade88a259', '5c577d4d9b20c0f8', + '9b397ebf81b1181e282f4bb8adbadc6b', 'Two-key 3DES'), +] + +# NIST CAVP test vectors + +nist_tdes_mmt_files = ("TECBMMT2.rsp", "TECBMMT3.rsp") + +for tdes_file in nist_tdes_mmt_files: + + test_vectors = load_test_vectors( + ("Cipher", "TDES"), + tdes_file, + "TDES ECB (%s)" % tdes_file, + {"count": lambda x: int(x)}) or [] + + for index, tv in enumerate(test_vectors): + + # The test vector file contains some directive lines + if isinstance(tv, str): + continue + + key = tv.key1 + tv.key2 + tv.key3 + test_data_item = (tostr(hexlify(tv.plaintext)), + tostr(hexlify(tv.ciphertext)), + tostr(hexlify(key)), + "%s (%s)" % (tdes_file, index)) + test_data.append(test_data_item) + + +class CheckParity(unittest.TestCase): + + def test_parity_option2(self): + before_2k = unhexlify("CABF326FA56734324FFCCABCDEFACABF") + after_2k = DES3.adjust_key_parity(before_2k) + self.assertEqual(after_2k, + unhexlify("CBBF326EA46734324FFDCBBCDFFBCBBF")) + + def test_parity_option3(self): + before_3k = unhexlify("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCC") + after_3k = DES3.adjust_key_parity(before_3k) + self.assertEqual(after_3k, + unhexlify("ABABABABABABABABBABABABABABABABACDCDCDCDCDCDCDCD")) + + def test_degradation(self): + sub_key1 = bchr(1) * 8 + sub_key2 = bchr(255) * 8 + + # K1 == K2 + self.assertRaises(ValueError, DES3.adjust_key_parity, + sub_key1 * 2 + sub_key2) + + # K2 == K3 + self.assertRaises(ValueError, DES3.adjust_key_parity, + sub_key1 + sub_key2 * 2) + + # K1 == K2 == K3 + self.assertRaises(ValueError, DES3.adjust_key_parity, + sub_key1 * 3) + + # K1 == K2 (with different parity) + self.assertRaises(ValueError, DES3.adjust_key_parity, + sub_key1 + strxor_c(sub_key1, 1) + sub_key2) + + +class DegenerateToDESTest(unittest.TestCase): + + def runTest(self): + sub_key1 = bchr(1) * 8 + sub_key2 = bchr(255) * 8 + + # K1 == K2 + self.assertRaises(ValueError, DES3.new, + sub_key1 * 2 + sub_key2, + DES3.MODE_ECB) + + # K2 == K3 + self.assertRaises(ValueError, DES3.new, + sub_key1 + sub_key2 * 2, + DES3.MODE_ECB) + + # K1 == K2 == K3 + self.assertRaises(ValueError, DES3.new, + sub_key1 * 3, + DES3.MODE_ECB) + + # K2 == K3 (parity is ignored) + self.assertRaises(ValueError, DES3.new, + sub_key1 + sub_key2 + strxor_c(sub_key2, 0x1), + DES3.MODE_ECB) + + +class TestOutput(unittest.TestCase): + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + cipher = DES3.new(b'4'*8 + b'G'*8 + b'T'*8, DES3.MODE_ECB) + + pt = b'5' * 16 + ct = cipher.encrypt(pt) + + output = bytearray(16) + res 
= cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(16)) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*16) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*16) + + shorter_output = bytearray(7) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + from .common import make_block_tests + + tests = [] + tests = make_block_tests(DES3, "DES3", test_data) + tests.append(DegenerateToDESTest()) + tests += list_test_cases(CheckParity) + tests += [TestOutput()] + return tests + + +if __name__ == '__main__': + import unittest + + def suite(): + unittest.TestSuite(get_tests()) + + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_EAX.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_EAX.py new file mode 100644 index 000000000..7dbee2bea --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_EAX.py @@ -0,0 +1,773 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors_wycheproof +from Crypto.Util.py3compat import tobytes, bchr +from Crypto.Cipher import AES, DES3 +from Crypto.Hash import SHAKE128 + +from Crypto.Util.strxor import strxor + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +class EaxTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + key_192 = get_tag_random("key_192", 16) + nonce_96 = get_tag_random("nonce_128", 12) + data_128 = get_tag_random("data_128", 16) + + def test_loopback_128(self): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + pt = get_tag_random("plaintext", 16 * 100) + ct = cipher.encrypt(pt) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_loopback_64(self): + cipher = DES3.new(self.key_192, DES3.MODE_EAX, nonce=self.nonce_96) + pt = get_tag_random("plaintext", 8 * 100) + ct = cipher.encrypt(pt) + + cipher = DES3.new(self.key_192, DES3.MODE_EAX, nonce=self.nonce_96) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_nonce(self): + # If not passed, the nonce is created randomly + cipher = AES.new(self.key_128, AES.MODE_EAX) + nonce1 = cipher.nonce + cipher = AES.new(self.key_128, AES.MODE_EAX) + nonce2 = cipher.nonce + self.assertEqual(len(nonce1), 16) + self.assertNotEqual(nonce1, nonce2) + + cipher = AES.new(self.key_128, AES.MODE_EAX, self.nonce_96) + ct = cipher.encrypt(self.data_128) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertEqual(ct, cipher.encrypt(self.data_128)) + + def test_nonce_must_be_bytes(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_EAX, + nonce=u'test12345678') + + def test_nonce_length(self): + # nonce can be of any length (but not empty) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_EAX, + nonce=b"") + + for x in range(1, 128): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=bchr(1) * x) + cipher.encrypt(bchr(1)) + + def test_block_size_128(self): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertEqual(cipher.block_size, AES.block_size) + + def test_block_size_64(self): + cipher = DES3.new(self.key_192, AES.MODE_EAX, nonce=self.nonce_96) + self.assertEqual(cipher.block_size, DES3.block_size) + + def test_nonce_attribute(self): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertEqual(cipher.nonce, self.nonce_96) + + # By default, a 16 bytes long nonce is randomly generated + nonce1 = AES.new(self.key_128, AES.MODE_EAX).nonce + nonce2 = AES.new(self.key_128, AES.MODE_EAX).nonce + self.assertEqual(len(nonce1), 16) + self.assertNotEqual(nonce1, nonce2) + + def test_unknown_parameters(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_EAX, + self.nonce_96, 7) + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_EAX, + nonce=self.nonce_96, unknown=7) + + # But some are only known by the base cipher + # (e.g. 
use_aesni consumed by the AES module) + AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96, + use_aesni=False) + + def test_null_encryption_decryption(self): + for func in "encrypt", "decrypt": + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + result = getattr(cipher, func)(b"") + self.assertEqual(result, b"") + + def test_either_encrypt_or_decrypt(self): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.encrypt(b"") + self.assertRaises(TypeError, cipher.decrypt, b"") + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.decrypt(b"") + self.assertRaises(TypeError, cipher.encrypt, b"") + + def test_data_must_be_bytes(self): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') + + def test_mac_len(self): + # Invalid MAC length + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_EAX, + nonce=self.nonce_96, mac_len=2-1) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_EAX, + nonce=self.nonce_96, mac_len=16+1) + + # Valid MAC length + for mac_len in range(2, 16 + 1): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96, + mac_len=mac_len) + _, mac = cipher.encrypt_and_digest(self.data_128) + self.assertEqual(len(mac), mac_len) + + # Default MAC length + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + _, mac = cipher.encrypt_and_digest(self.data_128) + self.assertEqual(len(mac), 16) + + def test_invalid_mac(self): + from Crypto.Util.strxor import strxor_c + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + invalid_mac = strxor_c(mac, 0x01) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, + invalid_mac) + + def test_hex_mac(self): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + mac_hex = cipher.hexdigest() + self.assertEqual(cipher.digest(), unhexlify(mac_hex)) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.hexverify(mac_hex) + + def test_message_chunks(self): + # Validate that both associated data and plaintext/ciphertext + # can be broken up in chunks of arbitrary length + + auth_data = get_tag_random("authenticated data", 127) + plaintext = get_tag_random("plaintext", 127) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.update(auth_data) + ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext) + + def break_up(data, chunk_length): + return [data[i:i+chunk_length] for i in range(0, len(data), + chunk_length)] + + # Encryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + pt2 = b"" + for chunk in break_up(ciphertext, chunk_length): + pt2 += cipher.decrypt(chunk) + self.assertEqual(plaintext, pt2) + cipher.verify(ref_mac) + + # Decryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + ct2 = b"" + for chunk in break_up(plaintext, chunk_length): + ct2 += cipher.encrypt(chunk) + 
self.assertEqual(ciphertext, ct2) + self.assertEqual(cipher.digest(), ref_mac) + + def test_bytearray(self): + + # Encrypt + key_ba = bytearray(self.key_128) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data_128) + data_ba = bytearray(self.data_128) + + cipher1 = AES.new(self.key_128, + AES.MODE_EAX, + nonce=self.nonce_96) + cipher1.update(self.data_128) + ct = cipher1.encrypt(self.data_128) + tag = cipher1.digest() + + cipher2 = AES.new(key_ba, + AES.MODE_EAX, + nonce=nonce_ba) + key_ba[:3] = b'\xFF\xFF\xFF' + nonce_ba[:3] = b'\xFF\xFF\xFF' + cipher2.update(header_ba) + header_ba[:3] = b'\xFF\xFF\xFF' + ct_test = cipher2.encrypt(data_ba) + data_ba[:3] = b'\x99\x99\x99' + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_ba = bytearray(self.key_128) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data_128) + ct_ba = bytearray(ct) + tag_ba = bytearray(tag) + del data_ba + + cipher3 = AES.new(key_ba, + AES.MODE_EAX, + nonce=nonce_ba) + key_ba[:3] = b'\xFF\xFF\xFF' + nonce_ba[:3] = b'\xFF\xFF\xFF' + cipher3.update(header_ba) + header_ba[:3] = b'\xFF\xFF\xFF' + pt_test = cipher3.decrypt(ct_ba) + ct_ba[:3] = b'\xFF\xFF\xFF' + cipher3.verify(tag_ba) + + self.assertEqual(pt_test, self.data_128) + + def test_memoryview(self): + + # Encrypt + key_mv = memoryview(bytearray(self.key_128)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data_128)) + data_mv = memoryview(bytearray(self.data_128)) + + cipher1 = AES.new(self.key_128, + AES.MODE_EAX, + nonce=self.nonce_96) + cipher1.update(self.data_128) + ct = cipher1.encrypt(self.data_128) + tag = cipher1.digest() + + cipher2 = AES.new(key_mv, + AES.MODE_EAX, + nonce=nonce_mv) + key_mv[:3] = b'\xFF\xFF\xFF' + nonce_mv[:3] = b'\xFF\xFF\xFF' + cipher2.update(header_mv) + header_mv[:3] = b'\xFF\xFF\xFF' + ct_test = cipher2.encrypt(data_mv) + data_mv[:3] = b'\x99\x99\x99' + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_mv = memoryview(bytearray(self.key_128)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data_128)) + ct_mv = memoryview(bytearray(ct)) + tag_mv = memoryview(bytearray(tag)) + del data_mv + + cipher3 = AES.new(key_mv, + AES.MODE_EAX, + nonce=nonce_mv) + key_mv[:3] = b'\xFF\xFF\xFF' + nonce_mv[:3] = b'\xFF\xFF\xFF' + cipher3.update(header_mv) + header_mv[:3] = b'\xFF\xFF\xFF' + pt_test = cipher3.decrypt(ct_mv) + ct_mv[:3] = b'\x99\x99\x99' + cipher3.verify(tag_mv) + + self.assertEqual(pt_test, self.data_128) + + def test_output_param(self): + + pt = b'5' * 128 + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + tag = cipher.digest() + + output = bytearray(128) + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + res, tag_out = cipher.encrypt_and_digest(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + self.assertEqual(tag, tag_out) + + cipher = 
AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + res = cipher.decrypt_and_verify(ct, tag, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + def test_output_param_memoryview(self): + + pt = b'5' * 128 + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + + output = memoryview(bytearray(128)) + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + def test_output_param_neg(self): + LEN_PT = 16 + + pt = b'5' * LEN_PT + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0' * LEN_PT) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0' * LEN_PT) + + shorter_output = bytearray(LEN_PT - 1) + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +class EaxFSMTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + nonce_96 = get_tag_random("nonce_128", 12) + data_128 = get_tag_random("data_128", 16) + + def test_valid_init_encrypt_decrypt_digest_verify(self): + # No authenticated data, fixed plaintext + # Verify path INIT->ENCRYPT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_EAX, + nonce=self.nonce_96) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + # Verify path INIT->DECRYPT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_EAX, + nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.verify(mac) + + def test_valid_init_update_digest_verify(self): + # No plaintext, fixed authenticated data + # Verify path INIT->UPDATE->DIGEST + cipher = AES.new(self.key_128, AES.MODE_EAX, + nonce=self.nonce_96) + cipher.update(self.data_128) + mac = cipher.digest() + + # Verify path INIT->UPDATE->VERIFY + cipher = AES.new(self.key_128, AES.MODE_EAX, + nonce=self.nonce_96) + cipher.update(self.data_128) + cipher.verify(mac) + + def test_valid_full_path(self): + # Fixed authenticated data, fixed plaintext + # Verify path INIT->UPDATE->ENCRYPT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_EAX, + nonce=self.nonce_96) + cipher.update(self.data_128) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + # Verify path INIT->UPDATE->DECRYPT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_EAX, + nonce=self.nonce_96) + cipher.update(self.data_128) + cipher.decrypt(ct) + cipher.verify(mac) + + def test_valid_init_digest(self): + # Verify path INIT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.digest() + + def test_valid_init_verify(self): + # Verify path INIT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + mac = cipher.digest() + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.verify(mac) + + def test_valid_multiple_encrypt_or_decrypt(self): + for method_name in "encrypt", "decrypt": + for auth_data in (None, b"333", self.data_128, + self.data_128 + b"3"): + if auth_data is None: + assoc_len = None + else: 
+ assoc_len = len(auth_data) + cipher = AES.new(self.key_128, AES.MODE_EAX, + nonce=self.nonce_96) + if auth_data is not None: + cipher.update(auth_data) + method = getattr(cipher, method_name) + method(self.data_128) + method(self.data_128) + method(self.data_128) + method(self.data_128) + + def test_valid_multiple_digest_or_verify(self): + # Multiple calls to digest + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.update(self.data_128) + first_mac = cipher.digest() + for x in range(4): + self.assertEqual(first_mac, cipher.digest()) + + # Multiple calls to verify + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.update(self.data_128) + for x in range(5): + cipher.verify(first_mac) + + def test_valid_encrypt_and_digest_decrypt_and_verify(self): + # encrypt_and_digest + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.update(self.data_128) + ct, mac = cipher.encrypt_and_digest(self.data_128) + + # decrypt_and_verify + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.update(self.data_128) + pt = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(self.data_128, pt) + + def test_invalid_mixing_encrypt_decrypt(self): + # Once per method, with or without assoc. data + for method1_name, method2_name in (("encrypt", "decrypt"), + ("decrypt", "encrypt")): + for assoc_data_present in (True, False): + cipher = AES.new(self.key_128, AES.MODE_EAX, + nonce=self.nonce_96) + if assoc_data_present: + cipher.update(self.data_128) + getattr(cipher, method1_name)(self.data_128) + self.assertRaises(TypeError, getattr(cipher, method2_name), + self.data_128) + + def test_invalid_encrypt_or_update_after_digest(self): + for method_name in "encrypt", "update": + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.encrypt(self.data_128) + cipher.digest() + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.encrypt_and_digest(self.data_128) + + def test_invalid_decrypt_or_update_after_verify(self): + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + ct = cipher.encrypt(self.data_128) + mac = cipher.digest() + + for method_name in "decrypt", "update": + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.verify(mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + cipher = AES.new(self.key_128, AES.MODE_EAX, nonce=self.nonce_96) + cipher.decrypt_and_verify(ct, mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data_128) + + +class TestVectorsPaper(unittest.TestCase): + """Class exercising the EAX test vectors found in + http://www.cs.ucdavis.edu/~rogaway/papers/eax.pdf""" + + test_vectors_hex = [ + ( '6bfb914fd07eae6b', + '', + '', + 'e037830e8389f27b025a2d6527e79d01', + '233952dee4d5ed5f9b9c6d6ff80ff478', + '62EC67F9C3A4A407FCB2A8C49031A8B3' + ), + ( + 'fa3bfd4806eb53fa', + 'f7fb', + '19dd', + '5c4c9331049d0bdab0277408f67967e5', + '91945d3f4dcbee0bf45ef52255f095a4', + 'BECAF043B0A23D843194BA972C66DEBD' + ), + ( '234a3463c1264ac6', + '1a47cb4933', + 'd851d5bae0', + '3a59f238a23e39199dc9266626c40f80', + '01f74ad64077f2e704c0f60ada3dd523', + '70C3DB4F0D26368400A10ED05D2BFF5E' + ), + ( + '33cce2eabff5a79d', + '481c9e39b1', + '632a9d131a', + 'd4c168a4225d8e1ff755939974a7bede', + 'd07cf6cbb7f313bdde66b727afd3c5e8', + '8408DFFF3C1A2B1292DC199E46B7D617' + ), + ( + 
'aeb96eaebe2970e9', + '40d0c07da5e4', + '071dfe16c675', + 'cb0677e536f73afe6a14b74ee49844dd', + '35b6d0580005bbc12b0587124557d2c2', + 'FDB6B06676EEDC5C61D74276E1F8E816' + ), + ( + 'd4482d1ca78dce0f', + '4de3b35c3fc039245bd1fb7d', + '835bb4f15d743e350e728414', + 'abb8644fd6ccb86947c5e10590210a4f', + 'bd8e6e11475e60b268784c38c62feb22', + '6EAC5C93072D8E8513F750935E46DA1B' + ), + ( + '65d2017990d62528', + '8b0a79306c9ce7ed99dae4f87f8dd61636', + '02083e3979da014812f59f11d52630da30', + '137327d10649b0aa6e1c181db617d7f2', + '7c77d6e813bed5ac98baa417477a2e7d', + '1A8C98DCD73D38393B2BF1569DEEFC19' + ), + ( + '54b9f04e6a09189a', + '1bda122bce8a8dbaf1877d962b8592dd2d56', + '2ec47b2c4954a489afc7ba4897edcdae8cc3', + '3b60450599bd02c96382902aef7f832a', + '5fff20cafab119ca2fc73549e20f5b0d', + 'DDE59B97D722156D4D9AFF2BC7559826' + ), + ( + '899a175897561d7e', + '6cf36720872b8513f6eab1a8a44438d5ef11', + '0de18fd0fdd91e7af19f1d8ee8733938b1e8', + 'e7f6d2231618102fdb7fe55ff1991700', + 'a4a4782bcffd3ec5e7ef6d8c34a56123', + 'B781FCF2F75FA5A8DE97A9CA48E522EC' + ), + ( + '126735fcc320d25a', + 'ca40d7446e545ffaed3bd12a740a659ffbbb3ceab7', + 'cb8920f87a6c75cff39627b56e3ed197c552d295a7', + 'cfc46afc253b4652b1af3795b124ab6e', + '8395fcf1e95bebd697bd010bc766aac3', + '22E7ADD93CFC6393C57EC0B3C17D6B44' + ), + ] + + test_vectors = [[unhexlify(x) for x in tv] for tv in test_vectors_hex] + + def runTest(self): + for assoc_data, pt, ct, mac, key, nonce in self.test_vectors: + # Encrypt + cipher = AES.new(key, AES.MODE_EAX, nonce, mac_len=len(mac)) + cipher.update(assoc_data) + ct2, mac2 = cipher.encrypt_and_digest(pt) + self.assertEqual(ct, ct2) + self.assertEqual(mac, mac2) + + # Decrypt + cipher = AES.new(key, AES.MODE_EAX, nonce, mac_len=len(mac)) + cipher.update(assoc_data) + pt2 = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(pt, pt2) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._id = "None" + + def setUp(self): + + def filter_tag(group): + return group['tagSize'] // 8 + + self.tv = load_test_vectors_wycheproof(("Cipher", "wycheproof"), + "aes_eax_test.json", + "Wycheproof EAX", + group_tag={'tag_size': filter_tag}) + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_encrypt(self, tv): + self._id = "Wycheproof Encrypt EAX Test #" + str(tv.id) + + try: + cipher = AES.new(tv.key, AES.MODE_EAX, tv.iv, mac_len=tv.tag_size) + except ValueError as e: + assert len(tv.iv) == 0 and "Nonce cannot be empty" in str(e) + return + + cipher.update(tv.aad) + ct, tag = cipher.encrypt_and_digest(tv.msg) + if tv.valid: + self.assertEqual(ct, tv.ct) + self.assertEqual(tag, tv.tag) + self.warn(tv) + + def test_decrypt(self, tv): + self._id = "Wycheproof Decrypt EAX Test #" + str(tv.id) + + try: + cipher = AES.new(tv.key, AES.MODE_EAX, tv.iv, mac_len=tv.tag_size) + except ValueError as e: + assert len(tv.iv) == 0 and "Nonce cannot be empty" in str(e) + return + + cipher.update(tv.aad) + try: + pt = cipher.decrypt_and_verify(tv.ct, tv.tag) + except ValueError: + assert not tv.valid + else: + assert tv.valid + self.assertEqual(pt, tv.msg) + self.warn(tv) + + def test_corrupt_decrypt(self, tv): + self._id = "Wycheproof Corrupt Decrypt EAX Test #" + str(tv.id) + if len(tv.iv) == 0 or len(tv.ct) < 1: + return + cipher = 
AES.new(tv.key, AES.MODE_EAX, tv.iv, mac_len=tv.tag_size) + cipher.update(tv.aad) + ct_corrupt = strxor(tv.ct, b"\x00" * (len(tv.ct) - 1) + b"\x01") + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct_corrupt, tv.tag) + + def runTest(self): + + for tv in self.tv: + self.test_encrypt(tv) + self.test_decrypt(tv) + self.test_corrupt_decrypt(tv) + + +class TestOtherCiphers(unittest.TestCase): + + @classmethod + def create_test(cls, name, factory, key_size): + + def test_template(self, factory=factory, key_size=key_size): + cipher = factory.new(get_tag_random("cipher", key_size), + factory.MODE_EAX, + nonce=b"nonce") + ct, mac = cipher.encrypt_and_digest(b"plaintext") + + cipher = factory.new(get_tag_random("cipher", key_size), + factory.MODE_EAX, + nonce=b"nonce") + pt2 = cipher.decrypt_and_verify(ct, mac) + + self.assertEqual(b"plaintext", pt2) + + setattr(cls, "test_" + name, test_template) + + +from Crypto.Cipher import DES, DES3, ARC2, CAST, Blowfish + +TestOtherCiphers.create_test("DES_" + str(DES.key_size), DES, DES.key_size) +for ks in DES3.key_size: + TestOtherCiphers.create_test("DES3_" + str(ks), DES3, ks) +for ks in ARC2.key_size: + TestOtherCiphers.create_test("ARC2_" + str(ks), ARC2, ks) +for ks in CAST.key_size: + TestOtherCiphers.create_test("CAST_" + str(ks), CAST, ks) +for ks in Blowfish.key_size: + TestOtherCiphers.create_test("Blowfish_" + str(ks), Blowfish, ks) + + +def get_tests(config={}): + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(EaxTests) + tests += list_test_cases(EaxFSMTests) + tests += [ TestVectorsPaper() ] + tests += [ TestVectorsWycheproof(wycheproof_warnings) ] + tests += list_test_cases(TestOtherCiphers) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_GCM.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_GCM.py new file mode 100644 index 000000000..dd8da2fd8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_GCM.py @@ -0,0 +1,951 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from __future__ import print_function + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors, load_test_vectors_wycheproof + +from Crypto.Util.py3compat import tobytes, bchr +from Crypto.Cipher import AES +from Crypto.Hash import SHAKE128, SHA256 + +from Crypto.Util.strxor import strxor + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +class GcmTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + nonce_96 = get_tag_random("nonce_128", 12) + data = get_tag_random("data", 128) + + def test_loopback_128(self): + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + pt = get_tag_random("plaintext", 16 * 100) + ct = cipher.encrypt(pt) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_nonce(self): + # Nonce is optional (a random one will be created) + AES.new(self.key_128, AES.MODE_GCM) + + cipher = AES.new(self.key_128, AES.MODE_GCM, self.nonce_96) + ct = cipher.encrypt(self.data) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertEqual(ct, cipher.encrypt(self.data)) + + def test_nonce_must_be_bytes(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_GCM, + nonce=u'test12345678') + + def test_nonce_length(self): + # nonce can be of any length (but not empty) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_GCM, + nonce=b"") + + for x in range(1, 128): + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=bchr(1) * x) + cipher.encrypt(bchr(1)) + + def test_block_size_128(self): + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertEqual(cipher.block_size, AES.block_size) + + def test_nonce_attribute(self): + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertEqual(cipher.nonce, self.nonce_96) + + # By default, a 15 bytes long nonce is randomly generated + nonce1 = AES.new(self.key_128, AES.MODE_GCM).nonce + nonce2 = AES.new(self.key_128, AES.MODE_GCM).nonce + self.assertEqual(len(nonce1), 16) + self.assertNotEqual(nonce1, nonce2) + + def test_unknown_parameters(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_GCM, + self.nonce_96, 7) + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_GCM, + nonce=self.nonce_96, unknown=7) + + # But some are only known by the base cipher + # (e.g. 
use_aesni consumed by the AES module) + AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96, + use_aesni=False) + + def test_null_encryption_decryption(self): + for func in "encrypt", "decrypt": + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + result = getattr(cipher, func)(b"") + self.assertEqual(result, b"") + + def test_either_encrypt_or_decrypt(self): + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.encrypt(b"") + self.assertRaises(TypeError, cipher.decrypt, b"") + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.decrypt(b"") + self.assertRaises(TypeError, cipher.encrypt, b"") + + def test_data_must_be_bytes(self): + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') + + def test_mac_len(self): + # Invalid MAC length + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_GCM, + nonce=self.nonce_96, mac_len=3) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_GCM, + nonce=self.nonce_96, mac_len=16+1) + + # Valid MAC length + for mac_len in range(5, 16 + 1): + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96, + mac_len=mac_len) + _, mac = cipher.encrypt_and_digest(self.data) + self.assertEqual(len(mac), mac_len) + + # Default MAC length + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + _, mac = cipher.encrypt_and_digest(self.data) + self.assertEqual(len(mac), 16) + + def test_invalid_mac(self): + from Crypto.Util.strxor import strxor_c + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + ct, mac = cipher.encrypt_and_digest(self.data) + + invalid_mac = strxor_c(mac, 0x01) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, + invalid_mac) + + def test_hex_mac(self): + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + mac_hex = cipher.hexdigest() + self.assertEqual(cipher.digest(), unhexlify(mac_hex)) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.hexverify(mac_hex) + + def test_message_chunks(self): + # Validate that both associated data and plaintext/ciphertext + # can be broken up in chunks of arbitrary length + + auth_data = get_tag_random("authenticated data", 127) + plaintext = get_tag_random("plaintext", 127) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.update(auth_data) + ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext) + + def break_up(data, chunk_length): + return [data[i:i+chunk_length] for i in range(0, len(data), + chunk_length)] + + # Encryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + pt2 = b"" + for chunk in break_up(ciphertext, chunk_length): + pt2 += cipher.decrypt(chunk) + self.assertEqual(plaintext, pt2) + cipher.verify(ref_mac) + + # Decryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + ct2 = b"" + for chunk in break_up(plaintext, chunk_length): + ct2 += cipher.encrypt(chunk) + self.assertEqual(ciphertext, 
ct2) + self.assertEqual(cipher.digest(), ref_mac) + + def test_bytearray(self): + + # Encrypt + key_ba = bytearray(self.key_128) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data) + data_ba = bytearray(self.data) + + cipher1 = AES.new(self.key_128, + AES.MODE_GCM, + nonce=self.nonce_96) + cipher1.update(self.data) + ct = cipher1.encrypt(self.data) + tag = cipher1.digest() + + cipher2 = AES.new(key_ba, + AES.MODE_GCM, + nonce=nonce_ba) + key_ba[:3] = b"\xFF\xFF\xFF" + nonce_ba[:3] = b"\xFF\xFF\xFF" + cipher2.update(header_ba) + header_ba[:3] = b"\xFF\xFF\xFF" + ct_test = cipher2.encrypt(data_ba) + data_ba[:3] = b"\xFF\xFF\xFF" + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_ba = bytearray(self.key_128) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data) + del data_ba + + cipher4 = AES.new(key_ba, + AES.MODE_GCM, + nonce=nonce_ba) + key_ba[:3] = b"\xFF\xFF\xFF" + nonce_ba[:3] = b"\xFF\xFF\xFF" + cipher4.update(header_ba) + header_ba[:3] = b"\xFF\xFF\xFF" + pt_test = cipher4.decrypt_and_verify(bytearray(ct_test), bytearray(tag_test)) + + self.assertEqual(self.data, pt_test) + + def test_memoryview(self): + + # Encrypt + key_mv = memoryview(bytearray(self.key_128)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data)) + data_mv = memoryview(bytearray(self.data)) + + cipher1 = AES.new(self.key_128, + AES.MODE_GCM, + nonce=self.nonce_96) + cipher1.update(self.data) + ct = cipher1.encrypt(self.data) + tag = cipher1.digest() + + cipher2 = AES.new(key_mv, + AES.MODE_GCM, + nonce=nonce_mv) + key_mv[:3] = b"\xFF\xFF\xFF" + nonce_mv[:3] = b"\xFF\xFF\xFF" + cipher2.update(header_mv) + header_mv[:3] = b"\xFF\xFF\xFF" + ct_test = cipher2.encrypt(data_mv) + data_mv[:3] = b"\xFF\xFF\xFF" + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_mv = memoryview(bytearray(self.key_128)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data)) + del data_mv + + cipher4 = AES.new(key_mv, + AES.MODE_GCM, + nonce=nonce_mv) + key_mv[:3] = b"\xFF\xFF\xFF" + nonce_mv[:3] = b"\xFF\xFF\xFF" + cipher4.update(header_mv) + header_mv[:3] = b"\xFF\xFF\xFF" + pt_test = cipher4.decrypt_and_verify(memoryview(ct_test), memoryview(tag_test)) + + self.assertEqual(self.data, pt_test) + + def test_output_param(self): + + pt = b'5' * 128 + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + tag = cipher.digest() + + output = bytearray(128) + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + res, tag_out = cipher.encrypt_and_digest(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + self.assertEqual(tag, tag_out) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + res = cipher.decrypt_and_verify(ct, tag, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + def test_output_param_memoryview(self): + + pt = b'5' * 
128 + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + + output = memoryview(bytearray(128)) + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + def test_output_param_neg(self): + LEN_PT = 128 + + pt = b'5' * LEN_PT + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + ct = cipher.encrypt(pt) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0' * LEN_PT) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0' * LEN_PT) + + shorter_output = bytearray(LEN_PT - 1) + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +class GcmFSMTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + nonce_96 = get_tag_random("nonce_128", 12) + data = get_tag_random("data", 128) + + def test_valid_init_encrypt_decrypt_digest_verify(self): + # No authenticated data, fixed plaintext + # Verify path INIT->ENCRYPT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_GCM, + nonce=self.nonce_96) + ct = cipher.encrypt(self.data) + mac = cipher.digest() + + # Verify path INIT->DECRYPT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_GCM, + nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.verify(mac) + + def test_valid_init_update_digest_verify(self): + # No plaintext, fixed authenticated data + # Verify path INIT->UPDATE->DIGEST + cipher = AES.new(self.key_128, AES.MODE_GCM, + nonce=self.nonce_96) + cipher.update(self.data) + mac = cipher.digest() + + # Verify path INIT->UPDATE->VERIFY + cipher = AES.new(self.key_128, AES.MODE_GCM, + nonce=self.nonce_96) + cipher.update(self.data) + cipher.verify(mac) + + def test_valid_full_path(self): + # Fixed authenticated data, fixed plaintext + # Verify path INIT->UPDATE->ENCRYPT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_GCM, + nonce=self.nonce_96) + cipher.update(self.data) + ct = cipher.encrypt(self.data) + mac = cipher.digest() + + # Verify path INIT->UPDATE->DECRYPT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_GCM, + nonce=self.nonce_96) + cipher.update(self.data) + cipher.decrypt(ct) + cipher.verify(mac) + + def test_valid_init_digest(self): + # Verify path INIT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.digest() + + def test_valid_init_verify(self): + # Verify path INIT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + mac = cipher.digest() + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.verify(mac) + + def test_valid_multiple_encrypt_or_decrypt(self): + for method_name in "encrypt", "decrypt": + for auth_data in (None, b"333", self.data, + self.data + b"3"): + if auth_data is None: + assoc_len = None + else: + assoc_len = len(auth_data) + cipher = AES.new(self.key_128, AES.MODE_GCM, + nonce=self.nonce_96) + if auth_data is not None: + cipher.update(auth_data) + method = getattr(cipher, method_name) + method(self.data) + method(self.data) + method(self.data) + method(self.data) 
+ + def test_valid_multiple_digest_or_verify(self): + # Multiple calls to digest + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.update(self.data) + first_mac = cipher.digest() + for x in range(4): + self.assertEqual(first_mac, cipher.digest()) + + # Multiple calls to verify + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.update(self.data) + for x in range(5): + cipher.verify(first_mac) + + def test_valid_encrypt_and_digest_decrypt_and_verify(self): + # encrypt_and_digest + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.update(self.data) + ct, mac = cipher.encrypt_and_digest(self.data) + + # decrypt_and_verify + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.update(self.data) + pt = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(self.data, pt) + + def test_invalid_mixing_encrypt_decrypt(self): + # Once per method, with or without assoc. data + for method1_name, method2_name in (("encrypt", "decrypt"), + ("decrypt", "encrypt")): + for assoc_data_present in (True, False): + cipher = AES.new(self.key_128, AES.MODE_GCM, + nonce=self.nonce_96) + if assoc_data_present: + cipher.update(self.data) + getattr(cipher, method1_name)(self.data) + self.assertRaises(TypeError, getattr(cipher, method2_name), + self.data) + + def test_invalid_encrypt_or_update_after_digest(self): + for method_name in "encrypt", "update": + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.encrypt(self.data) + cipher.digest() + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.encrypt_and_digest(self.data) + + def test_invalid_decrypt_or_update_after_verify(self): + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + ct = cipher.encrypt(self.data) + mac = cipher.digest() + + for method_name in "decrypt", "update": + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.verify(mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data) + + cipher = AES.new(self.key_128, AES.MODE_GCM, nonce=self.nonce_96) + cipher.decrypt_and_verify(ct, mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data) + + +class TestVectors(unittest.TestCase): + """Class exercising the GCM test vectors found in + http://csrc.nist.gov/groups/ST/toolkit/BCM/documents/proposedmodes/gcm/gcm-revised-spec.pdf""" + + # List of test vectors, each made up of: + # - authenticated data + # - plaintext + # - ciphertext + # - MAC + # - AES key + # - nonce + test_vectors_hex = [ + ( + '', + '', + '', + '58e2fccefa7e3061367f1d57a4e7455a', + '00000000000000000000000000000000', + '000000000000000000000000' + ), + ( + '', + '00000000000000000000000000000000', + '0388dace60b6a392f328c2b971b2fe78', + 'ab6e47d42cec13bdf53a67b21257bddf', + '00000000000000000000000000000000', + '000000000000000000000000' + ), + ( + '', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b391aafd255', + '42831ec2217774244b7221b784d0d49ce3aa212f2c02a4e035c17e2329aca12e' + + '21d514b25466931c7d8f6a5aac84aa051ba30b396a0aac973d58e091473f5985', + '4d5c2af327cd64a62cf35abd2ba6fab4', + 'feffe9928665731c6d6a8f9467308308', + 'cafebabefacedbaddecaf888' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + 
'1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + '42831ec2217774244b7221b784d0d49ce3aa212f2c02a4e035c17e2329aca12e' + + '21d514b25466931c7d8f6a5aac84aa051ba30b396a0aac973d58e091', + '5bc94fbc3221a5db94fae95ae7121a47', + 'feffe9928665731c6d6a8f9467308308', + 'cafebabefacedbaddecaf888' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + '61353b4c2806934a777ff51fa22a4755699b2a714fcdc6f83766e5f97b6c7423' + + '73806900e49f24b22b097544d4896b424989b5e1ebac0f07c23f4598', + '3612d2e79e3b0785561be14aaca2fccb', + 'feffe9928665731c6d6a8f9467308308', + 'cafebabefacedbad' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + '8ce24998625615b603a033aca13fb894be9112a5c3a211a8ba262a3cca7e2ca7' + + '01e4a9a4fba43c90ccdcb281d48c7c6fd62875d2aca417034c34aee5', + '619cc5aefffe0bfa462af43c1699d050', + 'feffe9928665731c6d6a8f9467308308', + '9313225df88406e555909c5aff5269aa' + + '6a7a9538534f7da1e4c303d2a318a728c3c0c95156809539fcf0e2429a6b5254' + + '16aedbf5a0de6a57a637b39b' + ), + ( + '', + '', + '', + 'cd33b28ac773f74ba00ed1f312572435', + '000000000000000000000000000000000000000000000000', + '000000000000000000000000' + ), + ( + '', + '00000000000000000000000000000000', + '98e7247c07f0fe411c267e4384b0f600', + '2ff58d80033927ab8ef4d4587514f0fb', + '000000000000000000000000000000000000000000000000', + '000000000000000000000000' + ), + ( + '', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b391aafd255', + '3980ca0b3c00e841eb06fac4872a2757859e1ceaa6efd984628593b40ca1e19c' + + '7d773d00c144c525ac619d18c84a3f4718e2448b2fe324d9ccda2710acade256', + '9924a7c8587336bfb118024db8674a14', + 'feffe9928665731c6d6a8f9467308308feffe9928665731c', + 'cafebabefacedbaddecaf888' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + '3980ca0b3c00e841eb06fac4872a2757859e1ceaa6efd984628593b40ca1e19c' + + '7d773d00c144c525ac619d18c84a3f4718e2448b2fe324d9ccda2710', + '2519498e80f1478f37ba55bd6d27618c', + 'feffe9928665731c6d6a8f9467308308feffe9928665731c', + 'cafebabefacedbaddecaf888' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + '0f10f599ae14a154ed24b36e25324db8c566632ef2bbb34f8347280fc4507057' + + 'fddc29df9a471f75c66541d4d4dad1c9e93a19a58e8b473fa0f062f7', + '65dcc57fcf623a24094fcca40d3533f8', + 'feffe9928665731c6d6a8f9467308308feffe9928665731c', + 'cafebabefacedbad' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + 'd27e88681ce3243c4830165a8fdcf9ff1de9a1d8e6b447ef6ef7b79828666e45' + + '81e79012af34ddd9e2f037589b292db3e67c036745fa22e7e9b7373b', + 'dcf566ff291c25bbb8568fc3d376a6d9', + 'feffe9928665731c6d6a8f9467308308feffe9928665731c', + '9313225df88406e555909c5aff5269aa' + + '6a7a9538534f7da1e4c303d2a318a728c3c0c95156809539fcf0e2429a6b5254' + + '16aedbf5a0de6a57a637b39b' + ), + ( + '', + '', + '', + '530f8afbc74536b9a963b4f1c4cb738b', + 
'0000000000000000000000000000000000000000000000000000000000000000', + '000000000000000000000000' + ), + ( + '', + '00000000000000000000000000000000', + 'cea7403d4d606b6e074ec5d3baf39d18', + 'd0d1c8a799996bf0265b98b5d48ab919', + '0000000000000000000000000000000000000000000000000000000000000000', + '000000000000000000000000' + ), + ( '', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b391aafd255', + '522dc1f099567d07f47f37a32a84427d643a8cdcbfe5c0c97598a2bd2555d1aa' + + '8cb08e48590dbb3da7b08b1056828838c5f61e6393ba7a0abcc9f662898015ad', + 'b094dac5d93471bdec1a502270e3cc6c', + 'feffe9928665731c6d6a8f9467308308feffe9928665731c6d6a8f9467308308', + 'cafebabefacedbaddecaf888' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + '522dc1f099567d07f47f37a32a84427d643a8cdcbfe5c0c97598a2bd2555d1aa' + + '8cb08e48590dbb3da7b08b1056828838c5f61e6393ba7a0abcc9f662', + '76fc6ece0f4e1768cddf8853bb2d551b', + 'feffe9928665731c6d6a8f9467308308feffe9928665731c6d6a8f9467308308', + 'cafebabefacedbaddecaf888' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + 'c3762df1ca787d32ae47c13bf19844cbaf1ae14d0b976afac52ff7d79bba9de0' + + 'feb582d33934a4f0954cc2363bc73f7862ac430e64abe499f47c9b1f', + '3a337dbf46a792c45e454913fe2ea8f2', + 'feffe9928665731c6d6a8f9467308308feffe9928665731c6d6a8f9467308308', + 'cafebabefacedbad' + ), + ( + 'feedfacedeadbeeffeedfacedeadbeefabaddad2', + 'd9313225f88406e5a55909c5aff5269a86a7a9531534f7da2e4c303d8a318a72' + + '1c3c0c95956809532fcf0e2449a6b525b16aedf5aa0de657ba637b39', + '5a8def2f0c9e53f1f75d7853659e2a20eeb2b22aafde6419a058ab4f6f746bf4' + + '0fc0c3b780f244452da3ebf1c5d82cdea2418997200ef82e44ae7e3f', + 'a44a8266ee1c8eb0c8b5d4cf5ae9f19a', + 'feffe9928665731c6d6a8f9467308308feffe9928665731c6d6a8f9467308308', + '9313225df88406e555909c5aff5269aa' + + '6a7a9538534f7da1e4c303d2a318a728c3c0c95156809539fcf0e2429a6b5254' + + '16aedbf5a0de6a57a637b39b' + ) + ] + + test_vectors = [[unhexlify(x) for x in tv] for tv in test_vectors_hex] + + def runTest(self): + for assoc_data, pt, ct, mac, key, nonce in self.test_vectors: + + # Encrypt + cipher = AES.new(key, AES.MODE_GCM, nonce, mac_len=len(mac)) + cipher.update(assoc_data) + ct2, mac2 = cipher.encrypt_and_digest(pt) + self.assertEqual(ct, ct2) + self.assertEqual(mac, mac2) + + # Decrypt + cipher = AES.new(key, AES.MODE_GCM, nonce, mac_len=len(mac)) + cipher.update(assoc_data) + pt2 = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(pt, pt2) + + +class TestVectorsGueronKrasnov(unittest.TestCase): + """Class exercising the GCM test vectors found in + 'The fragility of AES-GCM authentication algorithm', Gueron, Krasnov + https://eprint.iacr.org/2013/157.pdf""" + + def test_1(self): + key = unhexlify("3da6c536d6295579c0959a7043efb503") + iv = unhexlify("2b926197d34e091ef722db94") + aad = unhexlify("00000000000000000000000000000000" + + "000102030405060708090a0b0c0d0e0f" + + "101112131415161718191a1b1c1d1e1f" + + "202122232425262728292a2b2c2d2e2f" + + "303132333435363738393a3b3c3d3e3f") + digest = unhexlify("69dd586555ce3fcc89663801a71d957b") + + cipher = AES.new(key, AES.MODE_GCM, iv).update(aad) + self.assertEqual(digest, cipher.digest()) + + def test_2(self): + key = 
unhexlify("843ffcf5d2b72694d19ed01d01249412") + iv = unhexlify("dbcca32ebf9b804617c3aa9e") + aad = unhexlify("00000000000000000000000000000000" + + "101112131415161718191a1b1c1d1e1f") + pt = unhexlify("000102030405060708090a0b0c0d0e0f" + + "101112131415161718191a1b1c1d1e1f" + + "202122232425262728292a2b2c2d2e2f" + + "303132333435363738393a3b3c3d3e3f" + + "404142434445464748494a4b4c4d4e4f") + ct = unhexlify("6268c6fa2a80b2d137467f092f657ac0" + + "4d89be2beaa623d61b5a868c8f03ff95" + + "d3dcee23ad2f1ab3a6c80eaf4b140eb0" + + "5de3457f0fbc111a6b43d0763aa422a3" + + "013cf1dc37fe417d1fbfc449b75d4cc5") + digest = unhexlify("3b629ccfbc1119b7319e1dce2cd6fd6d") + + cipher = AES.new(key, AES.MODE_GCM, iv).update(aad) + ct2, digest2 = cipher.encrypt_and_digest(pt) + + self.assertEqual(ct, ct2) + self.assertEqual(digest, digest2) + + +class NISTTestVectorsGCM(unittest.TestCase): + + def __init__(self, a): + self.use_clmul = True + unittest.TestCase.__init__(self, a) + + +class NISTTestVectorsGCM_no_clmul(unittest.TestCase): + + def __init__(self, a): + self.use_clmul = False + unittest.TestCase.__init__(self, a) + + +test_vectors_nist = load_test_vectors( + ("Cipher", "AES"), + "gcmDecrypt128.rsp", + "GCM decrypt", + {"count": lambda x: int(x)}) or [] + +test_vectors_nist += load_test_vectors( + ("Cipher", "AES"), + "gcmEncryptExtIV128.rsp", + "GCM encrypt", + {"count": lambda x: int(x)}) or [] + +for idx, tv in enumerate(test_vectors_nist): + + # The test vector file contains some directive lines + if isinstance(tv, str): + continue + + def single_test(self, tv=tv): + + self.description = tv.desc + cipher = AES.new(tv.key, AES.MODE_GCM, nonce=tv.iv, + mac_len=len(tv.tag), use_clmul=self.use_clmul) + cipher.update(tv.aad) + if "FAIL" in tv.others: + self.assertRaises(ValueError, cipher.decrypt_and_verify, + tv.ct, tv.tag) + else: + pt = cipher.decrypt_and_verify(tv.ct, tv.tag) + self.assertEqual(pt, tv.pt) + + setattr(NISTTestVectorsGCM, "test_%d" % idx, single_test) + setattr(NISTTestVectorsGCM_no_clmul, "test_%d" % idx, single_test) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings, **extra_params): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._extra_params = extra_params + self._id = "None" + + def setUp(self): + + def filter_tag(group): + return group['tagSize'] // 8 + + self.tv = load_test_vectors_wycheproof(("Cipher", "wycheproof"), + "aes_gcm_test.json", + "Wycheproof GCM", + group_tag={'tag_size': filter_tag}) + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_encrypt(self, tv): + self._id = "Wycheproof Encrypt GCM Test #" + str(tv.id) + + try: + cipher = AES.new(tv.key, AES.MODE_GCM, tv.iv, mac_len=tv.tag_size, + **self._extra_params) + except ValueError as e: + if len(tv.iv) == 0 and "Nonce cannot be empty" in str(e): + return + raise e + + cipher.update(tv.aad) + ct, tag = cipher.encrypt_and_digest(tv.msg) + if tv.valid: + self.assertEqual(ct, tv.ct) + self.assertEqual(tag, tv.tag) + self.warn(tv) + + def test_decrypt(self, tv): + self._id = "Wycheproof Decrypt GCM Test #" + str(tv.id) + + try: + cipher = AES.new(tv.key, AES.MODE_GCM, tv.iv, mac_len=tv.tag_size, + **self._extra_params) + except ValueError as e: + if len(tv.iv) == 0 and "Nonce cannot be empty" in str(e): + return + raise e + + cipher.update(tv.aad) + try: + pt = 
cipher.decrypt_and_verify(tv.ct, tv.tag) + except ValueError: + assert not tv.valid + else: + assert tv.valid + self.assertEqual(pt, tv.msg) + self.warn(tv) + + def test_corrupt_decrypt(self, tv): + self._id = "Wycheproof Corrupt Decrypt GCM Test #" + str(tv.id) + if len(tv.iv) == 0 or len(tv.ct) < 1: + return + cipher = AES.new(tv.key, AES.MODE_GCM, tv.iv, mac_len=tv.tag_size, + **self._extra_params) + cipher.update(tv.aad) + ct_corrupt = strxor(tv.ct, b"\x00" * (len(tv.ct) - 1) + b"\x01") + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct_corrupt, tv.tag) + + def runTest(self): + + for tv in self.tv: + self.test_encrypt(tv) + self.test_decrypt(tv) + self.test_corrupt_decrypt(tv) + + +class TestVariableLength(unittest.TestCase): + + def __init__(self, **extra_params): + unittest.TestCase.__init__(self) + self._extra_params = extra_params + + def runTest(self): + key = b'0' * 16 + h = SHA256.new() + + for length in range(160): + nonce = '{0:04d}'.format(length).encode('utf-8') + data = bchr(length) * length + cipher = AES.new(key, AES.MODE_GCM, nonce=nonce, **self._extra_params) + ct, tag = cipher.encrypt_and_digest(data) + h.update(ct) + h.update(tag) + + self.assertEqual(h.hexdigest(), "7b7eb1ffbe67a2e53a912067c0ec8e62ebc7ce4d83490ea7426941349811bdf4") + + +def get_tests(config={}): + from Crypto.Util import _cpu_features + + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(GcmTests) + tests += list_test_cases(GcmFSMTests) + tests += [TestVectors()] + tests += [TestVectorsWycheproof(wycheproof_warnings)] + tests += list_test_cases(TestVectorsGueronKrasnov) + tests += [TestVariableLength()] + if config.get('slow_tests'): + tests += list_test_cases(NISTTestVectorsGCM) + + if _cpu_features.have_clmul(): + tests += [TestVectorsWycheproof(wycheproof_warnings, use_clmul=False)] + tests += [TestVariableLength(use_clmul=False)] + if config.get('slow_tests'): + tests += list_test_cases(NISTTestVectorsGCM_no_clmul) + else: + print("Skipping test of PCLMULDQD in AES GCM") + + return tests + + +if __name__ == '__main__': + def suite(): + unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_OCB.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_OCB.py new file mode 100644 index 000000000..6e1a47ca3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_OCB.py @@ -0,0 +1,845 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.Util.py3compat import b, tobytes, bchr +from Crypto.Util.number import long_to_bytes +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Cipher import AES +from Crypto.Hash import SHAKE128 + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +class OcbTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + nonce_96 = get_tag_random("nonce_128", 12) + data = get_tag_random("data", 128) + + def test_loopback_128(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + pt = get_tag_random("plaintext", 16 * 100) + ct, mac = cipher.encrypt_and_digest(pt) + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + pt2 = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(pt, pt2) + + def test_nonce(self): + # Nonce is optional + AES.new(self.key_128, AES.MODE_OCB) + + cipher = AES.new(self.key_128, AES.MODE_OCB, self.nonce_96) + ct = cipher.encrypt(self.data) + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + self.assertEqual(ct, cipher.encrypt(self.data)) + + def test_nonce_must_be_bytes(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_OCB, + nonce=u'test12345678') + + def test_nonce_length(self): + # nonce cannot be empty + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB, + nonce=b("")) + + # nonce can be up to 15 bytes long + for length in range(1, 16): + AES.new(self.key_128, AES.MODE_OCB, nonce=self.data[:length]) + + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB, + nonce=self.data) + + def test_block_size_128(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + self.assertEqual(cipher.block_size, AES.block_size) + + # By default, a 15 bytes long nonce is randomly generated + nonce1 = AES.new(self.key_128, AES.MODE_OCB).nonce + nonce2 = AES.new(self.key_128, AES.MODE_OCB).nonce + self.assertEqual(len(nonce1), 15) + self.assertNotEqual(nonce1, nonce2) + + def test_nonce_attribute(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + self.assertEqual(cipher.nonce, self.nonce_96) + + # By default, a 15 bytes long nonce is randomly generated + nonce1 = AES.new(self.key_128, AES.MODE_OCB).nonce + nonce2 = AES.new(self.key_128, AES.MODE_OCB).nonce + self.assertEqual(len(nonce1), 15) + self.assertNotEqual(nonce1, nonce2) + + def test_unknown_parameters(self): + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_OCB, + self.nonce_96, 7) + self.assertRaises(TypeError, AES.new, self.key_128, AES.MODE_OCB, + nonce=self.nonce_96, unknown=7) + + # But some are only known by the base cipher + # (e.g. 
use_aesni consumed by the AES module) + AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96, + use_aesni=False) + + def test_null_encryption_decryption(self): + for func in "encrypt", "decrypt": + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + result = getattr(cipher, func)(b("")) + self.assertEqual(result, b("")) + + def test_either_encrypt_or_decrypt(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.encrypt(b("xyz")) + self.assertRaises(TypeError, cipher.decrypt, b("xyz")) + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.decrypt(b("xyz")) + self.assertRaises(TypeError, cipher.encrypt, b("xyz")) + + def test_data_must_be_bytes(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, u'test1234567890-*') + + def test_mac_len(self): + # Invalid MAC length + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB, + nonce=self.nonce_96, mac_len=7) + self.assertRaises(ValueError, AES.new, self.key_128, AES.MODE_OCB, + nonce=self.nonce_96, mac_len=16+1) + + # Valid MAC length + for mac_len in range(8, 16 + 1): + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96, + mac_len=mac_len) + _, mac = cipher.encrypt_and_digest(self.data) + self.assertEqual(len(mac), mac_len) + + # Default MAC length + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + _, mac = cipher.encrypt_and_digest(self.data) + self.assertEqual(len(mac), 16) + + def test_invalid_mac(self): + from Crypto.Util.strxor import strxor_c + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + ct, mac = cipher.encrypt_and_digest(self.data) + + invalid_mac = strxor_c(mac, 0x01) + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, + invalid_mac) + + def test_hex_mac(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + mac_hex = cipher.hexdigest() + self.assertEqual(cipher.digest(), unhexlify(mac_hex)) + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.hexverify(mac_hex) + + def test_message_chunks(self): + # Validate that both associated data and plaintext/ciphertext + # can be broken up in chunks of arbitrary length + + auth_data = get_tag_random("authenticated data", 127) + plaintext = get_tag_random("plaintext", 127) + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.update(auth_data) + ciphertext, ref_mac = cipher.encrypt_and_digest(plaintext) + + def break_up(data, chunk_length): + return [data[i:i+chunk_length] for i in range(0, len(data), + chunk_length)] + + # Encryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + pt2 = b("") + for chunk in break_up(ciphertext, chunk_length): + pt2 += cipher.decrypt(chunk) + pt2 += cipher.decrypt() + self.assertEqual(plaintext, pt2) + cipher.verify(ref_mac) + + # Decryption + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + + for chunk in break_up(auth_data, chunk_length): + cipher.update(chunk) + ct2 = b("") + for chunk in break_up(plaintext, chunk_length): + ct2 
+= cipher.encrypt(chunk) + ct2 += cipher.encrypt() + self.assertEqual(ciphertext, ct2) + self.assertEqual(cipher.digest(), ref_mac) + + def test_bytearray(self): + + # Encrypt + key_ba = bytearray(self.key_128) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data) + data_ba = bytearray(self.data) + + cipher1 = AES.new(self.key_128, + AES.MODE_OCB, + nonce=self.nonce_96) + cipher1.update(self.data) + ct = cipher1.encrypt(self.data) + cipher1.encrypt() + tag = cipher1.digest() + + cipher2 = AES.new(key_ba, + AES.MODE_OCB, + nonce=nonce_ba) + key_ba[:3] = b"\xFF\xFF\xFF" + nonce_ba[:3] = b"\xFF\xFF\xFF" + cipher2.update(header_ba) + header_ba[:3] = b"\xFF\xFF\xFF" + ct_test = cipher2.encrypt(data_ba) + cipher2.encrypt() + data_ba[:3] = b"\xFF\xFF\xFF" + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_ba = bytearray(self.key_128) + nonce_ba = bytearray(self.nonce_96) + header_ba = bytearray(self.data) + del data_ba + + cipher4 = AES.new(key_ba, + AES.MODE_OCB, + nonce=nonce_ba) + key_ba[:3] = b"\xFF\xFF\xFF" + nonce_ba[:3] = b"\xFF\xFF\xFF" + cipher4.update(header_ba) + header_ba[:3] = b"\xFF\xFF\xFF" + pt_test = cipher4.decrypt_and_verify(bytearray(ct_test), bytearray(tag_test)) + + self.assertEqual(self.data, pt_test) + + def test_memoryview(self): + + # Encrypt + key_mv = memoryview(bytearray(self.key_128)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data)) + data_mv = memoryview(bytearray(self.data)) + + cipher1 = AES.new(self.key_128, + AES.MODE_OCB, + nonce=self.nonce_96) + cipher1.update(self.data) + ct = cipher1.encrypt(self.data) + cipher1.encrypt() + tag = cipher1.digest() + + cipher2 = AES.new(key_mv, + AES.MODE_OCB, + nonce=nonce_mv) + key_mv[:3] = b"\xFF\xFF\xFF" + nonce_mv[:3] = b"\xFF\xFF\xFF" + cipher2.update(header_mv) + header_mv[:3] = b"\xFF\xFF\xFF" + ct_test = cipher2.encrypt(data_mv) + cipher2.encrypt() + data_mv[:3] = b"\xFF\xFF\xFF" + tag_test = cipher2.digest() + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key_mv = memoryview(bytearray(self.key_128)) + nonce_mv = memoryview(bytearray(self.nonce_96)) + header_mv = memoryview(bytearray(self.data)) + del data_mv + + cipher4 = AES.new(key_mv, + AES.MODE_OCB, + nonce=nonce_mv) + key_mv[:3] = b"\xFF\xFF\xFF" + nonce_mv[:3] = b"\xFF\xFF\xFF" + cipher4.update(header_mv) + header_mv[:3] = b"\xFF\xFF\xFF" + pt_test = cipher4.decrypt_and_verify(memoryview(ct_test), memoryview(tag_test)) + + self.assertEqual(self.data, pt_test) + + +class OcbFSMTests(unittest.TestCase): + + key_128 = get_tag_random("key_128", 16) + nonce_96 = get_tag_random("nonce_128", 12) + data = get_tag_random("data", 128) + + def test_valid_init_encrypt_decrypt_digest_verify(self): + # No authenticated data, fixed plaintext + # Verify path INIT->ENCRYPT->ENCRYPT(NONE)->DIGEST + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + ct = cipher.encrypt(self.data) + ct += cipher.encrypt() + mac = cipher.digest() + + # Verify path INIT->DECRYPT->DECRYPT(NONCE)->VERIFY + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.decrypt() + cipher.verify(mac) + + def test_invalid_init_encrypt_decrypt_digest_verify(self): + # No authenticated data, fixed plaintext + # Verify path INIT->ENCRYPT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_OCB, 
+ nonce=self.nonce_96) + ct = cipher.encrypt(self.data) + self.assertRaises(TypeError, cipher.digest) + + # Verify path INIT->DECRYPT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.decrypt(ct) + self.assertRaises(TypeError, cipher.verify) + + def test_valid_init_update_digest_verify(self): + # No plaintext, fixed authenticated data + # Verify path INIT->UPDATE->DIGEST + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.update(self.data) + mac = cipher.digest() + + # Verify path INIT->UPDATE->VERIFY + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.update(self.data) + cipher.verify(mac) + + def test_valid_full_path(self): + # Fixed authenticated data, fixed plaintext + # Verify path INIT->UPDATE->ENCRYPT->ENCRYPT(NONE)->DIGEST + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.update(self.data) + ct = cipher.encrypt(self.data) + ct += cipher.encrypt() + mac = cipher.digest() + + # Verify path INIT->UPDATE->DECRYPT->DECRYPT(NONE)->VERIFY + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.update(self.data) + cipher.decrypt(ct) + cipher.decrypt() + cipher.verify(mac) + + # Verify path INIT->UPDATE->ENCRYPT->ENCRYPT_AND_DIGEST + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.update(self.data) + ct1 = cipher.encrypt(self.data[:2]) + ct2, mac = cipher.encrypt_and_digest(self.data[2:]) + + # Verify path INIT->UPDATE->DECRYPT->DECRYPT_AND_VERIFY + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.update(self.data) + cipher.decrypt(ct1) + cipher.decrypt_and_verify(ct2, mac) + + def test_invalid_encrypt_after_final(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.update(self.data) + cipher.encrypt(self.data) + cipher.encrypt() + self.assertRaises(TypeError, cipher.encrypt, self.data) + + def test_invalid_decrypt_after_final(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + cipher.update(self.data) + cipher.decrypt(self.data) + cipher.decrypt() + self.assertRaises(TypeError, cipher.decrypt, self.data) + + def test_valid_init_digest(self): + # Verify path INIT->DIGEST + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.digest() + + def test_valid_init_verify(self): + # Verify path INIT->VERIFY + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + mac = cipher.digest() + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.verify(mac) + + def test_valid_multiple_encrypt_or_decrypt(self): + for method_name in "encrypt", "decrypt": + for auth_data in (None, b("333"), self.data, + self.data + b("3")): + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + if auth_data is not None: + cipher.update(auth_data) + method = getattr(cipher, method_name) + method(self.data) + method(self.data) + method(self.data) + method(self.data) + method() + + def test_valid_multiple_digest_or_verify(self): + # Multiple calls to digest + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.update(self.data) + first_mac = cipher.digest() + for x in range(4): + self.assertEqual(first_mac, cipher.digest()) + + # Multiple calls to verify + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.update(self.data) + for x in range(5): + cipher.verify(first_mac) + + def test_valid_encrypt_and_digest_decrypt_and_verify(self): + # 
encrypt_and_digest + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.update(self.data) + ct, mac = cipher.encrypt_and_digest(self.data) + + # decrypt_and_verify + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.update(self.data) + pt = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(self.data, pt) + + def test_invalid_mixing_encrypt_decrypt(self): + # Once per method, with or without assoc. data + for method1_name, method2_name in (("encrypt", "decrypt"), + ("decrypt", "encrypt")): + for assoc_data_present in (True, False): + cipher = AES.new(self.key_128, AES.MODE_OCB, + nonce=self.nonce_96) + if assoc_data_present: + cipher.update(self.data) + getattr(cipher, method1_name)(self.data) + self.assertRaises(TypeError, getattr(cipher, method2_name), + self.data) + + def test_invalid_encrypt_or_update_after_digest(self): + for method_name in "encrypt", "update": + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.encrypt(self.data) + cipher.encrypt() + cipher.digest() + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data) + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.encrypt_and_digest(self.data) + + def test_invalid_decrypt_or_update_after_verify(self): + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + ct = cipher.encrypt(self.data) + ct += cipher.encrypt() + mac = cipher.digest() + + for method_name in "decrypt", "update": + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.decrypt(ct) + cipher.decrypt() + cipher.verify(mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data) + + cipher = AES.new(self.key_128, AES.MODE_OCB, nonce=self.nonce_96) + cipher.decrypt_and_verify(ct, mac) + self.assertRaises(TypeError, getattr(cipher, method_name), + self.data) + + +def algo_rfc7253(keylen, taglen, noncelen): + """Implement the algorithm at page 18 of RFC 7253""" + + key = bchr(0) * (keylen // 8 - 1) + bchr(taglen) + C = b"" + + for i in range(128): + S = bchr(0) * i + + N = long_to_bytes(3 * i + 1, noncelen // 8) + cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8) + cipher.update(S) + C += cipher.encrypt(S) + cipher.encrypt() + cipher.digest() + + N = long_to_bytes(3 * i + 2, noncelen // 8) + cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8) + C += cipher.encrypt(S) + cipher.encrypt() + cipher.digest() + + N = long_to_bytes(3 * i + 3, noncelen // 8) + cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8) + cipher.update(S) + C += cipher.encrypt() + cipher.digest() + + N = long_to_bytes(385, noncelen // 8) + cipher = AES.new(key, AES.MODE_OCB, nonce=N, mac_len=taglen // 8) + cipher.update(C) + return cipher.encrypt() + cipher.digest() + + +class OcbRfc7253Test(unittest.TestCase): + + # Tuple with + # - nonce + # - authenticated data + # - plaintext + # - ciphertext and 16 byte MAC tag + tv1_key = "000102030405060708090A0B0C0D0E0F" + tv1 = ( + ( + "BBAA99887766554433221100", + "", + "", + "785407BFFFC8AD9EDCC5520AC9111EE6" + ), + ( + "BBAA99887766554433221101", + "0001020304050607", + "0001020304050607", + "6820B3657B6F615A5725BDA0D3B4EB3A257C9AF1F8F03009" + ), + ( + "BBAA99887766554433221102", + "0001020304050607", + "", + "81017F8203F081277152FADE694A0A00" + ), + ( + "BBAA99887766554433221103", + "", + "0001020304050607", + "45DD69F8F5AAE72414054CD1F35D82760B2CD00D2F99BFA9" + ), + ( + "BBAA99887766554433221104", + "000102030405060708090A0B0C0D0E0F", + 
"000102030405060708090A0B0C0D0E0F", + "571D535B60B277188BE5147170A9A22C3AD7A4FF3835B8C5" + "701C1CCEC8FC3358" + ), + ( + "BBAA99887766554433221105", + "000102030405060708090A0B0C0D0E0F", + "", + "8CF761B6902EF764462AD86498CA6B97" + ), + ( + "BBAA99887766554433221106", + "", + "000102030405060708090A0B0C0D0E0F", + "5CE88EC2E0692706A915C00AEB8B2396F40E1C743F52436B" + "DF06D8FA1ECA343D" + ), + ( + "BBAA99887766554433221107", + "000102030405060708090A0B0C0D0E0F1011121314151617", + "000102030405060708090A0B0C0D0E0F1011121314151617", + "1CA2207308C87C010756104D8840CE1952F09673A448A122" + "C92C62241051F57356D7F3C90BB0E07F" + ), + ( + "BBAA99887766554433221108", + "000102030405060708090A0B0C0D0E0F1011121314151617", + "", + "6DC225A071FC1B9F7C69F93B0F1E10DE" + ), + ( + "BBAA99887766554433221109", + "", + "000102030405060708090A0B0C0D0E0F1011121314151617", + "221BD0DE7FA6FE993ECCD769460A0AF2D6CDED0C395B1C3C" + "E725F32494B9F914D85C0B1EB38357FF" + ), + ( + "BBAA9988776655443322110A", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F", + "BD6F6C496201C69296C11EFD138A467ABD3C707924B964DE" + "AFFC40319AF5A48540FBBA186C5553C68AD9F592A79A4240" + ), + ( + "BBAA9988776655443322110B", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F", + "", + "FE80690BEE8A485D11F32965BC9D2A32" + ), + ( + "BBAA9988776655443322110C", + "", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F", + "2942BFC773BDA23CABC6ACFD9BFD5835BD300F0973792EF4" + "6040C53F1432BCDFB5E1DDE3BC18A5F840B52E653444D5DF" + ), + ( + "BBAA9988776655443322110D", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F2021222324252627", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F2021222324252627", + "D5CA91748410C1751FF8A2F618255B68A0A12E093FF45460" + "6E59F9C1D0DDC54B65E8628E568BAD7AED07BA06A4A69483" + "A7035490C5769E60" + ), + ( + "BBAA9988776655443322110E", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F2021222324252627", + "", + "C5CD9D1850C141E358649994EE701B68" + ), + ( + "BBAA9988776655443322110F", + "", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F2021222324252627", + "4412923493C57D5DE0D700F753CCE0D1D2D95060122E9F15" + "A5DDBFC5787E50B5CC55EE507BCB084E479AD363AC366B95" + "A98CA5F3000B1479" + ) + ) + + # Tuple with + # - key + # - nonce + # - authenticated data + # - plaintext + # - ciphertext and 12 byte MAC tag + tv2 = ( + "0F0E0D0C0B0A09080706050403020100", + "BBAA9988776655443322110D", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F2021222324252627", + "000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F2021222324252627", + "1792A4E31E0755FB03E31B22116E6C2DDF9EFD6E33D536F1" + "A0124B0A55BAE884ED93481529C76B6AD0C515F4D1CDD4FD" + "AC4F02AA" + ) + + # Tuple with + # - key length + # - MAC tag length + # - Expected output + tv3 = ( + (128, 128, "67E944D23256C5E0B6C61FA22FDF1EA2"), + (192, 128, "F673F2C3E7174AAE7BAE986CA9F29E17"), + (256, 128, "D90EB8E9C977C88B79DD793D7FFA161C"), + (128, 96, "77A3D8E73589158D25D01209"), + (192, 96, "05D56EAD2752C86BE6932C5E"), + (256, 96, "5458359AC23B0CBA9E6330DD"), + (128, 64, "192C9B7BD90BA06A"), + (192, 64, "0066BC6E0EF34E24"), + (256, 64, "7D4EA5D445501CBE"), + ) + + def test1(self): + key = unhexlify(b(self.tv1_key)) + for tv in self.tv1: + nonce, aad, pt, ct = [unhexlify(b(x)) for x in tv] + ct, mac_tag = ct[:-16], ct[-16:] + + 
cipher = AES.new(key, AES.MODE_OCB, nonce=nonce) + cipher.update(aad) + ct2 = cipher.encrypt(pt) + cipher.encrypt() + self.assertEqual(ct, ct2) + self.assertEqual(mac_tag, cipher.digest()) + + cipher = AES.new(key, AES.MODE_OCB, nonce=nonce) + cipher.update(aad) + pt2 = cipher.decrypt(ct) + cipher.decrypt() + self.assertEqual(pt, pt2) + cipher.verify(mac_tag) + + def test2(self): + + key, nonce, aad, pt, ct = [unhexlify(b(x)) for x in self.tv2] + ct, mac_tag = ct[:-12], ct[-12:] + + cipher = AES.new(key, AES.MODE_OCB, nonce=nonce, mac_len=12) + cipher.update(aad) + ct2 = cipher.encrypt(pt) + cipher.encrypt() + self.assertEqual(ct, ct2) + self.assertEqual(mac_tag, cipher.digest()) + + cipher = AES.new(key, AES.MODE_OCB, nonce=nonce, mac_len=12) + cipher.update(aad) + pt2 = cipher.decrypt(ct) + cipher.decrypt() + self.assertEqual(pt, pt2) + cipher.verify(mac_tag) + + def test3(self): + for keylen, taglen, result in self.tv3: + result2 = algo_rfc7253(keylen, taglen, 96) + self.assertEqual(unhexlify(b(result)), result2) + + +class OcbDkgTest(unittest.TestCase): + """Test vectors from https://gitlab.com/dkg/ocb-test-vectors""" + + def test_1_2(self): + tvs = [] + for fi in (1, 2): + for nb in (104, 112, 120): + tv_file = load_test_vectors(("Cipher", "AES"), + "test-vector-%d-nonce%d.txt" % (fi, nb), + "DKG tests, %d, %d bits" % (fi, nb), + {}) + if tv_file is None: + break + key = tv_file[0].k + for tv in tv_file[1:]: + tv.k = key + tvs.append(tv) + + for tv in tvs: + k, n, a, p, c = tv.k, tv.n, tv.a, tv.p, tv.c + mac_len = len(c) - len(p) + cipher = AES.new(k, AES.MODE_OCB, nonce=n, mac_len=mac_len) + cipher.update(a) + c_out, tag_out = cipher.encrypt_and_digest(p) + self.assertEqual(c, c_out + tag_out) + + def test_3(self): + + def check(keylen, taglen, noncelen, exp): + result = algo_rfc7253(keylen, taglen, noncelen) + self.assertEqual(result, unhexlify(exp)) + + # test-vector-3-nonce104.txt + check(128, 128, 104, "C47F5F0341E15326D4D1C46F47F05062") + check(192, 128, 104, "95B9167A38EB80495DFC561A8486E109") + check(256, 128, 104, "AFE1CDDB97028FD92F8FB3C8CFBA7D83") + check(128, 96, 104, "F471B4983BA80946DF217A54") + check(192, 96, 104, "5AE828BC51C24D85FA5CC7B2") + check(256, 96, 104, "8C8335982E2B734616CAD14C") + check(128, 64, 104, "B553F74B85FD1E5B") + check(192, 64, 104, "3B49D20E513531F9") + check(256, 64, 104, "ED6DA5B1216BF8BB") + + # test-vector-3-nonce112.txt + check(128, 128, 112, "CA8AFCA031BAC3F480A583BD6C50A547") + check(192, 128, 112, "D170C1DF356308079DA9A3F619147148") + check(256, 128, 112, "57F94381F2F9231EFB04AECD323757C3") + check(128, 96, 112, "3A618B2531ED39F260C750DC") + check(192, 96, 112, "9071EB89FEDBADDA88FD286E") + check(256, 96, 112, "FDF0EFB97F21A39AC4BAB5AC") + check(128, 64, 112, "FAB2FF3A8DD82A13") + check(192, 64, 112, "AC01D912BD0737D3") + check(256, 64, 112, "9D1FD0B500EA4ECF") + + # test-vector-3-nonce120.txt + check(128, 128, 120, "9E043A7140A25FB91F43BCC9DD7E0F46") + check(192, 128, 120, "680000E53908323A7F396B955B8EC641") + check(256, 128, 120, "8304B97FAACDA56E676602E1878A7E6F") + check(128, 96, 120, "81F978AC9867E825D339847D") + check(192, 96, 120, "EFCF2D60B24926ADA48CF5B1") + check(256, 96, 120, "84961DC56E917B165E58C174") + check(128, 64, 120, "227AEE6C9D905A61") + check(192, 64, 120, "541DE691B9E1A2F9") + check(256, 64, 120, "B0E761381C7129FC") + + def test_2_bugfix(self): + nonce = unhexlify("EEDDCCBBAA9988776655443322110D") + key = unhexlify("0F0E0D0C0B0A09080706050403020100") + A = unhexlify("000102030405060708090A0B0C0D0E0F1011121314151617" + 
"18191A1B1C1D1E1F2021222324252627") + P = unhexlify("000102030405060708090A0B0C0D0E0F1011121314151617" + "18191A1B1C1D1E1F2021222324252627") + C = unhexlify("07E903BFC49552411ABC865F5ECE60F6FAD1F5A9F14D3070" + "FA2F1308A563207FFE14C1EEA44B22059C7484319D8A2C53" + "C236A7B3") + mac_len = len(C) - len(P) + + # Prior to version 3.17, a nonce of maximum length (15 bytes) + # was actually used as a 14 byte nonce. The last byte was erroneously + # ignored. + buggy_result = unhexlify("BA015C4E5AE54D76C890AE81BD40DC57" + "03EDC30E8AC2A58BC5D8FA4D61C5BAE6" + "C39BEAC435B2FD56A2A5085C1B135D77" + "0C8264B7") + cipher = AES.new(key, AES.MODE_OCB, nonce=nonce[:-1], mac_len=mac_len) + cipher.update(A) + C_out2, tag_out2 = cipher.encrypt_and_digest(P) + self.assertEqual(buggy_result, C_out2 + tag_out2) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(OcbTests) + tests += list_test_cases(OcbFSMTests) + tests += list_test_cases(OcbRfc7253Test) + tests += list_test_cases(OcbDkgTest) + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_OFB.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_OFB.py new file mode 100644 index 000000000..ec145ada3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_OFB.py @@ -0,0 +1,238 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Util.py3compat import tobytes +from Crypto.Cipher import AES, DES3, DES +from Crypto.Hash import SHAKE128 +from Crypto.SelfTest.loader import load_test_vectors_wycheproof + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + +from Crypto.SelfTest.Cipher.test_CBC import BlockChainingTests + +class OfbTests(BlockChainingTests): + + aes_mode = AES.MODE_OFB + des3_mode = DES3.MODE_OFB + + # Redefine test_unaligned_data_128/64 + + def test_unaligned_data_128(self): + plaintexts = [ b"7777777" ] * 100 + + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=8) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=8) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=128) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=128) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + def test_unaligned_data_64(self): + plaintexts = [ b"7777777" ] * 100 + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=8) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=8) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=64) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=64) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + +from Crypto.SelfTest.Cipher.test_CBC import NistBlockChainingVectors + +class NistOfbVectors(NistBlockChainingVectors): + aes_mode = AES.MODE_OFB + des_mode = DES.MODE_OFB + des3_mode = DES3.MODE_OFB + + +# Create one test method per file +nist_aes_kat_mmt_files = ( + # KAT + "OFBGFSbox128.rsp", + "OFBGFSbox192.rsp", + "OFBGFSbox256.rsp", + "OFBKeySbox128.rsp", + "OFBKeySbox192.rsp", + "OFBKeySbox256.rsp", + "OFBVarKey128.rsp", + "OFBVarKey192.rsp", + "OFBVarKey256.rsp", + "OFBVarTxt128.rsp", + "OFBVarTxt192.rsp", + "OFBVarTxt256.rsp", + # MMT + "OFBMMT128.rsp", + "OFBMMT192.rsp", + "OFBMMT256.rsp", + ) +nist_aes_mct_files = ( + "OFBMCT128.rsp", + "OFBMCT192.rsp", + "OFBMCT256.rsp", + ) + +for file_name in nist_aes_kat_mmt_files: + def new_func(self, file_name=file_name): + self._do_kat_aes_test(file_name) + setattr(NistOfbVectors, "test_AES_" + file_name, new_func) + +for file_name in nist_aes_mct_files: + def new_func(self, file_name=file_name): + self._do_mct_aes_test(file_name) + setattr(NistOfbVectors, "test_AES_" + file_name, new_func) +del file_name, new_func + +nist_tdes_files = ( + "TOFBMMT2.rsp", # 2TDES + "TOFBMMT3.rsp", # 3TDES + "TOFBinvperm.rsp", # Single DES + "TOFBpermop.rsp", + "TOFBsubtab.rsp", + "TOFBvarkey.rsp", + "TOFBvartext.rsp", + ) + +for file_name in nist_tdes_files: + def new_func(self, file_name=file_name): + self._do_tdes_test(file_name) + setattr(NistOfbVectors, "test_TDES_" + file_name, new_func) + +# END OF NIST OFB TEST VECTORS + + +class SP800TestVectors(unittest.TestCase): + """Class exercising the OFB test 
vectors found in Section F.4 + of NIST SP 800-3A""" + + def test_aes_128(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = '3b3fd92eb72dad20333449f8e83cfb4a' +\ + '7789508d16918f03f53c52dac54ed825' +\ + '9740051e9c5fecf64344f7a82260edcc' +\ + '304c6528f659c77866a510d9c1d6ae5e' + key = '2b7e151628aed2a6abf7158809cf4f3c' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.encrypt(plaintext[:-8]), ciphertext[:-8]) + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.decrypt(ciphertext[:-8]), plaintext[:-8]) + + def test_aes_192(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = 'cdc80d6fddf18cab34c25909c99a4174' +\ + 'fcc28b8d4c63837c09e81700c1100401' +\ + '8d9a9aeac0f6596f559c6d4daf59a5f2' +\ + '6d9f200857ca6c3e9cac524bd9acc92a' + key = '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.encrypt(plaintext[:-8]), ciphertext[:-8]) + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.decrypt(ciphertext[:-8]), plaintext[:-8]) + + def test_aes_256(self): + plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ + 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ + '30c81c46a35ce411e5fbc1191a0a52ef' +\ + 'f69f2445df4f9b17ad2b417be66c3710' + ciphertext = 'dc7e84bfda79164b7ecd8486985d3860' +\ + '4febdc6740d20b3ac88f6ad82a4fb08d' +\ + '71ab47a086e86eedf39d1c5bba97c408' +\ + '0126141d67f37be8538f5a8be740e484' + key = '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4' + iv = '000102030405060708090a0b0c0d0e0f' + + key = unhexlify(key) + iv = unhexlify(iv) + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.encrypt(plaintext), ciphertext) + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.decrypt(ciphertext), plaintext) + + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.encrypt(plaintext[:-8]), ciphertext[:-8]) + cipher = AES.new(key, AES.MODE_OFB, iv) + self.assertEqual(cipher.decrypt(ciphertext[:-8]), plaintext[:-8]) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(OfbTests) + if config.get('slow_tests'): + tests += list_test_cases(NistOfbVectors) + tests += list_test_cases(SP800TestVectors) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_OpenPGP.py 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_OpenPGP.py new file mode 100644 index 000000000..e6cae670c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_OpenPGP.py @@ -0,0 +1,218 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Util.py3compat import tobytes +from Crypto.Cipher import AES, DES3, DES +from Crypto.Hash import SHAKE128 + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +from Crypto.SelfTest.Cipher.test_CBC import BlockChainingTests + +class OpenPGPTests(BlockChainingTests): + + aes_mode = AES.MODE_OPENPGP + des3_mode = DES3.MODE_OPENPGP + + # Redefine test_unaligned_data_128/64 + + key_128 = get_tag_random("key_128", 16) + key_192 = get_tag_random("key_192", 24) + iv_128 = get_tag_random("iv_128", 16) + iv_64 = get_tag_random("iv_64", 8) + data_128 = get_tag_random("data_128", 16) + + def test_loopback_128(self): + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) + pt = get_tag_random("plaintext", 16 * 100) + ct = cipher.encrypt(pt) + + eiv, ct = ct[:18], ct[18:] + + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_loopback_64(self): + cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, self.iv_64) + pt = get_tag_random("plaintext", 8 * 100) + ct = cipher.encrypt(pt) + + eiv, ct = ct[:10], ct[10:] + + cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, eiv) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def test_IV_iv_attributes(self): + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) + eiv = cipher.encrypt(b"") + self.assertEqual(cipher.iv, self.iv_128) + + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv) + self.assertEqual(cipher.iv, self.iv_128) + + def test_null_encryption_decryption(self): + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) + eiv = cipher.encrypt(b"") + + 
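A minimal sketch (editorial, not part of the vendored test file) of the OpenPGP-mode round trip the loopback tests above exercise: encryption prepends an encrypted IV of block_size + 2 bytes, and the decrypting cipher is built from that encrypted IV. Assumes PyCryptodome is installed; the key and plaintext values are arbitrary.

from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes

key = get_random_bytes(16)
iv = get_random_bytes(16)

enc = AES.new(key, AES.MODE_OPENPGP, iv)
ct = enc.encrypt(b"attack at dawn!!")            # encrypted IV (18 bytes for AES) + ciphertext
eiv, body = ct[:AES.block_size + 2], ct[AES.block_size + 2:]

# Decryption takes the *encrypted* IV, as the tests above do.
dec = AES.new(key, AES.MODE_OPENPGP, eiv)
assert dec.decrypt(body) == b"attack at dawn!!"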
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv) + self.assertEqual(cipher.decrypt(b""), b"") + + def test_either_encrypt_or_decrypt(self): + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) + eiv = cipher.encrypt(b"") + self.assertRaises(TypeError, cipher.decrypt, b"") + + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv) + cipher.decrypt(b"") + self.assertRaises(TypeError, cipher.encrypt, b"") + + def test_unaligned_data_128(self): + plaintexts = [ b"7777777" ] * 100 + + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + def test_unaligned_data_64(self): + plaintexts = [ b"7777777" ] * 100 + + cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, self.iv_64) + ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] + cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, self.iv_64) + self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts))) + + def test_output_param(self): + pass + + def test_output_param_same_buffer(self): + pass + + def test_output_param_memoryview(self): + pass + + def test_output_param_neg(self): + pass + + +class TestVectors(unittest.TestCase): + + def test_aes(self): + # The following test vectors have been generated with gpg v1.4.0. + # The command line used was: + # + # gpg -c -z 0 --cipher-algo AES --passphrase secret_passphrase \ + # --disable-mdc --s2k-mode 0 --output ct pt + # + # As result, the content of the file 'pt' is encrypted with a key derived + # from 'secret_passphrase' and written to file 'ct'. + # Test vectors must be extracted from 'ct', which is a collection of + # TLVs (see RFC4880 for all details): + # - the encrypted data (with the encrypted IV as prefix) is the payload + # of the TLV with tag 9 (Symmetrical Encrypted Data Packet). + # This is the ciphertext in the test vector. + # - inside the encrypted part, there is a further layer of TLVs. One must + # look for tag 11 (Literal Data Packet); in its payload, after a short + # but time dependent header, there is the content of file 'pt'. + # In the test vector, the plaintext is the complete set of TLVs that gets + # encrypted. It is not just the content of 'pt'. + # - the key is the leftmost 16 bytes of the SHA1 digest of the password. + # The test vector contains such shortened digest. + # + # Note that encryption uses a clear IV, and decryption an encrypted IV + + plaintext = 'ac18620270744fb4f647426c61636b4361745768697465436174' + ciphertext = 'dc6b9e1f095de609765c59983db5956ae4f63aea7405389d2ebb' + key = '5baa61e4c9b93f3f0682250b6cf8331b' + iv = '3d7d3e62282add7eb203eeba5c800733' + encrypted_iv='fd934601ef49cb58b6d9aebca6056bdb96ef' + + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + key = unhexlify(key) + iv = unhexlify(iv) + encrypted_iv = unhexlify(encrypted_iv) + + cipher = AES.new(key, AES.MODE_OPENPGP, iv) + ct = cipher.encrypt(plaintext) + self.assertEqual(ct[:18], encrypted_iv) + self.assertEqual(ct[18:], ciphertext) + + cipher = AES.new(key, AES.MODE_OPENPGP, encrypted_iv) + pt = cipher.decrypt(ciphertext) + self.assertEqual(pt, plaintext) + + def test_des3(self): + # The following test vectors have been generated with gpg v1.4.0. 
+ # The command line used was: + # gpg -c -z 0 --cipher-algo 3DES --passphrase secret_passphrase \ + # --disable-mdc --s2k-mode 0 --output ct pt + # For an explanation, see test_AES.py . + + plaintext = 'ac1762037074324fb53ba3596f73656d69746556616c6c6579' + ciphertext = '9979238528357b90e2e0be549cb0b2d5999b9a4a447e5c5c7d' + key = '7ade65b460f5ea9be35f9e14aa883a2048e3824aa616c0b2' + iv='cd47e2afb8b7e4b0' + encrypted_iv='6a7eef0b58050e8b904a' + + plaintext = unhexlify(plaintext) + ciphertext = unhexlify(ciphertext) + key = unhexlify(key) + iv = unhexlify(iv) + encrypted_iv = unhexlify(encrypted_iv) + + cipher = DES3.new(key, DES3.MODE_OPENPGP, iv) + ct = cipher.encrypt(plaintext) + self.assertEqual(ct[:10], encrypted_iv) + self.assertEqual(ct[10:], ciphertext) + + cipher = DES3.new(key, DES3.MODE_OPENPGP, encrypted_iv) + pt = cipher.decrypt(ciphertext) + self.assertEqual(pt, plaintext) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(OpenPGPTests) + tests += list_test_cases(TestVectors) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_SIV.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_SIV.py new file mode 100644 index 000000000..a80ddc1e2 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_SIV.py @@ -0,0 +1,552 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import json +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors_wycheproof + +from Crypto.Util.py3compat import tobytes, bchr +from Crypto.Cipher import AES +from Crypto.Hash import SHAKE128 + +from Crypto.Util.strxor import strxor + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +class SivTests(unittest.TestCase): + + key_256 = get_tag_random("key_256", 32) + key_384 = get_tag_random("key_384", 48) + key_512 = get_tag_random("key_512", 64) + nonce_96 = get_tag_random("nonce_128", 12) + data = get_tag_random("data", 128) + + def test_loopback_128(self): + for key in self.key_256, self.key_384, self.key_512: + cipher = AES.new(key, AES.MODE_SIV, nonce=self.nonce_96) + pt = get_tag_random("plaintext", 16 * 100) + ct, mac = cipher.encrypt_and_digest(pt) + + cipher = AES.new(key, AES.MODE_SIV, nonce=self.nonce_96) + pt2 = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(pt, pt2) + + def test_nonce(self): + # Deterministic encryption + AES.new(self.key_256, AES.MODE_SIV) + + cipher = AES.new(self.key_256, AES.MODE_SIV, self.nonce_96) + ct1, tag1 = cipher.encrypt_and_digest(self.data) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + ct2, tag2 = cipher.encrypt_and_digest(self.data) + self.assertEqual(ct1 + tag1, ct2 + tag2) + + def test_nonce_must_be_bytes(self): + self.assertRaises(TypeError, AES.new, self.key_256, AES.MODE_SIV, + nonce=u'test12345678') + + def test_nonce_length(self): + # nonce can be of any length (but not empty) + self.assertRaises(ValueError, AES.new, self.key_256, AES.MODE_SIV, + nonce=b"") + + for x in range(1, 128): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=bchr(1) * x) + cipher.encrypt_and_digest(b'\x01') + + def test_block_size_128(self): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertEqual(cipher.block_size, AES.block_size) + + def test_nonce_attribute(self): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertEqual(cipher.nonce, self.nonce_96) + + # By default, no nonce is randomly generated + self.assertFalse(hasattr(AES.new(self.key_256, AES.MODE_SIV), "nonce")) + + def test_unknown_parameters(self): + self.assertRaises(TypeError, AES.new, self.key_256, AES.MODE_SIV, + self.nonce_96, 7) + self.assertRaises(TypeError, AES.new, self.key_256, AES.MODE_SIV, + nonce=self.nonce_96, unknown=7) + + # But some are only known by the base cipher + # (e.g. 
use_aesni consumed by the AES module) + AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96, + use_aesni=False) + + def test_encrypt_excludes_decrypt(self): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.encrypt_and_digest(self.data) + self.assertRaises(TypeError, cipher.decrypt, self.data) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.encrypt_and_digest(self.data) + self.assertRaises(TypeError, cipher.decrypt_and_verify, + self.data, self.data) + + def test_data_must_be_bytes(self): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, u'test1234567890-*') + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt_and_verify, + u'test1234567890-*', b"xxxx") + + def test_mac_len(self): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + _, mac = cipher.encrypt_and_digest(self.data) + self.assertEqual(len(mac), 16) + + def test_invalid_mac(self): + from Crypto.Util.strxor import strxor_c + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + ct, mac = cipher.encrypt_and_digest(self.data) + + invalid_mac = strxor_c(mac, 0x01) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, + invalid_mac) + + def test_hex_mac(self): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + mac_hex = cipher.hexdigest() + self.assertEqual(cipher.digest(), unhexlify(mac_hex)) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.hexverify(mac_hex) + + def test_bytearray(self): + + # Encrypt + key = bytearray(self.key_256) + nonce = bytearray(self.nonce_96) + data = bytearray(self.data) + header = bytearray(self.data) + + cipher1 = AES.new(self.key_256, + AES.MODE_SIV, + nonce=self.nonce_96) + cipher1.update(self.data) + ct, tag = cipher1.encrypt_and_digest(self.data) + + cipher2 = AES.new(key, + AES.MODE_SIV, + nonce=nonce) + key[:3] = b'\xFF\xFF\xFF' + nonce[:3] = b'\xFF\xFF\xFF' + cipher2.update(header) + header[:3] = b'\xFF\xFF\xFF' + ct_test, tag_test = cipher2.encrypt_and_digest(data) + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key = bytearray(self.key_256) + nonce = bytearray(self.nonce_96) + header = bytearray(self.data) + ct_ba = bytearray(ct) + tag_ba = bytearray(tag) + + cipher3 = AES.new(key, + AES.MODE_SIV, + nonce=nonce) + key[:3] = b'\xFF\xFF\xFF' + nonce[:3] = b'\xFF\xFF\xFF' + cipher3.update(header) + header[:3] = b'\xFF\xFF\xFF' + pt_test = cipher3.decrypt_and_verify(ct_ba, tag_ba) + + self.assertEqual(self.data, pt_test) + + def test_memoryview(self): + + # Encrypt + key = memoryview(bytearray(self.key_256)) + nonce = memoryview(bytearray(self.nonce_96)) + data = memoryview(bytearray(self.data)) + header = memoryview(bytearray(self.data)) + + cipher1 = AES.new(self.key_256, + AES.MODE_SIV, + nonce=self.nonce_96) + cipher1.update(self.data) + ct, tag = cipher1.encrypt_and_digest(self.data) + + cipher2 = AES.new(key, + AES.MODE_SIV, + nonce=nonce) + key[:3] = b'\xFF\xFF\xFF' + nonce[:3] = b'\xFF\xFF\xFF' + cipher2.update(header) + header[:3] = b'\xFF\xFF\xFF' + ct_test, tag_test= cipher2.encrypt_and_digest(data) + + self.assertEqual(ct, ct_test) + self.assertEqual(tag, tag_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decrypt + key = 
memoryview(bytearray(self.key_256)) + nonce = memoryview(bytearray(self.nonce_96)) + header = memoryview(bytearray(self.data)) + ct_ba = memoryview(bytearray(ct)) + tag_ba = memoryview(bytearray(tag)) + + cipher3 = AES.new(key, + AES.MODE_SIV, + nonce=nonce) + key[:3] = b'\xFF\xFF\xFF' + nonce[:3] = b'\xFF\xFF\xFF' + cipher3.update(header) + header[:3] = b'\xFF\xFF\xFF' + pt_test = cipher3.decrypt_and_verify(ct_ba, tag_ba) + + self.assertEqual(self.data, pt_test) + + def test_output_param(self): + + pt = b'5' * 128 + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + ct, tag = cipher.encrypt_and_digest(pt) + + output = bytearray(128) + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + res, tag_out = cipher.encrypt_and_digest(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + self.assertEqual(tag, tag_out) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + res = cipher.decrypt_and_verify(ct, tag, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + def test_output_param_memoryview(self): + + pt = b'5' * 128 + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + ct, tag = cipher.encrypt_and_digest(pt) + + output = memoryview(bytearray(128)) + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.encrypt_and_digest(pt, output=output) + self.assertEqual(ct, output) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.decrypt_and_verify(ct, tag, output=output) + self.assertEqual(pt, output) + + def test_output_param_neg(self): + LEN_PT = 128 + + pt = b'5' * LEN_PT + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + ct, tag = cipher.encrypt_and_digest(pt) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt_and_digest, pt, output=b'0' * LEN_PT) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt_and_verify, ct, tag, output=b'0' * LEN_PT) + + shorter_output = bytearray(LEN_PT - 1) + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.encrypt_and_digest, pt, output=shorter_output) + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + self.assertRaises(ValueError, cipher.decrypt_and_verify, ct, tag, output=shorter_output) + + +class SivFSMTests(unittest.TestCase): + + key_256 = get_tag_random("key_256", 32) + nonce_96 = get_tag_random("nonce_96", 12) + data = get_tag_random("data", 128) + + def test_invalid_init_encrypt(self): + # Path INIT->ENCRYPT fails + cipher = AES.new(self.key_256, AES.MODE_SIV, + nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.encrypt, b"xxx") + + def test_invalid_init_decrypt(self): + # Path INIT->DECRYPT fails + cipher = AES.new(self.key_256, AES.MODE_SIV, + nonce=self.nonce_96) + self.assertRaises(TypeError, cipher.decrypt, b"xxx") + + def test_valid_init_update_digest_verify(self): + # No plaintext, fixed authenticated data + # Verify path INIT->UPDATE->DIGEST + cipher = AES.new(self.key_256, AES.MODE_SIV, + nonce=self.nonce_96) + cipher.update(self.data) + mac = cipher.digest() + + # Verify path INIT->UPDATE->VERIFY + cipher = AES.new(self.key_256, AES.MODE_SIV, + nonce=self.nonce_96) + cipher.update(self.data) + cipher.verify(mac) + + def test_valid_init_digest(self): + # Verify path INIT->DIGEST + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + 
cipher.digest() + + def test_valid_init_verify(self): + # Verify path INIT->VERIFY + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + mac = cipher.digest() + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.verify(mac) + + def test_valid_multiple_digest_or_verify(self): + # Multiple calls to digest + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.update(self.data) + first_mac = cipher.digest() + for x in range(4): + self.assertEqual(first_mac, cipher.digest()) + + # Multiple calls to verify + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.update(self.data) + for x in range(5): + cipher.verify(first_mac) + + def test_valid_encrypt_and_digest_decrypt_and_verify(self): + # encrypt_and_digest + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.update(self.data) + ct, mac = cipher.encrypt_and_digest(self.data) + + # decrypt_and_verify + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.update(self.data) + pt = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(self.data, pt) + + def test_invalid_multiple_encrypt_and_digest(self): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + ct, tag = cipher.encrypt_and_digest(self.data) + self.assertRaises(TypeError, cipher.encrypt_and_digest, b'') + + def test_invalid_multiple_decrypt_and_verify(self): + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + ct, tag = cipher.encrypt_and_digest(self.data) + + cipher = AES.new(self.key_256, AES.MODE_SIV, nonce=self.nonce_96) + cipher.decrypt_and_verify(ct, tag) + self.assertRaises(TypeError, cipher.decrypt_and_verify, ct, tag) + + +def transform(tv): + new_tv = [[unhexlify(x) for x in tv[0].split("-")]] + new_tv += [ unhexlify(x) for x in tv[1:5]] + if tv[5]: + nonce = unhexlify(tv[5]) + else: + nonce = None + new_tv += [ nonce ] + return new_tv + + +class TestVectors(unittest.TestCase): + """Class exercising the SIV test vectors found in RFC5297""" + + # This is a list of tuples with 5 items: + # + # 1. Header + '|' + plaintext + # 2. Header + '|' + ciphertext + '|' + MAC + # 3. AES-128 key + # 4. Description + # 5. Dictionary of parameters to be passed to AES.new(). + # It must include the nonce. + # + # A "Header" is a dash ('-') separated sequece of components. 
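For orientation (an editorial sketch, not one of the RFC 5297 vectors): each dash-separated header component below is passed to update() as associated data before encrypt_and_digest(), mirroring the AEAD flow in the tests above. Assumes PyCryptodome; key, nonce and data are arbitrary.

from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes

key = get_random_bytes(32)             # AES-SIV keys are double length: 32, 48 or 64 bytes
nonce = get_random_bytes(12)

enc = AES.new(key, AES.MODE_SIV, nonce=nonce)
enc.update(b"header-1")                # associated data: authenticated, not encrypted
enc.update(b"header-2")
ct, tag = enc.encrypt_and_digest(b"some plaintext")

dec = AES.new(key, AES.MODE_SIV, nonce=nonce)
dec.update(b"header-1")
dec.update(b"header-2")
assert dec.decrypt_and_verify(ct, tag) == b"some plaintext"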
+ # + test_vectors_hex = [ + ( + '101112131415161718191a1b1c1d1e1f2021222324252627', + '112233445566778899aabbccddee', + '40c02b9690c4dc04daef7f6afe5c', + '85632d07c6e8f37f950acd320a2ecc93', + 'fffefdfcfbfaf9f8f7f6f5f4f3f2f1f0f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff', + None + ), + ( + '00112233445566778899aabbccddeeffdeaddadadeaddadaffeeddccbbaa9988' + + '7766554433221100-102030405060708090a0', + '7468697320697320736f6d6520706c61696e7465787420746f20656e63727970' + + '74207573696e67205349562d414553', + 'cb900f2fddbe404326601965c889bf17dba77ceb094fa663b7a3f748ba8af829' + + 'ea64ad544a272e9c485b62a3fd5c0d', + '7bdb6e3b432667eb06f4d14bff2fbd0f', + '7f7e7d7c7b7a79787776757473727170404142434445464748494a4b4c4d4e4f', + '09f911029d74e35bd84156c5635688c0' + ), + ] + + test_vectors = [ transform(tv) for tv in test_vectors_hex ] + + def runTest(self): + for assoc_data, pt, ct, mac, key, nonce in self.test_vectors: + + # Encrypt + cipher = AES.new(key, AES.MODE_SIV, nonce=nonce) + for x in assoc_data: + cipher.update(x) + ct2, mac2 = cipher.encrypt_and_digest(pt) + self.assertEqual(ct, ct2) + self.assertEqual(mac, mac2) + + # Decrypt + cipher = AES.new(key, AES.MODE_SIV, nonce=nonce) + for x in assoc_data: + cipher.update(x) + pt2 = cipher.decrypt_and_verify(ct, mac) + self.assertEqual(pt, pt2) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self): + unittest.TestCase.__init__(self) + self._id = "None" + + def setUp(self): + self.tv = load_test_vectors_wycheproof(("Cipher", "wycheproof"), + "aes_siv_cmac_test.json", + "Wycheproof AES SIV") + + def shortDescription(self): + return self._id + + def test_encrypt(self, tv): + self._id = "Wycheproof Encrypt AES-SIV Test #" + str(tv.id) + + cipher = AES.new(tv.key, AES.MODE_SIV) + cipher.update(tv.aad) + ct, tag = cipher.encrypt_and_digest(tv.msg) + if tv.valid: + self.assertEqual(tag + ct, tv.ct) + + def test_decrypt(self, tv): + self._id = "Wycheproof Decrypt AES_SIV Test #" + str(tv.id) + + cipher = AES.new(tv.key, AES.MODE_SIV) + cipher.update(tv.aad) + try: + pt = cipher.decrypt_and_verify(tv.ct[16:], tv.ct[:16]) + except ValueError: + assert not tv.valid + else: + assert tv.valid + self.assertEqual(pt, tv.msg) + + def runTest(self): + + for tv in self.tv: + self.test_encrypt(tv) + self.test_decrypt(tv) + + +class TestVectorsWycheproof2(unittest.TestCase): + + def __init__(self): + unittest.TestCase.__init__(self) + self._id = "None" + + def setUp(self): + self.tv = load_test_vectors_wycheproof(("Cipher", "wycheproof"), + "aead_aes_siv_cmac_test.json", + "Wycheproof AEAD SIV") + + def shortDescription(self): + return self._id + + def test_encrypt(self, tv): + self._id = "Wycheproof Encrypt AEAD-AES-SIV Test #" + str(tv.id) + + cipher = AES.new(tv.key, AES.MODE_SIV, nonce=tv.iv) + cipher.update(tv.aad) + ct, tag = cipher.encrypt_and_digest(tv.msg) + if tv.valid: + self.assertEqual(ct, tv.ct) + self.assertEqual(tag, tv.tag) + + def test_decrypt(self, tv): + self._id = "Wycheproof Decrypt AEAD-AES-SIV Test #" + str(tv.id) + + cipher = AES.new(tv.key, AES.MODE_SIV, nonce=tv.iv) + cipher.update(tv.aad) + try: + pt = cipher.decrypt_and_verify(tv.ct, tv.tag) + except ValueError: + assert not tv.valid + else: + assert tv.valid + self.assertEqual(pt, tv.msg) + + def runTest(self): + + for tv in self.tv: + self.test_encrypt(tv) + self.test_decrypt(tv) + + +def get_tests(config={}): + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(SivTests) + tests += list_test_cases(SivFSMTests) + tests += [ 
TestVectors() ] + tests += [ TestVectorsWycheproof() ] + tests += [ TestVectorsWycheproof2() ] + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_Salsa20.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_Salsa20.py new file mode 100644 index 000000000..a710462ed --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_Salsa20.py @@ -0,0 +1,367 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/Salsa20.py: Self-test for the Salsa20 stream cipher +# +# Written in 2013 by Fabrizio Tarizzo +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.Salsa20""" + +import unittest + +from Crypto.Util.py3compat import bchr + +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Cipher import Salsa20 + +from .common import make_stream_tests + +# This is a list of (plaintext, ciphertext, key[, description[, params]]) +# tuples. 
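Before the vector list, a short editorial sketch of how these tests drive the cipher: Salsa20 is a stream cipher, so encryption and decryption apply the same keystream XOR. Assumes PyCryptodome; the key and nonce values simply mirror those used by the bytearray tests further down.

from Crypto.Cipher import Salsa20

key = b"9" * 32                        # 16- or 32-byte keys are accepted
nonce = b"t" * 8                       # the nonce must be exactly 8 bytes

enc = Salsa20.new(key=key, nonce=nonce)
ct = enc.encrypt(b"attack at dawn")

dec = Salsa20.new(key=key, nonce=nonce)
assert dec.decrypt(ct) == b"attack at dawn"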
+test_data = [ + # Test vectors are taken from + # http://www.ecrypt.eu.org/stream/svn/viewcvs.cgi/ecrypt/trunk/submissions/salsa20/full/verified.test-vectors + ( '00' * 512, + '4dfa5e481da23ea09a31022050859936da52fcee218005164f267cb65f5cfd7f' + + '2b4f97e0ff16924a52df269515110a07f9e460bc65ef95da58f740b7d1dbb0aa' + + 'd64cec189c7eb8c6bbf3d7376c80a481d43e628701f6a27afb9fe23919f24114' + + '8db44f70d7063efcc3dd55a0893a613c3c6fe1c127bd6f59910589293bb6ef9e' + + 'e24819066dee1a64f49b0bbad5988635272b169af861f85df881939f29ada6fd' + + '0241410e8d332ae4798d929434a2630de451ec4e0169694cbaa7ebb121ea6a2b' + + 'da9c1581f429e0a00f7d67e23b730676783b262e8eb43a25f55fb90b3e753aef' + + '8c6713ec66c51881111593ccb3e8cb8f8de124080501eeeb389c4bcb6977cf95' + + '7d5789631eb4554400e1e025935dfa7b3e9039d61bdc58a8697d36815bf1985c' + + 'efdf7ae112e5bb81e37ecf0616ce7147fc08a93a367e08631f23c03b00a8da2f' + + 'aa5024e5c8d30aca43fc2d5082067b21b234bc741d68fb292c6012c3764ccee3' + + '1e364a5403e00cfee338a21a01e7d3cefd5a770ca0ab48c435ea6116435f7ad8' + + '30b217b49f978a68e207ed9f462af7fb195b2115fe8f24f152e4ddc32202d6f2' + + 'b52fafbcfbc202d8a259a611e901d3f62d065eb13f09bbc45cd45119b843efaa' + + 'b375703739daced4dd4059fd71c3c47fc2f9939670fad4a46066adcc6a564578' + + '3308b90ffb72be04a6b147cbe38cc0c3b9267c296a92a7c69873f9f263be9703', + '80000000000000000000000000000000', + '128 bits key, set 1, vector 0', + dict (iv='00'*8)), + + ( '00' * 512, + 'e3be8fdd8beca2e3ea8ef9475b29a6e7003951e1097a5c38d23b7a5fad9f6844' + + 'b22c97559e2723c7cbbd3fe4fc8d9a0744652a83e72a9c461876af4d7ef1a117' + + '8da2b74eef1b6283e7e20166abcae538e9716e4669e2816b6b20c5c356802001' + + 'cc1403a9a117d12a2669f456366d6ebb0f1246f1265150f793cdb4b253e348ae' + + '203d89bc025e802a7e0e00621d70aa36b7e07cb1e7d5b38d5e222b8b0e4b8407' + + '0142b1e29504767d76824850320b5368129fdd74e861b498e3be8d16f2d7d169' + + '57be81f47b17d9ae7c4ff15429a73e10acf250ed3a90a93c711308a74c6216a9' + + 'ed84cd126da7f28e8abf8bb63517e1ca98e712f4fb2e1a6aed9fdc73291faa17' + + '958211c4ba2ebd5838c635edb81f513a91a294e194f1c039aeec657dce40aa7e' + + '7c0af57cacefa40c9f14b71a4b3456a63e162ec7d8d10b8ffb1810d71001b618' + + '2f9f73da53b85405c11f7b2d890fa8ae0c7f2e926d8a98c7ec4e91b65120e988' + + '349631a700c6facec3471cb0413656e75e309456584084d7e12c5b43a41c43ed' + + '9a048abd9b880da65f6a665a20fe7b77cd292fe62cae644b7f7df69f32bdb331' + + '903e6505ce44fdc293920c6a9ec7057e23df7dad298f82ddf4efb7fdc7bfc622' + + '696afcfd0cddcc83c7e77f11a649d79acdc3354e9635ff137e929933a0bd6f53' + + '77efa105a3a4266b7c0d089d08f1e855cc32b15b93784a36e56a76cc64bc8477', + '8000000000000000000000000000000000000000000000000000000000000000', + '256 bits key, set 1, vector 0', + dict (iv='00'*8)), + + ( '00' * 512, + '169060ccb42bea7bee4d8012a02f3635eb7bca12859fa159cd559094b3507db8' + + '01735d1a1300102a9c9415546829cbd2021ba217b39b81d89c55b13d0c603359' + + '3f84159a3c84f4b4f4a0edcd9d38ff261a737909e0b66d68b5cac496f3a5be99' + + 'cb12c321ab711afaab36cc0947955e1a9bb952ed54425e7711279fbc81bb83f5' + + '6e55cea44e6daddb05858a153ea6213b3350c12aa1a83ef2726f09485fa71790' + + 'f9b9f922c7dda1113b1f9d56658ed3402803f511bc1f122601d5e7f0ff036e23' + + '23ef24bb24195b9fd574823cd8a40c29d86bd35c191e2038779ff696c712b6d8' + + '2e7014dbe1ac5d527af076c088c4a8d44317958189f6ef54933a7e0816b5b916' + + 'd8f12ed8afe9422b85e5cc9b8adec9d6cfabe8dbc1082bccc02f5a7266aa074c' + + 'a284e583a35837798cc0e69d4ce937653b8cdd65ce414b89138615ccb165ad19' + + '3c6b9c3d05eef4be921a10ea811fe61d11c6867600188e065daff90b509ec56b' + + 'd41e7e8968c478c78d590c2d2ee24ea009c8f49bc3d81672cfc47895a9e21c9a' + + 
'471ebf8e294bee5d2de436ac8d052bf31111b345f1da23c3a4d13b9fc5f0900a' + + 'a298f98f538973b8fad40d4d159777de2cfe2a3dead1645ddb49794827dba040' + + 'f70a0ff4ecd155e0f033604693a51e2363880e2ecf98699e7174af7c2c6b0fc6' + + '59ae329599a3949272a37b9b2183a0910922a3f325ae124dcbdd735364055ceb', + '09090909090909090909090909090909', + '128 bits key, set 2, vector 9', + dict (iv='00'*8)), + + ( '00' * 512, + '7041e747ceb22ed7812985465f50333124f971da1c5d6efe5ca201b886f31046' + + 'e757e5c3ec914f60ed1f6bce2819b6810953f12b8ba1199bf82d746a8b8a88f1' + + '142002978ec4c35b95dc2c82990f9e847a0ab45f2ca72625f5190c820f29f3aa' + + 'f5f0b5572b06b70a144f2a240c3b3098d4831fa1ce1459f8d1df226a6a79b0ab' + + '41e91799ef31b5ff3d756c19126b19025858ee70fbd69f2be955cb011c005e31' + + '32b271b378f39b0cb594e95c99ce6ff17735a541891845bbf0450afcb4a850b9' + + '4ee90afb713ae7e01295c74381180a3816d7020d5a396c0d97aaa783eaabb6ec' + + '44d5111157f2212d1b1b8fca7893e8b520cd482418c272ab119b569a2b9598eb' + + '355624d12e79adab81153b58cd22eaf1b2a32395dedc4a1c66f4d274070b9800' + + 'ea95766f0245a8295f8aadb36ddbbdfa936417c8dbc6235d19494036964d3e70' + + 'b125b0f800c3d53881d9d11e7970f827c2f9556935cd29e927b0aceb8cae5fd4' + + '0fd88a8854010a33db94c96c98735858f1c5df6844f864feaca8f41539313e7f' + + '3c0610214912cd5e6362197646207e2d64cd5b26c9dfe0822629dcbeb16662e8' + + '9ff5bf5cf2e499138a5e27bd5027329d0e68ddf53103e9e409523662e27f61f6' + + '5cf38c1232023e6a6ef66c315bcb2a4328642faabb7ca1e889e039e7c444b34b' + + 'b3443f596ac730f3df3dfcdb343c307c80f76e43e8898c5e8f43dc3bb280add0', + '0909090909090909090909090909090909090909090909090909090909090909', + '256 bits key, set 2, vector 9', + dict (iv='00'*8)), + + ( '00' * 1024, + '71daee5142d0728b41b6597933ebf467e43279e30978677078941602629cbf68' + + 'b73d6bd2c95f118d2b3e6ec955dabb6dc61c4143bc9a9b32b99dbe6866166dc0' + + '8631b7d6553050303d7252c264d3a90d26c853634813e09ad7545a6ce7e84a5d' + + 'fc75ec43431207d5319970b0faadb0e1510625bb54372c8515e28e2accf0a993' + + '0ad15f431874923d2a59e20d9f2a5367dba6051564f150287debb1db536ff9b0' + + '9ad981f25e5010d85d76ee0c305f755b25e6f09341e0812f95c94f42eead346e' + + '81f39c58c5faa2c88953dc0cac90469db2063cb5cdb22c9eae22afbf0506fca4' + + '1dc710b846fbdfe3c46883dd118f3a5e8b11b6afd9e71680d8666557301a2daa' + + 'fb9496c559784d35a035360885f9b17bd7191977deea932b981ebdb29057ae3c' + + '92cfeff5e6c5d0cb62f209ce342d4e35c69646ccd14e53350e488bb310a32f8b' + + '0248e70acc5b473df537ced3f81a014d4083932bedd62ed0e447b6766cd2604b' + + '706e9b346c4468beb46a34ecf1610ebd38331d52bf33346afec15eefb2a7699e' + + '8759db5a1f636a48a039688e39de34d995df9f27ed9edc8dd795e39e53d9d925' + + 'b278010565ff665269042f05096d94da3433d957ec13d2fd82a0066283d0d1ee' + + 'b81bf0ef133b7fd90248b8ffb499b2414cd4fa003093ff0864575a43749bf596' + + '02f26c717fa96b1d057697db08ebc3fa664a016a67dcef8807577cc3a09385d3' + + 'f4dc79b34364bb3b166ce65fe1dd28e3950fe6fa81063f7b16ce1c0e6daac1f8' + + '188455b77752045e863c9b256ad92bc6e2d08314c5bba191c274f42dfbb3d652' + + 'bb771956555e880f84cd8b827a4c5a52f3a099fa0259bd4aac3efd541f191170' + + '4412d6e85fbcc628b335875b9fef24807f6e1bc66c3186159e1e7f5a13913e02' + + 'd241ce2efdbcaa275039fb14eac5923d17ffbc7f1abd3b45e92127575bfbabf9' + + '3a257ebef0aa1437b326e41b585af572f7239c33b32981a1577a4f629b027e1e' + + 'b49d58cc497e944d79cef44357c2bf25442ab779651e991147bf79d6fd3a8868' + + '0cd3b1748e07fd10d78aceef6db8a5e563570d40127f754146c34a440f2a991a' + + '23fa39d365141f255041f2135c5cba4373452c114da1801bacca38610e3a6524' + + '2b822d32de4ab5a7d3cf9b61b37493c863bd12e2cae10530cddcda2cb7a5436b' + + 
'ef8988d4d24e8cdc31b2d2a3586340bc5141f8f6632d0dd543bfed81eb471ba1' + + 'f3dc2225a15ffddcc03eb48f44e27e2aa390598adf83f15c6608a5f18d4dfcf0' + + 'f547d467a4d70b281c83a595d7660d0b62de78b9cca023cca89d7b1f83484638' + + '0e228c25f049184a612ef5bb3d37454e6cfa5b10dceda619d898a699b3c8981a' + + '173407844bb89b4287bf57dd6600c79e352c681d74b03fa7ea0d7bf6ad69f8a6' + + '8ecb001963bd2dd8a2baa0083ec09751cd9742402ad716be16d5c052304cfca1', + '0F62B5085BAE0154A7FA4DA0F34699EC', + '128 bits key, Set 6, vector# 3', + dict (iv='288FF65DC42B92F9')), + + ( '00' * 1024, + '5e5e71f90199340304abb22a37b6625bf883fb89ce3b21f54a10b81066ef87da' + + '30b77699aa7379da595c77dd59542da208e5954f89e40eb7aa80a84a6176663f' + + 'd910cde567cf1ff60f7040548d8f376bfd1f44c4774aac37410ede7d5c3463fc' + + '4508a603201d8495ad257894e5eb1914b53e8da5e4bf2bc83ac87ce55cc67df7' + + '093d9853d2a83a9c8be969175df7c807a17156df768445dd0874a9271c6537f5' + + 'ce0466473582375f067fa4fcdaf65dbc0139cd75e8c21a482f28c0fb8c3d9f94' + + '22606cc8e88fe28fe73ec3cb10ff0e8cc5f2a49e540f007265c65b7130bfdb98' + + '795b1df9522da46e48b30e55d9f0d787955ece720205b29c85f3ad9be33b4459' + + '7d21b54d06c9a60b04b8e640c64e566e51566730e86cf128ab14174f91bd8981' + + 'a6fb00fe587bbd6c38b5a1dfdb04ea7e61536fd229f957aa9b070ca931358e85' + + '11b92c53c523cb54828fb1513c5636fa9a0645b4a3c922c0db94986d92f314ff' + + '7852c03b231e4dceea5dd8cced621869cff818daf3c270ff3c8be2e5c74be767' + + 'a4e1fdf3327a934fe31e46df5a74ae2021cee021d958c4f615263d99a5ddae7f' + + 'eab45e6eccbafefe4761c57750847b7e75ee2e2f14333c0779ce4678f47b1e1b' + + '760a03a5f17d6e91d4b42313b3f1077ee270e432fe04917ed1fc8babebf7c941' + + '42b80dfb44a28a2a3e59093027606f6860bfb8c2e5897078cfccda7314c70035' + + 'f137de6f05daa035891d5f6f76e1df0fce1112a2ff0ac2bd3534b5d1bf4c7165' + + 'fb40a1b6eacb7f295711c4907ae457514a7010f3a342b4427593d61ba993bc59' + + '8bd09c56b9ee53aac5dd861fa4b4bb53888952a4aa9d8ca8671582de716270e1' + + '97375b3ee49e51fa2bf4ef32015dd9a764d966aa2ae541592d0aa650849e99ca' + + '5c6c39beebf516457cc32fe4c105bff314a12f1ec94bdf4d626f5d9b1cbbde42' + + 'e5733f0885765ba29e2e82c829d312f5fc7e180679ac84826c08d0a644b326d0' + + '44da0fdcc75fa53cfe4ced0437fa4df5a7ecbca8b4cb7c4a9ecf9a60d00a56eb' + + '81da52adc21f508dbb60a9503a3cc94a896616d86020d5b0e5c637329b6d396a' + + '41a21ba2c4a9493cf33fa2d4f10f77d5b12fdad7e478ccfe79b74851fc96a7ca' + + '6320c5efd561a222c0ab0fb44bbda0e42149611d2262bb7d1719150fa798718a' + + '0eec63ee297cad459869c8b0f06c4e2b56cbac03cd2605b2a924efedf85ec8f1' + + '9b0b6c90e7cbd933223ffeb1b3a3f9677657905829294c4c70acdb8b0891b47d' + + '0875d0cd6c0f4efe2917fc44b581ef0d1e4280197065d07da34ab33283364552' + + 'efad0bd9257b059acdd0a6f246812feb69e7e76065f27dbc2eee94da9cc41835' + + 'bf826e36e5cebe5d4d6a37a6a666246290ce51a0c082718ab0ec855668db1add' + + 'a658e5f257e0db39384d02e6145c4c00eaa079098f6d820d872de711b6ed08cf', + '0F62B5085BAE0154A7FA4DA0F34699EC3F92E5388BDE3184D72A7DD02376C91C', + '256 bits key, Set 6, vector# 3', + dict (iv='288FF65DC42B92F9')), + +] + + +class KeyLength(unittest.TestCase): + + def runTest(self): + + nonce = bchr(0) * 8 + for key_length in (15, 30, 33): + key = bchr(1) * key_length + self.assertRaises(ValueError, Salsa20.new, key, nonce) + + +class NonceTests(unittest.TestCase): + + def test_invalid_nonce_length(self): + key = bchr(1) * 16 + self.assertRaises(ValueError, Salsa20.new, key, bchr(0) * 7) + self.assertRaises(ValueError, Salsa20.new, key, bchr(0) * 9) + + def test_default_nonce(self): + + cipher1 = Salsa20.new(bchr(1) * 16) + cipher2 = Salsa20.new(bchr(1) * 16) + self.assertEqual(len(cipher1.nonce), 
8) + self.assertNotEqual(cipher1.nonce, cipher2.nonce) + + +class ByteArrayTest(unittest.TestCase): + """Verify we can encrypt or decrypt bytearrays""" + + def runTest(self): + + data = b"0123" + key = b"9" * 32 + nonce = b"t" * 8 + + # Encryption + data_ba = bytearray(data) + key_ba = bytearray(key) + nonce_ba = bytearray(nonce) + + cipher1 = Salsa20.new(key=key, nonce=nonce) + ct = cipher1.encrypt(data) + + cipher2 = Salsa20.new(key=key_ba, nonce=nonce_ba) + key_ba[:1] = b'\xFF' + nonce_ba[:1] = b'\xFF' + ct_test = cipher2.encrypt(data_ba) + + self.assertEqual(ct, ct_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decryption + key_ba = bytearray(key) + nonce_ba = bytearray(nonce) + ct_ba = bytearray(ct) + + cipher3 = Salsa20.new(key=key_ba, nonce=nonce_ba) + key_ba[:1] = b'\xFF' + nonce_ba[:1] = b'\xFF' + pt_test = cipher3.decrypt(ct_ba) + + self.assertEqual(data, pt_test) + + +class MemoryviewTest(unittest.TestCase): + """Verify we can encrypt or decrypt bytearrays""" + + def runTest(self): + + data = b"0123" + key = b"9" * 32 + nonce = b"t" * 8 + + # Encryption + data_mv = memoryview(bytearray(data)) + key_mv = memoryview(bytearray(key)) + nonce_mv = memoryview(bytearray(nonce)) + + cipher1 = Salsa20.new(key=key, nonce=nonce) + ct = cipher1.encrypt(data) + + cipher2 = Salsa20.new(key=key_mv, nonce=nonce_mv) + key_mv[:1] = b'\xFF' + nonce_mv[:1] = b'\xFF' + ct_test = cipher2.encrypt(data_mv) + + self.assertEqual(ct, ct_test) + self.assertEqual(cipher1.nonce, cipher2.nonce) + + # Decryption + key_mv = memoryview(bytearray(key)) + nonce_mv = memoryview(bytearray(nonce)) + ct_mv = memoryview(bytearray(ct)) + + cipher3 = Salsa20.new(key=key_mv, nonce=nonce_mv) + key_mv[:1] = b'\xFF' + nonce_mv[:1] = b'\xFF' + pt_test = cipher3.decrypt(ct_mv) + + self.assertEqual(data, pt_test) + + +class TestOutput(unittest.TestCase): + + def runTest(self): + # Encrypt/Decrypt data and test output parameter + + key = b'4' * 32 + nonce = b'5' * 8 + cipher = Salsa20.new(key=key, nonce=nonce) + + pt = b'5' * 300 + ct = cipher.encrypt(pt) + + output = bytearray(len(pt)) + cipher = Salsa20.new(key=key, nonce=nonce) + res = cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + self.assertEqual(res, None) + + cipher = Salsa20.new(key=key, nonce=nonce) + res = cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + self.assertEqual(res, None) + + output = memoryview(bytearray(len(pt))) + cipher = Salsa20.new(key=key, nonce=nonce) + cipher.encrypt(pt, output=output) + self.assertEqual(ct, output) + + cipher = Salsa20.new(key=key, nonce=nonce) + cipher.decrypt(ct, output=output) + self.assertEqual(pt, output) + + cipher = Salsa20.new(key=key, nonce=nonce) + self.assertRaises(TypeError, cipher.encrypt, pt, output=b'0'*len(pt)) + + cipher = Salsa20.new(key=key, nonce=nonce) + self.assertRaises(TypeError, cipher.decrypt, ct, output=b'0'*len(ct)) + + shorter_output = bytearray(len(pt) - 1) + + cipher = Salsa20.new(key=key, nonce=nonce) + self.assertRaises(ValueError, cipher.encrypt, pt, output=shorter_output) + + cipher = Salsa20.new(key=key, nonce=nonce) + self.assertRaises(ValueError, cipher.decrypt, ct, output=shorter_output) + + +def get_tests(config={}): + tests = make_stream_tests(Salsa20, "Salsa20", test_data) + tests.append(KeyLength()) + tests += list_test_cases(NonceTests) + tests.append(ByteArrayTest()) + tests.append(MemoryviewTest()) + tests.append(TestOutput()) + + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: 
unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py new file mode 100644 index 000000000..e16a54385 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/test_pkcs1_15.py: Self-test for PKCS#1 v1.5 encryption +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from __future__ import print_function + +import unittest + +from Crypto.PublicKey import RSA +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex +from Crypto import Random +from Crypto.Cipher import PKCS1_v1_5 as PKCS +from Crypto.Util.py3compat import b +from Crypto.Util.number import bytes_to_long, long_to_bytes +from Crypto.SelfTest.loader import load_test_vectors_wycheproof + + +def rws(t): + """Remove white spaces, tabs, and new lines from a string""" + for c in ['\n', '\t', ' ']: + t = t.replace(c, '') + return t + + +def t2b(t): + """Convert a text string with bytes in hex form to a byte string""" + clean = b(rws(t)) + if len(clean) % 2 == 1: + raise ValueError("Even number of characters expected") + return a2b_hex(clean) + + +class PKCS1_15_Tests(unittest.TestCase): + + def setUp(self): + self.rng = Random.new().read + self.key1024 = RSA.generate(1024, self.rng) + + # List of tuples with test data for PKCS#1 v1.5. 
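As a quick orientation for the tests that follow (an editorial sketch, not part of the vendored file): PKCS#1 v1.5 decryption in this API does not raise on a padding failure when a sentinel is supplied; it returns the sentinel instead, so callers cannot distinguish failure by exception. Assumes PyCryptodome; the 1024-bit key mirrors the setUp above.

from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_v1_5
from Crypto.Random import get_random_bytes

key = RSA.generate(1024)
cipher = PKCS1_v1_5.new(key)

msg = b"secret"
ct = cipher.encrypt(msg)               # plaintext may be at most modulus_len - 11 bytes

# On success the plaintext comes back; on a padding error the sentinel is
# returned instead of an exception being raised.
sentinel = get_random_bytes(len(msg))
assert cipher.decrypt(ct, sentinel) == msg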
+ # Each tuple is made up by: + # Item #0: dictionary with RSA key component, or key to import + # Item #1: plaintext + # Item #2: ciphertext + # Item #3: random data + + _testData = ( + + # + # Generated with openssl 0.9.8o + # + ( + # Private key + '''-----BEGIN RSA PRIVATE KEY----- +MIICXAIBAAKBgQDAiAnvIAOvqVwJTaYzsKnefZftgtXGE2hPJppGsWl78yz9jeXY +W/FxX/gTPURArNhdnhP6n3p2ZaDIBrO2zizbgIXs0IsljTTcr4vnI8fMXzyNUOjA +zP3nzMqZDZK6757XQAobOssMkBFqRWwilT/3DsBhRpl3iMUhF+wvpTSHewIDAQAB +AoGAC4HV/inOrpgTvSab8Wj0riyZgQOZ3U3ZpSlsfR8ra9Ib9Uee3jCYnKscu6Gk +y6zI/cdt8EPJ4PuwAWSNJzbpbVaDvUq25OD+CX8/uRT08yBS4J8TzBitZJTD4lS7 +atdTnKT0Wmwk+u8tDbhvMKwnUHdJLcuIsycts9rwJVapUtkCQQDvDpx2JMun0YKG +uUttjmL8oJ3U0m3ZvMdVwBecA0eebZb1l2J5PvI3EJD97eKe91Nsw8T3lwpoN40k +IocSVDklAkEAzi1HLHE6EzVPOe5+Y0kGvrIYRRhncOb72vCvBZvD6wLZpQgqo6c4 +d3XHFBBQWA6xcvQb5w+VVEJZzw64y25sHwJBAMYReRl6SzL0qA0wIYrYWrOt8JeQ +8mthulcWHXmqTgC6FEXP9Es5GD7/fuKl4wqLKZgIbH4nqvvGay7xXLCXD/ECQH9a +1JYNMtRen5unSAbIOxRcKkWz92F0LKpm9ZW/S9vFHO+mBcClMGoKJHiuQxLBsLbT +NtEZfSJZAeS2sUtn3/0CQDb2M2zNBTF8LlM0nxmh0k9VGm5TVIyBEMcipmvOgqIs +HKukWBcq9f/UOmS0oEhai/6g+Uf7VHJdWaeO5LzuvwU= +-----END RSA PRIVATE KEY-----''', + # Plaintext + '''THIS IS PLAINTEXT\x0A''', + # Ciphertext + '''3f dc fd 3c cd 5c 9b 12 af 65 32 e3 f7 d0 da 36 + 8f 8f d9 e3 13 1c 7f c8 b3 f9 c1 08 e4 eb 79 9c + 91 89 1f 96 3b 94 77 61 99 a4 b1 ee 5d e6 17 c9 + 5d 0a b5 63 52 0a eb 00 45 38 2a fb b0 71 3d 11 + f7 a1 9e a7 69 b3 af 61 c0 bb 04 5b 5d 4b 27 44 + 1f 5b 97 89 ba 6a 08 95 ee 4f a2 eb 56 64 e5 0f + da 7c f9 9a 61 61 06 62 ed a0 bc 5f aa 6c 31 78 + 70 28 1a bb 98 3c e3 6a 60 3c d1 0b 0f 5a f4 75''', + # Random data + '''eb d7 7d 86 a4 35 23 a3 54 7e 02 0b 42 1d + 61 6c af 67 b8 4e 17 56 80 66 36 04 64 34 26 8a + 47 dd 44 b3 1a b2 17 60 f4 91 2e e2 b5 95 64 cc + f9 da c8 70 94 54 86 4c ef 5b 08 7d 18 c4 ab 8d + 04 06 33 8f ca 15 5f 52 60 8a a1 0c f5 08 b5 4c + bb 99 b8 94 25 04 9c e6 01 75 e6 f9 63 7a 65 61 + 13 8a a7 47 77 81 ae 0d b8 2c 4d 50 a5''' + ), + ) + + def testEncrypt1(self): + for test in self._testData: + # Build the key + key = RSA.importKey(test[0]) + # RNG that takes its random numbers from a pool given + # at initialization + class randGen: + def __init__(self, data): + self.data = data + self.idx = 0 + def __call__(self, N): + r = self.data[self.idx:self.idx+N] + self.idx += N + return r + # The real test + cipher = PKCS.new(key, randfunc=randGen(t2b(test[3]))) + ct = cipher.encrypt(b(test[1])) + self.assertEqual(ct, t2b(test[2])) + + def testEncrypt2(self): + # Verify that encryption fail if plaintext is too long + pt = '\x00'*(128-11+1) + cipher = PKCS.new(self.key1024) + self.assertRaises(ValueError, cipher.encrypt, pt) + + def testVerify1(self): + for test in self._testData: + key = RSA.importKey(test[0]) + expected_pt = b(test[1]) + ct = t2b(test[2]) + cipher = PKCS.new(key) + + # The real test + pt = cipher.decrypt(ct, None) + self.assertEqual(pt, expected_pt) + + pt = cipher.decrypt(ct, b'\xFF' * len(expected_pt)) + self.assertEqual(pt, expected_pt) + + def testVerify2(self): + # Verify that decryption fails if ciphertext is not as long as + # RSA modulus + cipher = PKCS.new(self.key1024) + self.assertRaises(ValueError, cipher.decrypt, '\x00'*127, "---") + self.assertRaises(ValueError, cipher.decrypt, '\x00'*129, "---") + + # Verify that decryption fails if there are less then 8 non-zero padding + # bytes + pt = b('\x00\x02' + '\xFF'*7 + '\x00' + '\x45'*118) + pt_int = bytes_to_long(pt) + ct_int = self.key1024._encrypt(pt_int) + ct = long_to_bytes(ct_int, 128) + 
self.assertEqual(b"---", cipher.decrypt(ct, b"---")) + + def testEncryptVerify1(self): + # Encrypt/Verify messages of length [0..RSAlen-11] + # and therefore padding [8..117] + for pt_len in range(0, 128 - 11 + 1): + pt = self.rng(pt_len) + cipher = PKCS.new(self.key1024) + ct = cipher.encrypt(pt) + pt2 = cipher.decrypt(ct, b'\xAA' * pt_len) + self.assertEqual(pt, pt2) + + def test_encrypt_verify_exp_pt_len(self): + + cipher = PKCS.new(self.key1024) + pt = b'5' * 16 + ct = cipher.encrypt(pt) + sentinel = b'\xAA' * 16 + + pt_A = cipher.decrypt(ct, sentinel, 16) + self.assertEqual(pt, pt_A) + + pt_B = cipher.decrypt(ct, sentinel, 15) + self.assertEqual(sentinel, pt_B) + + pt_C = cipher.decrypt(ct, sentinel, 17) + self.assertEqual(sentinel, pt_C) + + def testByteArray(self): + pt = b"XER" + cipher = PKCS.new(self.key1024) + ct = cipher.encrypt(bytearray(pt)) + pt2 = cipher.decrypt(bytearray(ct), '\xFF' * len(pt)) + self.assertEqual(pt, pt2) + + def testMemoryview(self): + pt = b"XER" + cipher = PKCS.new(self.key1024) + ct = cipher.encrypt(memoryview(bytearray(pt))) + pt2 = cipher.decrypt(memoryview(bytearray(ct)), b'\xFF' * len(pt)) + self.assertEqual(pt, pt2) + + def test_return_type(self): + pt = b"XYZ" + cipher = PKCS.new(self.key1024) + ct = cipher.encrypt(pt) + self.assertTrue(isinstance(ct, bytes)) + pt2 = cipher.decrypt(ct, b'\xAA' * 3) + self.assertTrue(isinstance(pt2, bytes)) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings, skip_slow_tests): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._skip_slow_tests = skip_slow_tests + self._id = "None" + + def load_tests(self, filename): + + def filter_rsa(group): + return RSA.import_key(group['privateKeyPem']) + + result = load_test_vectors_wycheproof(("Cipher", "wycheproof"), + filename, + "Wycheproof PKCS#1v1.5 (%s)" % filename, + group_tag={'rsa_key': filter_rsa} + ) + return result + + def setUp(self): + self.tv = [] + self.tv.extend(self.load_tests("rsa_pkcs1_2048_test.json")) + if not self._skip_slow_tests: + self.tv.extend(self.load_tests("rsa_pkcs1_3072_test.json")) + self.tv.extend(self.load_tests("rsa_pkcs1_4096_test.json")) + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_decrypt(self, tv): + self._id = "Wycheproof Decrypt PKCS#1v1.5 Test #%s" % tv.id + sentinel = b'\xAA' * max(3, len(tv.msg)) + cipher = PKCS.new(tv.rsa_key) + try: + pt = cipher.decrypt(tv.ct, sentinel=sentinel) + except ValueError: + assert not tv.valid + else: + if pt == sentinel: + assert not tv.valid + else: + assert tv.valid + self.assertEqual(pt, tv.msg) + self.warn(tv) + + def runTest(self): + + for tv in self.tv: + self.test_decrypt(tv) + + +def get_tests(config={}): + skip_slow_tests = not config.get('slow_tests') + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(PKCS1_15_Tests) + tests += [TestVectorsWycheproof(wycheproof_warnings, skip_slow_tests)] + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py new file mode 100644 
index 000000000..171158194 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py @@ -0,0 +1,506 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/test_pkcs1_oaep.py: Self-test for PKCS#1 OAEP encryption +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +import unittest + +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex +from Crypto.SelfTest.loader import load_test_vectors_wycheproof + +from Crypto.PublicKey import RSA +from Crypto.Cipher import PKCS1_OAEP as PKCS +from Crypto.Hash import MD2, MD5, SHA1, SHA256, RIPEMD160, SHA224, SHA384, SHA512 +from Crypto import Random +from Crypto.Signature.pss import MGF1 + +from Crypto.Util.py3compat import b, bchr + + +def rws(t): + """Remove white spaces, tabs, and new lines from a string""" + for c in ['\n', '\t', ' ']: + t = t.replace(c, '') + return t + + +def t2b(t): + """Convert a text string with bytes in hex form to a byte string""" + clean = rws(t) + if len(clean) % 2 == 1: + raise ValueError("Even number of characters expected") + return a2b_hex(clean) + + +class PKCS1_OAEP_Tests(unittest.TestCase): + + def setUp(self): + self.rng = Random.new().read + self.key1024 = RSA.generate(1024, self.rng) + + # List of tuples with test data for PKCS#1 OAEP + # Each tuple is made up by: + # Item #0: dictionary with RSA key component + # Item #1: plaintext + # Item #2: ciphertext + # Item #3: random data (=seed) + # Item #4: hash object + + _testData = ( + + # + # From in oaep-int.txt to be found in + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''bb f8 2f 09 06 82 ce 9c 23 38 ac 2b 9d a8 71 f7 + 36 8d 07 ee d4 10 43 a4 40 d6 b6 f0 74 54 f5 1f + b8 df ba af 03 5c 02 ab 61 ea 48 ce eb 6f cd 48 + 76 ed 52 0d 60 e1 ec 46 19 71 9d 8a 5b 8b 80 7f + af b8 e0 a3 df c7 37 72 3e e6 b4 b7 d9 3a 25 84 + ee 6a 64 9d 06 09 53 74 88 34 b2 45 45 98 39 4e + e0 aa b1 2d 7b 61 a5 1f 52 7a 9a 41 f6 c1 68 7f + e2 53 72 98 ca 2a 8f 59 46 f8 e5 fd 09 1d bd cb''', + # Public key + 'e':'11', + # In the test vector, only p and q were given... 
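For orientation (an editorial sketch, not one of the RSA Security vectors): the OAEP flow exercised by this file encrypts with the public key and decrypts with the private key, using SHA-1 as the mask/label hash. Assumes PyCryptodome; the key is freshly generated and the hashAlgo keyword is the parameter PyCryptodome's PKCS1_OAEP.new() takes for the hash choice.

from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP
from Crypto.Hash import SHA1

key = RSA.generate(1024)

# Encrypt with the public half; SHA1 is the default hash and matches the vectors below.
encryptor = PKCS1_OAEP.new(key.publickey(), hashAlgo=SHA1)
ct = encryptor.encrypt(b"attack at dawn")

# Decrypt with the private key; a ValueError signals a malformed ciphertext.
decryptor = PKCS1_OAEP.new(key, hashAlgo=SHA1)
assert decryptor.decrypt(ct) == b"attack at dawn"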
+ # d is computed offline as e^{-1} mod (p-1)(q-1) + 'd':'''a5dafc5341faf289c4b988db30c1cdf83f31251e0 + 668b42784813801579641b29410b3c7998d6bc465745e5c3 + 92669d6870da2c082a939e37fdcb82ec93edac97ff3ad595 + 0accfbc111c76f1a9529444e56aaf68c56c092cd38dc3bef + 5d20a939926ed4f74a13eddfbe1a1cecc4894af9428c2b7b + 8883fe4463a4bc85b1cb3c1''' + } + , + # Plaintext + '''d4 36 e9 95 69 fd 32 a7 c8 a0 5b bc 90 d3 2c 49''', + # Ciphertext + '''12 53 e0 4d c0 a5 39 7b b4 4a 7a b8 7e 9b f2 a0 + 39 a3 3d 1e 99 6f c8 2a 94 cc d3 00 74 c9 5d f7 + 63 72 20 17 06 9e 52 68 da 5d 1c 0b 4f 87 2c f6 + 53 c1 1d f8 23 14 a6 79 68 df ea e2 8d ef 04 bb + 6d 84 b1 c3 1d 65 4a 19 70 e5 78 3b d6 eb 96 a0 + 24 c2 ca 2f 4a 90 fe 9f 2e f5 c9 c1 40 e5 bb 48 + da 95 36 ad 87 00 c8 4f c9 13 0a de a7 4e 55 8d + 51 a7 4d df 85 d8 b5 0d e9 68 38 d6 06 3e 09 55''', + # Random + '''aa fd 12 f6 59 ca e6 34 89 b4 79 e5 07 6d de c2 + f0 6c b5 8f''', + # Hash + SHA1, + ), + + # + # From in oaep-vect.txt to be found in Example 1.1 + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''a8 b3 b2 84 af 8e b5 0b 38 70 34 a8 60 f1 46 c4 + 91 9f 31 87 63 cd 6c 55 98 c8 ae 48 11 a1 e0 ab + c4 c7 e0 b0 82 d6 93 a5 e7 fc ed 67 5c f4 66 85 + 12 77 2c 0c bc 64 a7 42 c6 c6 30 f5 33 c8 cc 72 + f6 2a e8 33 c4 0b f2 58 42 e9 84 bb 78 bd bf 97 + c0 10 7d 55 bd b6 62 f5 c4 e0 fa b9 84 5c b5 14 + 8e f7 39 2d d3 aa ff 93 ae 1e 6b 66 7b b3 d4 24 + 76 16 d4 f5 ba 10 d4 cf d2 26 de 88 d3 9f 16 fb''', + 'e':'''01 00 01''', + 'd':'''53 33 9c fd b7 9f c8 46 6a 65 5c 73 16 ac a8 5c + 55 fd 8f 6d d8 98 fd af 11 95 17 ef 4f 52 e8 fd + 8e 25 8d f9 3f ee 18 0f a0 e4 ab 29 69 3c d8 3b + 15 2a 55 3d 4a c4 d1 81 2b 8b 9f a5 af 0e 7f 55 + fe 73 04 df 41 57 09 26 f3 31 1f 15 c4 d6 5a 73 + 2c 48 31 16 ee 3d 3d 2d 0a f3 54 9a d9 bf 7c bf + b7 8a d8 84 f8 4d 5b eb 04 72 4d c7 36 9b 31 de + f3 7d 0c f5 39 e9 cf cd d3 de 65 37 29 ea d5 d1 ''' + } + , + # Plaintext + '''66 28 19 4e 12 07 3d b0 3b a9 4c da 9e f9 53 23 + 97 d5 0d ba 79 b9 87 00 4a fe fe 34''', + # Ciphertext + '''35 4f e6 7b 4a 12 6d 5d 35 fe 36 c7 77 79 1a 3f + 7b a1 3d ef 48 4e 2d 39 08 af f7 22 fa d4 68 fb + 21 69 6d e9 5d 0b e9 11 c2 d3 17 4f 8a fc c2 01 + 03 5f 7b 6d 8e 69 40 2d e5 45 16 18 c2 1a 53 5f + a9 d7 bf c5 b8 dd 9f c2 43 f8 cf 92 7d b3 13 22 + d6 e8 81 ea a9 1a 99 61 70 e6 57 a0 5a 26 64 26 + d9 8c 88 00 3f 84 77 c1 22 70 94 a0 d9 fa 1e 8c + 40 24 30 9c e1 ec cc b5 21 00 35 d4 7a c7 2e 8a''', + # Random + '''18 b7 76 ea 21 06 9d 69 77 6a 33 e9 6b ad 48 e1 + dd a0 a5 ef''', + SHA1 + ), + + # + # From in oaep-vect.txt to be found in Example 2.1 + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''01 94 7c 7f ce 90 42 5f 47 27 9e 70 85 1f 25 d5 + e6 23 16 fe 8a 1d f1 93 71 e3 e6 28 e2 60 54 3e + 49 01 ef 60 81 f6 8c 0b 81 41 19 0d 2a e8 da ba + 7d 12 50 ec 6d b6 36 e9 44 ec 37 22 87 7c 7c 1d + 0a 67 f1 4b 16 94 c5 f0 37 94 51 a4 3e 49 a3 2d + de 83 67 0b 73 da 91 a1 c9 9b c2 3b 43 6a 60 05 + 5c 61 0f 0b af 99 c1 a0 79 56 5b 95 a3 f1 52 66 + 32 d1 d4 da 60 f2 0e da 25 e6 53 c4 f0 02 76 6f + 45''', + 'e':'''01 00 01''', + 'd':'''08 23 f2 0f ad b5 da 89 08 8a 9d 00 89 3e 21 fa + 4a 1b 11 fb c9 3c 64 a3 be 0b aa ea 97 fb 3b 93 + c3 ff 71 37 04 c1 9c 96 3c 1d 10 7a ae 99 05 47 + 39 f7 9e 02 e1 86 de 86 f8 7a 6d de fe a6 d8 cc + d1 d3 c8 1a 47 bf a7 25 5b e2 06 01 a4 a4 b2 f0 + 8a 16 7b 5e 27 9d 71 5b 1b 45 5b dd 7e ab 24 59 + 41 d9 76 8b 9a ce fb 3c cd a5 95 2d a3 ce e7 25 + 25 b4 50 16 63 a8 ee 15 c9 e9 92 d9 
24 62 fe 39''' + }, + # Plaintext + '''8f f0 0c aa 60 5c 70 28 30 63 4d 9a 6c 3d 42 c6 + 52 b5 8c f1 d9 2f ec 57 0b ee e7''', + # Ciphertext + '''01 81 af 89 22 b9 fc b4 d7 9d 92 eb e1 98 15 99 + 2f c0 c1 43 9d 8b cd 49 13 98 a0 f4 ad 3a 32 9a + 5b d9 38 55 60 db 53 26 83 c8 b7 da 04 e4 b1 2a + ed 6a ac df 47 1c 34 c9 cd a8 91 ad dc c2 df 34 + 56 65 3a a6 38 2e 9a e5 9b 54 45 52 57 eb 09 9d + 56 2b be 10 45 3f 2b 6d 13 c5 9c 02 e1 0f 1f 8a + bb 5d a0 d0 57 09 32 da cf 2d 09 01 db 72 9d 0f + ef cc 05 4e 70 96 8e a5 40 c8 1b 04 bc ae fe 72 + 0e''', + # Random + '''8c 40 7b 5e c2 89 9e 50 99 c5 3e 8c e7 93 bf 94 + e7 1b 17 82''', + SHA1 + ), + + # + # From in oaep-vect.txt to be found in Example 10.1 + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''ae 45 ed 56 01 ce c6 b8 cc 05 f8 03 93 5c 67 4d + db e0 d7 5c 4c 09 fd 79 51 fc 6b 0c ae c3 13 a8 + df 39 97 0c 51 8b ff ba 5e d6 8f 3f 0d 7f 22 a4 + 02 9d 41 3f 1a e0 7e 4e be 9e 41 77 ce 23 e7 f5 + 40 4b 56 9e 4e e1 bd cf 3c 1f b0 3e f1 13 80 2d + 4f 85 5e b9 b5 13 4b 5a 7c 80 85 ad ca e6 fa 2f + a1 41 7e c3 76 3b e1 71 b0 c6 2b 76 0e de 23 c1 + 2a d9 2b 98 08 84 c6 41 f5 a8 fa c2 6b da d4 a0 + 33 81 a2 2f e1 b7 54 88 50 94 c8 25 06 d4 01 9a + 53 5a 28 6a fe b2 71 bb 9b a5 92 de 18 dc f6 00 + c2 ae ea e5 6e 02 f7 cf 79 fc 14 cf 3b dc 7c d8 + 4f eb bb f9 50 ca 90 30 4b 22 19 a7 aa 06 3a ef + a2 c3 c1 98 0e 56 0c d6 4a fe 77 95 85 b6 10 76 + 57 b9 57 85 7e fd e6 01 09 88 ab 7d e4 17 fc 88 + d8 f3 84 c4 e6 e7 2c 3f 94 3e 0c 31 c0 c4 a5 cc + 36 f8 79 d8 a3 ac 9d 7d 59 86 0e aa da 6b 83 bb''', + 'e':'''01 00 01''', + 'd':'''05 6b 04 21 6f e5 f3 54 ac 77 25 0a 4b 6b 0c 85 + 25 a8 5c 59 b0 bd 80 c5 64 50 a2 2d 5f 43 8e 59 + 6a 33 3a a8 75 e2 91 dd 43 f4 8c b8 8b 9d 5f c0 + d4 99 f9 fc d1 c3 97 f9 af c0 70 cd 9e 39 8c 8d + 19 e6 1d b7 c7 41 0a 6b 26 75 df bf 5d 34 5b 80 + 4d 20 1a dd 50 2d 5c e2 df cb 09 1c e9 99 7b be + be 57 30 6f 38 3e 4d 58 81 03 f0 36 f7 e8 5d 19 + 34 d1 52 a3 23 e4 a8 db 45 1d 6f 4a 5b 1b 0f 10 + 2c c1 50 e0 2f ee e2 b8 8d ea 4a d4 c1 ba cc b2 + 4d 84 07 2d 14 e1 d2 4a 67 71 f7 40 8e e3 05 64 + fb 86 d4 39 3a 34 bc f0 b7 88 50 1d 19 33 03 f1 + 3a 22 84 b0 01 f0 f6 49 ea f7 93 28 d4 ac 5c 43 + 0a b4 41 49 20 a9 46 0e d1 b7 bc 40 ec 65 3e 87 + 6d 09 ab c5 09 ae 45 b5 25 19 01 16 a0 c2 61 01 + 84 82 98 50 9c 1c 3b f3 a4 83 e7 27 40 54 e1 5e + 97 07 50 36 e9 89 f6 09 32 80 7b 52 57 75 1e 79''' + }, + # Plaintext + '''8b ba 6b f8 2a 6c 0f 86 d5 f1 75 6e 97 95 68 70 + b0 89 53 b0 6b 4e b2 05 bc 16 94 ee''', + # Ciphertext + '''53 ea 5d c0 8c d2 60 fb 3b 85 85 67 28 7f a9 15 + 52 c3 0b 2f eb fb a2 13 f0 ae 87 70 2d 06 8d 19 + ba b0 7f e5 74 52 3d fb 42 13 9d 68 c3 c5 af ee + e0 bf e4 cb 79 69 cb f3 82 b8 04 d6 e6 13 96 14 + 4e 2d 0e 60 74 1f 89 93 c3 01 4b 58 b9 b1 95 7a + 8b ab cd 23 af 85 4f 4c 35 6f b1 66 2a a7 2b fc + c7 e5 86 55 9d c4 28 0d 16 0c 12 67 85 a7 23 eb + ee be ff 71 f1 15 94 44 0a ae f8 7d 10 79 3a 87 + 74 a2 39 d4 a0 4c 87 fe 14 67 b9 da f8 52 08 ec + 6c 72 55 79 4a 96 cc 29 14 2f 9a 8b d4 18 e3 c1 + fd 67 34 4b 0c d0 82 9d f3 b2 be c6 02 53 19 62 + 93 c6 b3 4d 3f 75 d3 2f 21 3d d4 5c 62 73 d5 05 + ad f4 cc ed 10 57 cb 75 8f c2 6a ee fa 44 12 55 + ed 4e 64 c1 99 ee 07 5e 7f 16 64 61 82 fd b4 64 + 73 9b 68 ab 5d af f0 e6 3e 95 52 01 68 24 f0 54 + bf 4d 3c 8c 90 a9 7b b6 b6 55 32 84 eb 42 9f cc''', + # Random + '''47 e1 ab 71 19 fe e5 6c 95 ee 5e aa d8 6f 40 d0 + aa 63 bd 33''', + SHA1 + ), + ) + + def testEncrypt1(self): + # Verify encryption using all test vectors + 
for test in self._testData: + # Build the key + comps = [int(rws(test[0][x]), 16) for x in ('n', 'e')] + key = RSA.construct(comps) + + # RNG that takes its random numbers from a pool given + # at initialization + class randGen: + + def __init__(self, data): + self.data = data + self.idx = 0 + + def __call__(self, N): + r = self.data[self.idx:N] + self.idx += N + return r + + # The real test + cipher = PKCS.new(key, test[4], randfunc=randGen(t2b(test[3]))) + ct = cipher.encrypt(t2b(test[1])) + self.assertEqual(ct, t2b(test[2])) + + def testEncrypt2(self): + # Verify that encryption fails if plaintext is too long + pt = '\x00'*(128-2*20-2+1) + cipher = PKCS.new(self.key1024) + self.assertRaises(ValueError, cipher.encrypt, pt) + + def testDecrypt1(self): + # Verify decryption using all test vectors + for test in self._testData: + # Build the key + comps = [int(rws(test[0][x]),16) for x in ('n', 'e', 'd')] + key = RSA.construct(comps) + # The real test + cipher = PKCS.new(key, test[4]) + pt = cipher.decrypt(t2b(test[2])) + self.assertEqual(pt, t2b(test[1])) + + def testDecrypt2(self): + # Simplest possible negative tests + for ct_size in (127, 128, 129): + cipher = PKCS.new(self.key1024) + self.assertRaises(ValueError, cipher.decrypt, bchr(0x00)*ct_size) + + def testEncryptDecrypt1(self): + # Encrypt/Decrypt messages of length [0..128-2*20-2] + for pt_len in range(0, 128-2*20-2): + pt = self.rng(pt_len) + cipher = PKCS.new(self.key1024) + ct = cipher.encrypt(pt) + pt2 = cipher.decrypt(ct) + self.assertEqual(pt, pt2) + + def testEncryptDecrypt2(self): + # Helper function to monitor what's requested from RNG + global asked + + def localRng(N): + global asked + asked += N + return self.rng(N) + + # Verify that OAEP is friendly to all hashes + for hashmod in (MD2, MD5, SHA1, SHA256, RIPEMD160): + # Verify that encrypt() asks for as many random bytes + # as the hash output size + asked = 0 + pt = self.rng(40) + cipher = PKCS.new(self.key1024, hashmod, randfunc=localRng) + ct = cipher.encrypt(pt) + self.assertEqual(cipher.decrypt(ct), pt) + self.assertEqual(asked, hashmod.digest_size) + + def testEncryptDecrypt3(self): + # Verify that OAEP supports labels + pt = self.rng(35) + xlabel = self.rng(22) + cipher = PKCS.new(self.key1024, label=xlabel) + ct = cipher.encrypt(pt) + self.assertEqual(cipher.decrypt(ct), pt) + + def testEncryptDecrypt4(self): + # Verify that encrypt() uses the custom MGF + global mgfcalls + # Helper function to monitor what's requested from MGF + + def newMGF(seed, maskLen): + global mgfcalls + mgfcalls += 1 + return b'\x00' * maskLen + + mgfcalls = 0 + pt = self.rng(32) + cipher = PKCS.new(self.key1024, mgfunc=newMGF) + ct = cipher.encrypt(pt) + self.assertEqual(mgfcalls, 2) + self.assertEqual(cipher.decrypt(ct), pt) + + def testByteArray(self): + pt = b("XER") + cipher = PKCS.new(self.key1024) + ct = cipher.encrypt(bytearray(pt)) + pt2 = cipher.decrypt(bytearray(ct)) + self.assertEqual(pt, pt2) + + def testMemoryview(self): + pt = b("XER") + cipher = PKCS.new(self.key1024) + ct = cipher.encrypt(memoryview(bytearray(pt))) + pt2 = cipher.decrypt(memoryview(bytearray(ct))) + self.assertEqual(pt, pt2) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings, skip_slow_tests): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._skip_slow_tests = skip_slow_tests + self._id = "None" + + def load_tests(self, filename): + + def filter_rsa(group): + return RSA.import_key(group['privateKeyPem']) + + def 
filter_sha(group): + if group['sha'] == "SHA-1": + return SHA1 + elif group['sha'] == "SHA-224": + return SHA224 + elif group['sha'] == "SHA-256": + return SHA256 + elif group['sha'] == "SHA-384": + return SHA384 + elif group['sha'] == "SHA-512": + return SHA512 + else: + raise ValueError("Unknown sha " + group['sha']) + + def filter_mgf(group): + if group['mgfSha'] == "SHA-1": + return lambda x, y: MGF1(x, y, SHA1) + elif group['mgfSha'] == "SHA-224": + return lambda x, y: MGF1(x, y, SHA224) + elif group['mgfSha'] == "SHA-256": + return lambda x, y: MGF1(x, y, SHA256) + elif group['mgfSha'] == "SHA-384": + return lambda x, y: MGF1(x, y, SHA384) + elif group['mgfSha'] == "SHA-512": + return lambda x, y: MGF1(x, y, SHA512) + else: + raise ValueError("Unknown mgf/sha " + group['mgfSha']) + + def filter_algo(group): + return "%s with MGF1/%s" % (group['sha'], group['mgfSha']) + + result = load_test_vectors_wycheproof(("Cipher", "wycheproof"), + filename, + "Wycheproof PKCS#1 OAEP (%s)" % filename, + group_tag={'rsa_key': filter_rsa, + 'hash_mod': filter_sha, + 'mgf': filter_mgf, + 'algo': filter_algo} + ) + return result + + def setUp(self): + self.tv = [] + self.tv.extend(self.load_tests("rsa_oaep_2048_sha1_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_2048_sha224_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_2048_sha224_mgf1sha224_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_2048_sha256_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_2048_sha256_mgf1sha256_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_2048_sha384_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_2048_sha384_mgf1sha384_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_2048_sha512_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_2048_sha512_mgf1sha512_test.json")) + if not self._skip_slow_tests: + self.tv.extend(self.load_tests("rsa_oaep_3072_sha256_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_3072_sha256_mgf1sha256_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_3072_sha512_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_3072_sha512_mgf1sha512_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_4096_sha256_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_4096_sha256_mgf1sha256_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_4096_sha512_mgf1sha1_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_4096_sha512_mgf1sha512_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_4096_sha512_mgf1sha512_test.json")) + self.tv.extend(self.load_tests("rsa_oaep_misc_test.json")) + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_decrypt(self, tv): + self._id = "Wycheproof Decrypt %s Test #%s" % (tv.algo, tv.id) + + cipher = PKCS.new(tv.rsa_key, hashAlgo=tv.hash_mod, mgfunc=tv.mgf, label=tv.label) + try: + pt = cipher.decrypt(tv.ct) + except ValueError: + assert not tv.valid + else: + assert tv.valid + self.assertEqual(pt, tv.msg) + self.warn(tv) + + def runTest(self): + + for tv in self.tv: + self.test_decrypt(tv) + + +def get_tests(config={}): + skip_slow_tests = not config.get('slow_tests') + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(PKCS1_OAEP_Tests) + tests += [TestVectorsWycheproof(wycheproof_warnings, skip_slow_tests)] 
+    return tests
+
+
+if __name__ == '__main__':
+    def suite():
+        return unittest.TestSuite(get_tests())
+    unittest.main(defaultTest='suite')
+
+# vim:set ts=4 sw=4 sts=4 expandtab:
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/__init__.py
new file mode 100644
index 000000000..98fcab208
--- /dev/null
+++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/__init__.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+#
+# SelfTest/Hash/__init__.py: Self-test for hash modules
+#
+# Written in 2008 by Dwayne C. Litzenberger
+#
+# ===================================================================
+# The contents of this file are dedicated to the public domain. To
+# the extent that dedication to the public domain is not available,
+# everyone is granted a worldwide, perpetual, royalty-free,
+# non-exclusive license to exercise all rights associated with the
+# contents of this file for any purpose whatsoever.
+# No rights are reserved.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+# ===================================================================
+
+"""Self-test for hash modules"""
+
+__revision__ = "$Id$"
+
+def get_tests(config={}):
+    tests = []
+    from Crypto.SelfTest.Hash import test_HMAC; tests += test_HMAC.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_CMAC; tests += test_CMAC.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_MD2; tests += test_MD2.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_MD4; tests += test_MD4.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_MD5; tests += test_MD5.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_RIPEMD160; tests += test_RIPEMD160.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_SHA1; tests += test_SHA1.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_SHA224; tests += test_SHA224.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_SHA256; tests += test_SHA256.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_SHA384; tests += test_SHA384.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_SHA512; tests += test_SHA512.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_SHA3_224; tests += test_SHA3_224.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_SHA3_256; tests += test_SHA3_256.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_SHA3_384; tests += test_SHA3_384.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_SHA3_512; tests += test_SHA3_512.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_keccak; tests += test_keccak.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_SHAKE; tests += test_SHAKE.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_BLAKE2; tests += test_BLAKE2.get_tests(config=config)
+    from Crypto.SelfTest.Hash import test_Poly1305; tests += test_Poly1305.get_tests(config=config)
+    from Crypto.SelfTest.Hash import
test_cSHAKE; tests += test_cSHAKE.get_tests(config=config) + from Crypto.SelfTest.Hash import test_KMAC; tests += test_KMAC.get_tests(config=config) + from Crypto.SelfTest.Hash import test_TupleHash; tests += test_TupleHash.get_tests(config=config) + from Crypto.SelfTest.Hash import test_KangarooTwelve; tests += test_KangarooTwelve.get_tests(config=config) + from Crypto.SelfTest.Hash import test_TurboSHAKE; tests += test_TurboSHAKE.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/common.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/common.py new file mode 100644 index 000000000..157866713 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/common.py @@ -0,0 +1,290 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/common.py: Common code for Crypto.SelfTest.Hash +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-testing for PyCrypto hash modules""" + +import re +import sys +import unittest +import binascii +import Crypto.Hash +from binascii import hexlify, unhexlify +from Crypto.Util.py3compat import b, tobytes +from Crypto.Util.strxor import strxor_c + +def t2b(hex_string): + shorter = re.sub(br'\s+', b'', tobytes(hex_string)) + return unhexlify(shorter) + + +class HashDigestSizeSelfTest(unittest.TestCase): + + def __init__(self, hashmod, description, expected, extra_params): + unittest.TestCase.__init__(self) + self.hashmod = hashmod + self.expected = expected + self.description = description + self.extra_params = extra_params + + def shortDescription(self): + return self.description + + def runTest(self): + if "truncate" not in self.extra_params: + self.assertTrue(hasattr(self.hashmod, "digest_size")) + self.assertEqual(self.hashmod.digest_size, self.expected) + h = self.hashmod.new(**self.extra_params) + self.assertTrue(hasattr(h, "digest_size")) + self.assertEqual(h.digest_size, self.expected) + + +class HashSelfTest(unittest.TestCase): + + def __init__(self, hashmod, description, expected, input, extra_params): + unittest.TestCase.__init__(self) + self.hashmod = hashmod + self.expected = expected.lower() + self.input = input + self.description = description + self.extra_params = extra_params + + def shortDescription(self): + return self.description + + def runTest(self): + h = self.hashmod.new(**self.extra_params) + h.update(self.input) + + out1 = binascii.b2a_hex(h.digest()) + out2 = h.hexdigest() + + h = self.hashmod.new(self.input, **self.extra_params) + + out3 = h.hexdigest() + out4 = binascii.b2a_hex(h.digest()) + + # PY3K: hexdigest() should return str(), and digest() bytes + self.assertEqual(self.expected, out1) # h = .new(); h.update(data); h.digest() + if sys.version_info[0] == 2: + self.assertEqual(self.expected, out2) # h = .new(); h.update(data); h.hexdigest() + self.assertEqual(self.expected, out3) # h = .new(data); h.hexdigest() + else: + self.assertEqual(self.expected.decode(), out2) # h = .new(); h.update(data); h.hexdigest() + self.assertEqual(self.expected.decode(), out3) # h = .new(data); h.hexdigest() + self.assertEqual(self.expected, out4) # h = .new(data); h.digest() + + # Verify that the .new() method produces a fresh hash object, except + # for MD5 and SHA1, which are hashlib objects. (But test any .new() + # method that does exist.) 
+ if self.hashmod.__name__ not in ('Crypto.Hash.MD5', 'Crypto.Hash.SHA1') or hasattr(h, 'new'): + h2 = h.new() + h2.update(self.input) + out5 = binascii.b2a_hex(h2.digest()) + self.assertEqual(self.expected, out5) + + +class HashTestOID(unittest.TestCase): + def __init__(self, hashmod, oid, extra_params): + unittest.TestCase.__init__(self) + self.hashmod = hashmod + self.oid = oid + self.extra_params = extra_params + + def runTest(self): + h = self.hashmod.new(**self.extra_params) + self.assertEqual(h.oid, self.oid) + + +class ByteArrayTest(unittest.TestCase): + + def __init__(self, module, extra_params): + unittest.TestCase.__init__(self) + self.module = module + self.extra_params = extra_params + + def runTest(self): + data = b("\x00\x01\x02") + + # Data can be a bytearray (during initialization) + ba = bytearray(data) + + h1 = self.module.new(data, **self.extra_params) + h2 = self.module.new(ba, **self.extra_params) + ba[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a bytearray (during operation) + ba = bytearray(data) + + h1 = self.module.new(**self.extra_params) + h2 = self.module.new(**self.extra_params) + + h1.update(data) + h2.update(ba) + + ba[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + +class MemoryViewTest(unittest.TestCase): + + def __init__(self, module, extra_params): + unittest.TestCase.__init__(self) + self.module = module + self.extra_params = extra_params + + def runTest(self): + + data = b"\x00\x01\x02" + + def get_mv_ro(data): + return memoryview(data) + + def get_mv_rw(data): + return memoryview(bytearray(data)) + + for get_mv in get_mv_ro, get_mv_rw: + + # Data can be a memoryview (during initialization) + mv = get_mv(data) + + h1 = self.module.new(data, **self.extra_params) + h2 = self.module.new(mv, **self.extra_params) + if not mv.readonly: + mv[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a memoryview (during operation) + mv = get_mv(data) + + h1 = self.module.new(**self.extra_params) + h2 = self.module.new(**self.extra_params) + h1.update(data) + h2.update(mv) + if not mv.readonly: + mv[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + +class MACSelfTest(unittest.TestCase): + + def __init__(self, module, description, result, data, key, params): + unittest.TestCase.__init__(self) + self.module = module + self.result = t2b(result) + self.data = t2b(data) + self.key = t2b(key) + self.params = params + self.description = description + + def shortDescription(self): + return self.description + + def runTest(self): + + result_hex = hexlify(self.result) + + # Verify result + h = self.module.new(self.key, **self.params) + h.update(self.data) + self.assertEqual(self.result, h.digest()) + self.assertEqual(hexlify(self.result).decode('ascii'), h.hexdigest()) + + # Verify that correct MAC does not raise any exception + h.verify(self.result) + h.hexverify(result_hex) + + # Verify that incorrect MAC does raise ValueError exception + wrong_mac = strxor_c(self.result, 255) + self.assertRaises(ValueError, h.verify, wrong_mac) + self.assertRaises(ValueError, h.hexverify, "4556") + + # Verify again, with data passed to new() + h = self.module.new(self.key, self.data, **self.params) + self.assertEqual(self.result, h.digest()) + self.assertEqual(hexlify(self.result).decode('ascii'), h.hexdigest()) + + # Test .copy() + try: + h = self.module.new(self.key, self.data, **self.params) + h2 = h.copy() + h3 = h.copy() + + # Verify that changing the copy does not change the original + 
h2.update(b"bla") + self.assertEqual(h3.digest(), self.result) + + # Verify that both can reach the same state + h.update(b"bla") + self.assertEqual(h.digest(), h2.digest()) + except NotImplementedError: + pass + + # PY3K: Check that hexdigest() returns str and digest() returns bytes + self.assertTrue(isinstance(h.digest(), type(b""))) + self.assertTrue(isinstance(h.hexdigest(), type(""))) + + # PY3K: Check that .hexverify() accepts bytes or str + h.hexverify(h.hexdigest()) + h.hexverify(h.hexdigest().encode('ascii')) + + +def make_hash_tests(module, module_name, test_data, digest_size, oid=None, + extra_params={}): + tests = [] + for i in range(len(test_data)): + row = test_data[i] + (expected, input) = map(tobytes,row[0:2]) + if len(row) < 3: + description = repr(input) + else: + description = row[2] + name = "%s #%d: %s" % (module_name, i+1, description) + tests.append(HashSelfTest(module, name, expected, input, extra_params)) + + name = "%s #%d: digest_size" % (module_name, len(test_data) + 1) + tests.append(HashDigestSizeSelfTest(module, name, digest_size, extra_params)) + + if oid is not None: + tests.append(HashTestOID(module, oid, extra_params)) + + tests.append(ByteArrayTest(module, extra_params)) + + tests.append(MemoryViewTest(module, extra_params)) + + return tests + + +def make_mac_tests(module, module_name, test_data): + tests = [] + for i, row in enumerate(test_data): + if len(row) == 4: + (key, data, results, description, params) = list(row) + [ {} ] + else: + (key, data, results, description, params) = row + name = "%s #%d: %s" % (module_name, i+1, description) + tests.append(MACSelfTest(module, name, results, data, key, params)) + return tests + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_BLAKE2.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_BLAKE2.py new file mode 100644 index 000000000..f953eabd9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_BLAKE2.py @@ -0,0 +1,482 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import os +import re +import unittest +import warnings +from binascii import unhexlify, hexlify + +from Crypto.Util.py3compat import tobytes +from Crypto.Util.strxor import strxor_c +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Hash import BLAKE2b, BLAKE2s + + +class Blake2Test(unittest.TestCase): + + def test_new_positive(self): + + h = self.BLAKE2.new(digest_bits=self.max_bits) + for new_func in self.BLAKE2.new, h.new: + + for dbits in range(8, self.max_bits + 1, 8): + hobj = new_func(digest_bits=dbits) + self.assertEqual(hobj.digest_size, dbits // 8) + + for dbytes in range(1, self.max_bytes + 1): + hobj = new_func(digest_bytes=dbytes) + self.assertEqual(hobj.digest_size, dbytes) + + digest1 = new_func(data=b"\x90", digest_bytes=self.max_bytes).digest() + digest2 = new_func(digest_bytes=self.max_bytes).update(b"\x90").digest() + self.assertEqual(digest1, digest2) + + new_func(data=b"A", key=b"5", digest_bytes=self.max_bytes) + + hobj = h.new() + self.assertEqual(hobj.digest_size, self.max_bytes) + + def test_new_negative(self): + + h = self.BLAKE2.new(digest_bits=self.max_bits) + for new_func in self.BLAKE2.new, h.new: + self.assertRaises(TypeError, new_func, + digest_bytes=self.max_bytes, + digest_bits=self.max_bits) + self.assertRaises(ValueError, new_func, digest_bytes=0) + self.assertRaises(ValueError, new_func, + digest_bytes=self.max_bytes + 1) + self.assertRaises(ValueError, new_func, digest_bits=7) + self.assertRaises(ValueError, new_func, digest_bits=15) + self.assertRaises(ValueError, new_func, + digest_bits=self.max_bits + 1) + self.assertRaises(TypeError, new_func, + digest_bytes=self.max_bytes, + key=u"string") + self.assertRaises(TypeError, new_func, + digest_bytes=self.max_bytes, + data=u"string") + + def test_default_digest_size(self): + digest = self.BLAKE2.new(data=b'abc').digest() + self.assertEqual(len(digest), self.max_bytes) + + def test_update(self): + pieces = [b"\x0A" * 200, b"\x14" * 300] + h = self.BLAKE2.new(digest_bytes=self.max_bytes) + h.update(pieces[0]).update(pieces[1]) + digest = h.digest() + h = self.BLAKE2.new(digest_bytes=self.max_bytes) + h.update(pieces[0] + pieces[1]) + self.assertEqual(h.digest(), digest) + + def test_update_negative(self): + h = self.BLAKE2.new(digest_bytes=self.max_bytes) + self.assertRaises(TypeError, h.update, u"string") + + def test_digest(self): + h = self.BLAKE2.new(digest_bytes=self.max_bytes) + digest = h.digest() + + # hexdigest does not change the state + self.assertEqual(h.digest(), digest) + # digest returns a byte string + self.assertTrue(isinstance(digest, type(b"digest"))) + + def test_update_after_digest(self): + msg = b"rrrrttt" + + # Normally, update() cannot be done after digest() + h = self.BLAKE2.new(digest_bits=256, data=msg[:4]) + dig1 = h.digest() + self.assertRaises(TypeError, h.update, msg[4:]) + dig2 = self.BLAKE2.new(digest_bits=256, data=msg).digest() + + # With the proper flag, it is allowed + h = self.BLAKE2.new(digest_bits=256, data=msg[:4], update_after_digest=True) + self.assertEqual(h.digest(), dig1) + # ... 
and the subsequent digest applies to the entire message + # up to that point + h.update(msg[4:]) + self.assertEqual(h.digest(), dig2) + + def test_hex_digest(self): + mac = self.BLAKE2.new(digest_bits=self.max_bits) + digest = mac.digest() + hexdigest = mac.hexdigest() + + # hexdigest is equivalent to digest + self.assertEqual(hexlify(digest), tobytes(hexdigest)) + # hexdigest does not change the state + self.assertEqual(mac.hexdigest(), hexdigest) + # hexdigest returns a string + self.assertTrue(isinstance(hexdigest, type("digest"))) + + def test_verify(self): + h = self.BLAKE2.new(digest_bytes=self.max_bytes, key=b"4") + mac = h.digest() + h.verify(mac) + wrong_mac = strxor_c(mac, 255) + self.assertRaises(ValueError, h.verify, wrong_mac) + + def test_hexverify(self): + h = self.BLAKE2.new(digest_bytes=self.max_bytes, key=b"4") + mac = h.hexdigest() + h.hexverify(mac) + self.assertRaises(ValueError, h.hexverify, "4556") + + def test_oid(self): + + prefix = "1.3.6.1.4.1.1722.12.2." + self.oid_variant + "." + + for digest_bits in self.digest_bits_oid: + h = self.BLAKE2.new(digest_bits=digest_bits) + self.assertEqual(h.oid, prefix + str(digest_bits // 8)) + + h = self.BLAKE2.new(digest_bits=digest_bits, key=b"secret") + self.assertRaises(AttributeError, lambda: h.oid) + + for digest_bits in (8, self.max_bits): + if digest_bits in self.digest_bits_oid: + continue + self.assertRaises(AttributeError, lambda: h.oid) + + def test_bytearray(self): + + key = b'0' * 16 + data = b"\x00\x01\x02" + + # Data and key can be a bytearray (during initialization) + key_ba = bytearray(key) + data_ba = bytearray(data) + + h1 = self.BLAKE2.new(data=data, key=key) + h2 = self.BLAKE2.new(data=data_ba, key=key_ba) + key_ba[:1] = b'\xFF' + data_ba[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a bytearray (during operation) + data_ba = bytearray(data) + + h1 = self.BLAKE2.new() + h2 = self.BLAKE2.new() + h1.update(data) + h2.update(data_ba) + data_ba[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + def test_memoryview(self): + + key = b'0' * 16 + data = b"\x00\x01\x02" + + def get_mv_ro(data): + return memoryview(data) + + def get_mv_rw(data): + return memoryview(bytearray(data)) + + for get_mv in (get_mv_ro, get_mv_rw): + + # Data and key can be a memoryview (during initialization) + key_mv = get_mv(key) + data_mv = get_mv(data) + + h1 = self.BLAKE2.new(data=data, key=key) + h2 = self.BLAKE2.new(data=data_mv, key=key_mv) + if not data_mv.readonly: + data_mv[:1] = b'\xFF' + key_mv[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a memoryview (during operation) + data_mv = get_mv(data) + + h1 = self.BLAKE2.new() + h2 = self.BLAKE2.new() + h1.update(data) + h2.update(data_mv) + if not data_mv.readonly: + data_mv[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + +class Blake2bTest(Blake2Test): + #: Module + BLAKE2 = BLAKE2b + #: Max output size (in bits) + max_bits = 512 + #: Max output size (in bytes) + max_bytes = 64 + #: Bit size of the digests for which an ASN OID exists + digest_bits_oid = (160, 256, 384, 512) + # http://tools.ietf.org/html/draft-saarinen-blake2-02 + oid_variant = "1" + + +class Blake2sTest(Blake2Test): + #: Module + BLAKE2 = BLAKE2s + #: Max output size (in bits) + max_bits = 256 + #: Max output size (in bytes) + max_bytes = 32 + #: Bit size of the digests for which an ASN OID exists + digest_bits_oid = (128, 160, 224, 256) + # http://tools.ietf.org/html/draft-saarinen-blake2-02 + oid_variant = "2" + + 
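# --- Illustrative aside (editor's sketch, not part of the vendored test file) ---
# The keyed tests above exercise BLAKE2's built-in MAC mode: passing key= to
# new() produces a tag that the receiver can later check with verify() or
# hexverify(). A minimal usage sketch of that public API (the message and key
# values below are made up for illustration):

from Crypto.Hash import BLAKE2s

mac = BLAKE2s.new(digest_bits=256, key=b'shared secret')
mac.update(b'message to authenticate')
tag = mac.digest()

# Receiver side: recompute the keyed digest and let verify() do the comparison;
# a mismatching tag raises ValueError.
check = BLAKE2s.new(digest_bits=256, key=b'shared secret')
check.update(b'message to authenticate')
check.verify(tag)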
+class Blake2OfficialTestVector(unittest.TestCase): + + def _load_tests(self, test_vector_file): + expected = "in" + test_vectors = [] + with open(test_vector_file, "rt") as test_vector_fd: + for line_number, line in enumerate(test_vector_fd): + + if line.strip() == "" or line.startswith("#"): + continue + + res = re.match("%s:\t([0-9A-Fa-f]*)" % expected, line) + if not res: + raise ValueError("Incorrect test vector format (line %d)" + % line_number) + + if res.group(1): + bin_value = unhexlify(tobytes(res.group(1))) + else: + bin_value = b"" + if expected == "in": + input_data = bin_value + expected = "key" + elif expected == "key": + key = bin_value + expected = "hash" + else: + result = bin_value + expected = "in" + test_vectors.append((input_data, key, result)) + return test_vectors + + def setUp(self): + + dir_comps = ("Hash", self.name) + file_name = self.name.lower() + "-test.txt" + self.description = "%s tests" % self.name + + try: + import pycryptodome_test_vectors # type: ignore + except ImportError: + warnings.warn("Warning: skipping extended tests for %s" % self.name, + UserWarning) + self.test_vectors = [] + return + + init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) + full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) + self.test_vectors = self._load_tests(full_file_name) + + def runTest(self): + for (input_data, key, result) in self.test_vectors: + mac = self.BLAKE2.new(key=key, digest_bytes=self.max_bytes) + mac.update(input_data) + self.assertEqual(mac.digest(), result) + + +class Blake2bOfficialTestVector(Blake2OfficialTestVector): + #: Module + BLAKE2 = BLAKE2b + #: Hash name + name = "BLAKE2b" + #: Max digest size + max_bytes = 64 + + +class Blake2sOfficialTestVector(Blake2OfficialTestVector): + #: Module + BLAKE2 = BLAKE2s + #: Hash name + name = "BLAKE2s" + #: Max digest size + max_bytes = 32 + + +class Blake2TestVector1(unittest.TestCase): + + def _load_tests(self, test_vector_file): + test_vectors = [] + with open(test_vector_file, "rt") as test_vector_fd: + for line_number, line in enumerate(test_vector_fd): + if line.strip() == "" or line.startswith("#"): + continue + res = re.match("digest: ([0-9A-Fa-f]*)", line) + if not res: + raise ValueError("Incorrect test vector format (line %d)" + % line_number) + + test_vectors.append(unhexlify(tobytes(res.group(1)))) + return test_vectors + + def setUp(self): + dir_comps = ("Hash", self.name) + file_name = "tv1.txt" + self.description = "%s tests" % self.name + + try: + import pycryptodome_test_vectors + except ImportError: + warnings.warn("Warning: skipping extended tests for %s" % self.name, + UserWarning) + self.test_vectors = [] + return + + init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) + full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) + self.test_vectors = self._load_tests(full_file_name) + + def runTest(self): + + for tv in self.test_vectors: + digest_bytes = len(tv) + next_data = b"" + for _ in range(100): + h = self.BLAKE2.new(digest_bytes=digest_bytes) + h.update(next_data) + next_data = h.digest() + next_data + self.assertEqual(h.digest(), tv) + + +class Blake2bTestVector1(Blake2TestVector1): + #: Module + BLAKE2 = BLAKE2b + #: Hash name + name = "BLAKE2b" + + +class Blake2sTestVector1(Blake2TestVector1): + #: Module + BLAKE2 = BLAKE2s + #: Hash name + name = "BLAKE2s" + + +class Blake2TestVector2(unittest.TestCase): + + def _load_tests(self, test_vector_file): + test_vectors = [] + with open(test_vector_file, "rt") as 
test_vector_fd:
+            for line_number, line in enumerate(test_vector_fd):
+                if line.strip() == "" or line.startswith("#"):
+                    continue
+                res = re.match(r"digest\(([0-9]+)\): ([0-9A-Fa-f]*)", line)
+                if not res:
+                    raise ValueError("Incorrect test vector format (line %d)"
+                                     % line_number)
+                key_size = int(res.group(1))
+                result = unhexlify(tobytes(res.group(2)))
+                test_vectors.append((key_size, result))
+        return test_vectors
+
+    def setUp(self):
+        dir_comps = ("Hash", self.name)
+        file_name = "tv2.txt"
+        self.description = "%s tests" % self.name
+
+        try:
+            import pycryptodome_test_vectors  # type: ignore
+        except ImportError:
+            warnings.warn("Warning: skipping extended tests for %s" % self.name,
+                          UserWarning)
+            self.test_vectors = []
+            return
+
+        init_dir = os.path.dirname(pycryptodome_test_vectors.__file__)
+        full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name)
+        self.test_vectors = self._load_tests(full_file_name)
+
+    def runTest(self):
+
+        for key_size, result in self.test_vectors:
+            next_data = b""
+            for _ in range(100):
+                h = self.BLAKE2.new(digest_bytes=self.max_bytes,
+                                    key=b"A" * key_size)
+                h.update(next_data)
+                next_data = h.digest() + next_data
+            self.assertEqual(h.digest(), result)
+
+
+class Blake2bTestVector2(Blake2TestVector2):
+    #: Module
+    BLAKE2 = BLAKE2b
+    #: Hash name
+    name = "BLAKE2b"
+    #: Max digest size in bytes
+    max_bytes = 64
+
+
+class Blake2sTestVector2(Blake2TestVector2):
+    #: Module
+    BLAKE2 = BLAKE2s
+    #: Hash name
+    name = "BLAKE2s"
+    #: Max digest size in bytes
+    max_bytes = 32
+
+
+def get_tests(config={}):
+    tests = []
+
+    tests += list_test_cases(Blake2bTest)
+    tests.append(Blake2bOfficialTestVector())
+    tests.append(Blake2bTestVector1())
+    tests.append(Blake2bTestVector2())
+
+    tests += list_test_cases(Blake2sTest)
+    tests.append(Blake2sOfficialTestVector())
+    tests.append(Blake2sTestVector1())
+    tests.append(Blake2sTestVector2())
+
+    return tests
+
+
+if __name__ == '__main__':
+    import unittest
+    def suite():
+        return unittest.TestSuite(get_tests())
+    unittest.main(defaultTest='suite')
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_CMAC.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_CMAC.py
new file mode 100644
index 000000000..f4763f210
--- /dev/null
+++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_CMAC.py
@@ -0,0 +1,448 @@
+#
+# SelfTest/Hash/CMAC.py: Self-test for the CMAC module
+#
+# ===================================================================
+#
+# Copyright (c) 2014, Legrandin
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# 1. Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+# 2. Redistributions in binary form must reproduce the above copyright
+#    notice, this list of conditions and the following disclaimer in
+#    the documentation and/or other materials provided with the
+#    distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.CMAC""" + +import json +import unittest +from binascii import unhexlify + +from Crypto.Util.py3compat import tobytes + +from Crypto.Hash import CMAC +from Crypto.Cipher import AES, DES3 +from Crypto.Hash import SHAKE128 + +from Crypto.Util.strxor import strxor + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors_wycheproof + +# This is a list of (key, data, result, description, module) tuples. +test_data = [ + + ## Test vectors from RFC 4493 ## + ## The are also in NIST SP 800 38B D.2 ## + ( '2b7e151628aed2a6abf7158809cf4f3c', + '', + 'bb1d6929e95937287fa37d129b756746', + 'RFC 4493 #1', + AES + ), + + ( '2b7e151628aed2a6abf7158809cf4f3c', + '6bc1bee22e409f96e93d7e117393172a', + '070a16b46b4d4144f79bdd9dd04a287c', + 'RFC 4493 #2', + AES + ), + + ( '2b7e151628aed2a6abf7158809cf4f3c', + '6bc1bee22e409f96e93d7e117393172a'+ + 'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411', + 'dfa66747de9ae63030ca32611497c827', + 'RFC 4493 #3', + AES + ), + + ( '2b7e151628aed2a6abf7158809cf4f3c', + '6bc1bee22e409f96e93d7e117393172a'+ + 'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+ + 'f69f2445df4f9b17ad2b417be66c3710', + '51f0bebf7e3b9d92fc49741779363cfe', + 'RFC 4493 #4', + AES + ), + + ## The rest of Appendix D of NIST SP 800 38B + ## was not totally correct. + ## Values in Examples 14, 15, 18, and 19 were wrong. 
+ ## The updated test values are published in: + ## http://csrc.nist.gov/publications/nistpubs/800-38B/Updated_CMAC_Examples.pdf + + ( '8e73b0f7da0e6452c810f32b809079e5'+ + '62f8ead2522c6b7b', + '', + 'd17ddf46adaacde531cac483de7a9367', + 'NIST SP 800 38B D.2 Example 5', + AES + ), + + ( '8e73b0f7da0e6452c810f32b809079e5'+ + '62f8ead2522c6b7b', + '6bc1bee22e409f96e93d7e117393172a', + '9e99a7bf31e710900662f65e617c5184', + 'NIST SP 800 38B D.2 Example 6', + AES + ), + + ( '8e73b0f7da0e6452c810f32b809079e5'+ + '62f8ead2522c6b7b', + '6bc1bee22e409f96e93d7e117393172a'+ + 'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411', + '8a1de5be2eb31aad089a82e6ee908b0e', + 'NIST SP 800 38B D.2 Example 7', + AES + ), + + ( '8e73b0f7da0e6452c810f32b809079e5'+ + '62f8ead2522c6b7b', + '6bc1bee22e409f96e93d7e117393172a'+ + 'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+ + 'f69f2445df4f9b17ad2b417be66c3710', + 'a1d5df0eed790f794d77589659f39a11', + 'NIST SP 800 38B D.2 Example 8', + AES + ), + + ( '603deb1015ca71be2b73aef0857d7781'+ + '1f352c073b6108d72d9810a30914dff4', + '', + '028962f61b7bf89efc6b551f4667d983', + 'NIST SP 800 38B D.3 Example 9', + AES + ), + + ( '603deb1015ca71be2b73aef0857d7781'+ + '1f352c073b6108d72d9810a30914dff4', + '6bc1bee22e409f96e93d7e117393172a', + '28a7023f452e8f82bd4bf28d8c37c35c', + 'NIST SP 800 38B D.3 Example 10', + AES + ), + + ( '603deb1015ca71be2b73aef0857d7781'+ + '1f352c073b6108d72d9810a30914dff4', + '6bc1bee22e409f96e93d7e117393172a'+ + 'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411', + 'aaf3d8f1de5640c232f5b169b9c911e6', + 'NIST SP 800 38B D.3 Example 11', + AES + ), + + ( '603deb1015ca71be2b73aef0857d7781'+ + '1f352c073b6108d72d9810a30914dff4', + '6bc1bee22e409f96e93d7e117393172a'+ + 'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+ + 'f69f2445df4f9b17ad2b417be66c3710', + 'e1992190549f6ed5696a2c056c315410', + 'NIST SP 800 38B D.3 Example 12', + AES + ), + + ( '8aa83bf8cbda1062'+ + '0bc1bf19fbb6cd58'+ + 'bc313d4a371ca8b5', + '', + 'b7a688e122ffaf95', + 'NIST SP 800 38B D.4 Example 13', + DES3 + ), + + ( '8aa83bf8cbda1062'+ + '0bc1bf19fbb6cd58'+ + 'bc313d4a371ca8b5', + '6bc1bee22e409f96', + '8e8f293136283797', + 'NIST SP 800 38B D.4 Example 14', + DES3 + ), + + ( '8aa83bf8cbda1062'+ + '0bc1bf19fbb6cd58'+ + 'bc313d4a371ca8b5', + '6bc1bee22e409f96'+ + 'e93d7e117393172a'+ + 'ae2d8a57', + '743ddbe0ce2dc2ed', + 'NIST SP 800 38B D.4 Example 15', + DES3 + ), + + ( '8aa83bf8cbda1062'+ + '0bc1bf19fbb6cd58'+ + 'bc313d4a371ca8b5', + '6bc1bee22e409f96'+ + 'e93d7e117393172a'+ + 'ae2d8a571e03ac9c'+ + '9eb76fac45af8e51', + '33e6b1092400eae5', + 'NIST SP 800 38B D.4 Example 16', + DES3 + ), + + ( '4cf15134a2850dd5'+ + '8a3d10ba80570d38', + '', + 'bd2ebf9a3ba00361', + 'NIST SP 800 38B D.7 Example 17', + DES3 + ), + + ( '4cf15134a2850dd5'+ + '8a3d10ba80570d38', + '6bc1bee22e409f96', + '4ff2ab813c53ce83', + 'NIST SP 800 38B D.7 Example 18', + DES3 + ), + + ( '4cf15134a2850dd5'+ + '8a3d10ba80570d38', + '6bc1bee22e409f96'+ + 'e93d7e117393172a'+ + 'ae2d8a57', + '62dd1b471902bd4e', + 'NIST SP 800 38B D.7 Example 19', + DES3 + ), + + ( '4cf15134a2850dd5'+ + '8a3d10ba80570d38', + '6bc1bee22e409f96'+ + 'e93d7e117393172a'+ + 'ae2d8a571e03ac9c'+ + '9eb76fac45af8e51', + '31b1e431dabc4eb8', + 'NIST SP 800 38B D.7 Example 20', + DES3 + ), + +] + + +def get_tag_random(tag, length): + return SHAKE128.new(data=tobytes(tag)).read(length) + + +class TestCMAC(unittest.TestCase): + + def test_internal_caching(self): + """Verify that internal 
caching is implemented correctly""" + + data_to_mac = get_tag_random("data_to_mac", 128) + key = get_tag_random("key", 16) + ref_mac = CMAC.new(key, msg=data_to_mac, ciphermod=AES).digest() + + # Break up in chunks of different length + # The result must always be the same + for chunk_length in 1, 2, 3, 7, 10, 13, 16, 40, 80, 128: + + chunks = [data_to_mac[i:i+chunk_length] for i in + range(0, len(data_to_mac), chunk_length)] + + mac = CMAC.new(key, ciphermod=AES) + for chunk in chunks: + mac.update(chunk) + self.assertEqual(ref_mac, mac.digest()) + + def test_update_after_digest(self): + msg = b"rrrrttt" + key = b"4" * 16 + + # Normally, update() cannot be done after digest() + h = CMAC.new(key, msg[:4], ciphermod=AES) + dig1 = h.digest() + self.assertRaises(TypeError, h.update, msg[4:]) + dig2 = CMAC.new(key, msg, ciphermod=AES).digest() + + # With the proper flag, it is allowed + h2 = CMAC.new(key, msg[:4], ciphermod=AES, update_after_digest=True) + self.assertEqual(h2.digest(), dig1) + # ... and the subsequent digest applies to the entire message + # up to that point + h2.update(msg[4:]) + self.assertEqual(h2.digest(), dig2) + + +class ByteArrayTests(unittest.TestCase): + + def runTest(self): + + key = b"0" * 16 + data = b"\x00\x01\x02" + + # Data and key can be a bytearray (during initialization) + key_ba = bytearray(key) + data_ba = bytearray(data) + + h1 = CMAC.new(key, data, ciphermod=AES) + h2 = CMAC.new(key_ba, data_ba, ciphermod=AES) + key_ba[:1] = b'\xFF' + data_ba[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a bytearray (during operation) + key_ba = bytearray(key) + data_ba = bytearray(data) + + h1 = CMAC.new(key, ciphermod=AES) + h2 = CMAC.new(key, ciphermod=AES) + h1.update(data) + h2.update(data_ba) + data_ba[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + +class MemoryViewTests(unittest.TestCase): + + def runTest(self): + + key = b"0" * 16 + data = b"\x00\x01\x02" + + def get_mv_ro(data): + return memoryview(data) + + def get_mv_rw(data): + return memoryview(bytearray(data)) + + for get_mv in (get_mv_ro, get_mv_rw): + + # Data and key can be a memoryview (during initialization) + key_mv = get_mv(key) + data_mv = get_mv(data) + + h1 = CMAC.new(key, data, ciphermod=AES) + h2 = CMAC.new(key_mv, data_mv, ciphermod=AES) + if not data_mv.readonly: + key_mv[:1] = b'\xFF' + data_mv[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a memoryview (during operation) + data_mv = get_mv(data) + + h1 = CMAC.new(key, ciphermod=AES) + h2 = CMAC.new(key, ciphermod=AES) + h1.update(data) + h2.update(data_mv) + if not data_mv.readonly: + data_mv[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._id = "None" + + def setUp(self): + + def filter_tag(group): + return group['tagSize'] // 8 + + self.tv = load_test_vectors_wycheproof(("Hash", "wycheproof"), + "aes_cmac_test.json", + "Wycheproof CMAC", + group_tag={'tag_size': filter_tag}) + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_create_mac(self, tv): + self._id = "Wycheproof MAC creation Test #" + str(tv.id) + + try: + tag = CMAC.new(tv.key, tv.msg, ciphermod=AES, mac_len=tv.tag_size).digest() + except 
ValueError as e:
+            if len(tv.key) not in (16, 24, 32) and "key length" in str(e):
+                return
+            raise e
+        if tv.valid:
+            self.assertEqual(tag, tv.tag)
+            self.warn(tv)
+
+    def test_verify_mac(self, tv):
+        self._id = "Wycheproof MAC verification Test #" + str(tv.id)
+
+        try:
+            mac = CMAC.new(tv.key, tv.msg, ciphermod=AES, mac_len=tv.tag_size)
+        except ValueError as e:
+            if len(tv.key) not in (16, 24, 32) and "key length" in str(e):
+                return
+            raise e
+        try:
+            mac.verify(tv.tag)
+        except ValueError:
+            assert not tv.valid
+        else:
+            assert tv.valid
+        self.warn(tv)
+
+    def runTest(self):
+
+        for tv in self.tv:
+            self.test_create_mac(tv)
+            self.test_verify_mac(tv)
+
+
+def get_tests(config={}):
+    global test_data
+    import types
+    from .common import make_mac_tests
+
+    wycheproof_warnings = config.get('wycheproof_warnings')
+
+    # Add new() parameters to the back of each test vector
+    params_test_data = []
+    for row in test_data:
+        t = list(row)
+        t[4] = dict(ciphermod=t[4])
+        params_test_data.append(t)
+
+    tests = make_mac_tests(CMAC, "CMAC", params_test_data)
+    tests.append(ByteArrayTests())
+    tests += list_test_cases(TestCMAC)
+    tests.append(MemoryViewTests())
+    tests += [ TestVectorsWycheproof(wycheproof_warnings) ]
+    return tests
+
+
+if __name__ == '__main__':
+    import unittest
+    suite = lambda: unittest.TestSuite(get_tests())
+    unittest.main(defaultTest='suite')
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_HMAC.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_HMAC.py
new file mode 100644
index 000000000..26b7b247d
--- /dev/null
+++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_HMAC.py
@@ -0,0 +1,548 @@
+# -*- coding: utf-8 -*-
+#
+# SelfTest/Hash/HMAC.py: Self-test for the HMAC module
+#
+# Written in 2008 by Dwayne C. Litzenberger
+#
+# ===================================================================
+# The contents of this file are dedicated to the public domain. To
+# the extent that dedication to the public domain is not available,
+# everyone is granted a worldwide, perpetual, royalty-free,
+# non-exclusive license to exercise all rights associated with the
+# contents of this file for any purpose whatsoever.
+# No rights are reserved.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+# ===================================================================
+
+"""Self-test suite for Crypto.Hash.HMAC"""
+
+import unittest
+from binascii import hexlify
+from Crypto.Util.py3compat import tostr, tobytes
+
+from Crypto.Hash import (HMAC, MD5, SHA1, SHA256,
+                         SHA224, SHA384, SHA512,
+                         RIPEMD160,
+                         SHA3_224, SHA3_256, SHA3_384, SHA3_512)
+
+
+hash_modules = dict(MD5=MD5, SHA1=SHA1, SHA256=SHA256,
+                    SHA224=SHA224, SHA384=SHA384, SHA512=SHA512,
+                    RIPEMD160=RIPEMD160,
+                    SHA3_224=SHA3_224, SHA3_256=SHA3_256,
+                    SHA3_384=SHA3_384, SHA3_512=SHA3_512)
+
+default_hash = None
+
+def xl(text):
+    return tostr(hexlify(tobytes(text)))
+
+# This is a list of (key, data, results, description) tuples.
+test_data = [ + ## Test vectors from RFC 2202 ## + # Test that the default hashmod is MD5 + ('0b' * 16, + '4869205468657265', + dict(default_hash='9294727a3638bb1c13f48ef8158bfc9d'), + 'default-is-MD5'), + + # Test case 1 (MD5) + ('0b' * 16, + '4869205468657265', + dict(MD5='9294727a3638bb1c13f48ef8158bfc9d'), + 'RFC 2202 #1-MD5 (HMAC-MD5)'), + + # Test case 1 (SHA1) + ('0b' * 20, + '4869205468657265', + dict(SHA1='b617318655057264e28bc0b6fb378c8ef146be00'), + 'RFC 2202 #1-SHA1 (HMAC-SHA1)'), + + # Test case 2 + ('4a656665', + '7768617420646f2079612077616e7420666f72206e6f7468696e673f', + dict(MD5='750c783e6ab0b503eaa86e310a5db738', + SHA1='effcdf6ae5eb2fa2d27416d5f184df9c259a7c79'), + 'RFC 2202 #2 (HMAC-MD5/SHA1)'), + + # Test case 3 (MD5) + ('aa' * 16, + 'dd' * 50, + dict(MD5='56be34521d144c88dbb8c733f0e8b3f6'), + 'RFC 2202 #3-MD5 (HMAC-MD5)'), + + # Test case 3 (SHA1) + ('aa' * 20, + 'dd' * 50, + dict(SHA1='125d7342b9ac11cd91a39af48aa17b4f63f175d3'), + 'RFC 2202 #3-SHA1 (HMAC-SHA1)'), + + # Test case 4 + ('0102030405060708090a0b0c0d0e0f10111213141516171819', + 'cd' * 50, + dict(MD5='697eaf0aca3a3aea3a75164746ffaa79', + SHA1='4c9007f4026250c6bc8414f9bf50c86c2d7235da'), + 'RFC 2202 #4 (HMAC-MD5/SHA1)'), + + # Test case 5 (MD5) + ('0c' * 16, + '546573742057697468205472756e636174696f6e', + dict(MD5='56461ef2342edc00f9bab995690efd4c'), + 'RFC 2202 #5-MD5 (HMAC-MD5)'), + + # Test case 5 (SHA1) + # NB: We do not implement hash truncation, so we only test the full hash here. + ('0c' * 20, + '546573742057697468205472756e636174696f6e', + dict(SHA1='4c1a03424b55e07fe7f27be1d58bb9324a9a5a04'), + 'RFC 2202 #5-SHA1 (HMAC-SHA1)'), + + # Test case 6 + ('aa' * 80, + '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' + + '65204b6579202d2048617368204b6579204669727374', + dict(MD5='6b1ab7fe4bd7bf8f0b62e6ce61b9d0cd', + SHA1='aa4ae5e15272d00e95705637ce8a3b55ed402112'), + 'RFC 2202 #6 (HMAC-MD5/SHA1)'), + + # Test case 7 + ('aa' * 80, + '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' + + '65204b657920616e64204c6172676572205468616e204f6e6520426c6f636b2d' + + '53697a652044617461', + dict(MD5='6f630fad67cda0ee1fb1f562db3aa53e', + SHA1='e8e99d0f45237d786d6bbaa7965c7808bbff1a91'), + 'RFC 2202 #7 (HMAC-MD5/SHA1)'), + + ## Test vectors from RFC 4231 ## + # 4.2. Test Case 1 + ('0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b', + '4869205468657265', + dict(SHA256=''' + b0344c61d8db38535ca8afceaf0bf12b + 881dc200c9833da726e9376c2e32cff7 + '''), + 'RFC 4231 #1 (HMAC-SHA256)'), + + # 4.3. Test Case 2 - Test with a key shorter than the length of the HMAC + # output. + ('4a656665', + '7768617420646f2079612077616e7420666f72206e6f7468696e673f', + dict(SHA256=''' + 5bdcc146bf60754e6a042426089575c7 + 5a003f089d2739839dec58b964ec3843 + '''), + 'RFC 4231 #2 (HMAC-SHA256)'), + + # 4.4. Test Case 3 - Test with a combined length of key and data that is + # larger than 64 bytes (= block-size of SHA-224 and SHA-256). + ('aa' * 20, + 'dd' * 50, + dict(SHA256=''' + 773ea91e36800e46854db8ebd09181a7 + 2959098b3ef8c122d9635514ced565fe + '''), + 'RFC 4231 #3 (HMAC-SHA256)'), + + # 4.5. Test Case 4 - Test with a combined length of key and data that is + # larger than 64 bytes (= block-size of SHA-224 and SHA-256). + ('0102030405060708090a0b0c0d0e0f10111213141516171819', + 'cd' * 50, + dict(SHA256=''' + 82558a389a443c0ea4cc819899f2083a + 85f0faa3e578f8077a2e3ff46729665b + '''), + 'RFC 4231 #4 (HMAC-SHA256)'), + + # 4.6. Test Case 5 - Test with a truncation of output to 128 bits. 
+ # + # Not included because we do not implement hash truncation. + # + + # 4.7. Test Case 6 - Test with a key larger than 128 bytes (= block-size of + # SHA-384 and SHA-512). + ('aa' * 131, + '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' + + '65204b6579202d2048617368204b6579204669727374', + dict(SHA256=''' + 60e431591ee0b67f0d8a26aacbf5b77f + 8e0bc6213728c5140546040f0ee37f54 + '''), + 'RFC 4231 #6 (HMAC-SHA256)'), + + # 4.8. Test Case 7 - Test with a key and data that is larger than 128 bytes + # (= block-size of SHA-384 and SHA-512). + ('aa' * 131, + '5468697320697320612074657374207573696e672061206c6172676572207468' + + '616e20626c6f636b2d73697a65206b657920616e642061206c61726765722074' + + '68616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565' + + '647320746f20626520686173686564206265666f7265206265696e6720757365' + + '642062792074686520484d414320616c676f726974686d2e', + dict(SHA256=''' + 9b09ffa71b942fcb27635fbcd5b0e944 + bfdc63644f0713938a7f51535c3a35e2 + '''), + 'RFC 4231 #7 (HMAC-SHA256)'), + + # Test case 8 (SHA224) + ('4a656665', + '7768617420646f2079612077616e74' + + '20666f72206e6f7468696e673f', + dict(SHA224='a30e01098bc6dbbf45690f3a7e9e6d0f8bbea2a39e6148008fd05e44'), + 'RFC 4634 8.4 SHA224 (HMAC-SHA224)'), + + # Test case 9 (SHA384) + ('4a656665', + '7768617420646f2079612077616e74' + + '20666f72206e6f7468696e673f', + dict(SHA384='af45d2e376484031617f78d2b58a6b1b9c7ef464f5a01b47e42ec3736322445e8e2240ca5e69e2c78b3239ecfab21649'), + 'RFC 4634 8.4 SHA384 (HMAC-SHA384)'), + + # Test case 10 (SHA512) + ('4a656665', + '7768617420646f2079612077616e74' + + '20666f72206e6f7468696e673f', + dict(SHA512='164b7a7bfcf819e2e395fbe73b56e0a387bd64222e831fd610270cd7ea2505549758bf75c05a994a6d034f65f8f0e6fdcaeab1a34d4a6b4b636e070a38bce737'), + 'RFC 4634 8.4 SHA512 (HMAC-SHA512)'), + + # Test case 11 (RIPEMD) + ('0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b', + xl("Hi There"), + dict(RIPEMD160='24cb4bd67d20fc1a5d2ed7732dcc39377f0a5668'), + 'RFC 2286 #1 (HMAC-RIPEMD)'), + + # Test case 12 (RIPEMD) + (xl("Jefe"), + xl("what do ya want for nothing?"), + dict(RIPEMD160='dda6c0213a485a9e24f4742064a7f033b43c4069'), + 'RFC 2286 #2 (HMAC-RIPEMD)'), + + # Test case 13 (RIPEMD) + ('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', + 'dd' * 50, + dict(RIPEMD160='b0b105360de759960ab4f35298e116e295d8e7c1'), + 'RFC 2286 #3 (HMAC-RIPEMD)'), + + # Test case 14 (RIPEMD) + ('0102030405060708090a0b0c0d0e0f10111213141516171819', + 'cd' * 50, + dict(RIPEMD160='d5ca862f4d21d5e610e18b4cf1beb97a4365ecf4'), + 'RFC 2286 #4 (HMAC-RIPEMD)'), + + # Test case 15 (RIPEMD) + ('0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c', + xl("Test With Truncation"), + dict(RIPEMD160='7619693978f91d90539ae786500ff3d8e0518e39'), + 'RFC 2286 #5 (HMAC-RIPEMD)'), + + # Test case 16 (RIPEMD) + ('aa' * 80, + xl("Test Using Larger Than Block-Size Key - Hash Key First"), + dict(RIPEMD160='6466ca07ac5eac29e1bd523e5ada7605b791fd8b'), + 'RFC 2286 #6 (HMAC-RIPEMD)'), + + # Test case 17 (RIPEMD) + ('aa' * 80, + xl("Test Using Larger Than Block-Size Key and Larger Than One Block-Size Data"), + dict(RIPEMD160='69ea60798d71616cce5fd0871e23754cd75d5a0a'), + 'RFC 2286 #7 (HMAC-RIPEMD)'), + + # From https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/HMAC_SHA3-224.pdf + ( + '000102030405060708090a0b0c0d0e0f' + '101112131415161718191a1b', + xl('Sample message for keylenblocklen'), + dict(SHA3_224='078695eecc227c636ad31d063a15dd05a7e819a66ec6d8de1e193e59'), + 'NIST CSRC Sample #3 (SHA3-224)' + ), + + # 
From https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/HMAC_SHA3-256.pdf + ( + '000102030405060708090a0b0c0d0e0f'\ + '101112131415161718191a1b1c1d1e1f', + xl('Sample message for keylenblocklen'), + dict(SHA3_256='9bcf2c238e235c3ce88404e813bd2f3a97185ac6f238c63d6229a00b07974258'), + 'NIST CSRC Sample #3 (SHA3-256)' + ), + + # From https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/HMAC_SHA3-384.pdf + ( + '000102030405060708090a0b0c0d0e0f'\ + '101112131415161718191a1b1c1d1e1f' + '202122232425262728292a2b2c2d2e2f', + xl('Sample message for keylenblocklen'), + dict(SHA3_384='e5ae4c739f455279368ebf36d4f5354c95aa184c899d3870e460ebc288ef1f9470053f73f7c6da2a71bcaec38ce7d6ac'), + 'NIST CSRC Sample #3 (SHA3-384)' + ), + + # From https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/HMAC_SHA3-512.pdf + ( + '000102030405060708090a0b0c0d0e0f'\ + '101112131415161718191a1b1c1d1e1f'\ + '202122232425262728292a2b2c2d2e2f'\ + '303132333435363738393a3b3c3d3e3f', + xl('Sample message for keylenblocklen'), + dict(SHA3_512='5f464f5e5b7848e3885e49b2c385f0694985d0e38966242dc4a5fe3fea4b37d46b65ceced5dcf59438dd840bab22269f0ba7febdb9fcf74602a35666b2a32915'), + 'NIST CSRC Sample #3 (SHA3-512)' + ), + +] + + +class HMAC_Module_and_Instance_Test(unittest.TestCase): + """Test the HMAC construction and verify that it does not + matter if you initialize it with a hash module or + with an hash instance. + + See https://bugs.launchpad.net/pycrypto/+bug/1209399 + """ + + def __init__(self, hashmods): + """Initialize the test with a dictionary of hash modules + indexed by their names""" + + unittest.TestCase.__init__(self) + self.hashmods = hashmods + self.description = "" + + def shortDescription(self): + return self.description + + def runTest(self): + key = b"\x90\x91\x92\x93" * 4 + payload = b"\x00" * 100 + + for hashname, hashmod in self.hashmods.items(): + if hashmod is None: + continue + self.description = "Test HMAC in combination with " + hashname + one = HMAC.new(key, payload, hashmod).digest() + two = HMAC.new(key, payload, hashmod.new()).digest() + self.assertEqual(one, two) + + +class HMAC_None(unittest.TestCase): + + def runTest(self): + + key = b"\x04" * 20 + one = HMAC.new(key, b"", SHA1).digest() + two = HMAC.new(key, None, SHA1).digest() + self.assertEqual(one, two) + + +class ByteArrayTests(unittest.TestCase): + + def runTest(self): + + key = b"0" * 16 + data = b"\x00\x01\x02" + + # Data and key can be a bytearray (during initialization) + key_ba = bytearray(key) + data_ba = bytearray(data) + + h1 = HMAC.new(key, data) + h2 = HMAC.new(key_ba, data_ba) + key_ba[:1] = b'\xFF' + data_ba[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a bytearray (during operation) + key_ba = bytearray(key) + data_ba = bytearray(data) + + h1 = HMAC.new(key) + h2 = HMAC.new(key) + h1.update(data) + h2.update(data_ba) + data_ba[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + +class MemoryViewTests(unittest.TestCase): + + def runTest(self): + + key = b"0" * 16 + data = b"\x00\x01\x02" + + def get_mv_ro(data): + return memoryview(data) + + def get_mv_rw(data): + return memoryview(bytearray(data)) + + for get_mv in (get_mv_ro, get_mv_rw): + + # Data and key can be a memoryview (during initialization) + key_mv = get_mv(key) + data_mv = get_mv(data) + + h1 = HMAC.new(key, data) + h2 = HMAC.new(key_mv, data_mv) + if not data_mv.readonly: + key_mv[:1] 
= b'\xFF' + data_mv[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a memoryview (during operation) + data_mv = get_mv(data) + + h1 = HMAC.new(key) + h2 = HMAC.new(key) + h1.update(data) + h2.update(data_mv) + if not data_mv.readonly: + data_mv[:1] = b'\xFF' + self.assertEqual(h1.digest(), h2.digest()) + + +def get_tests(config={}): + global test_data + import types + from .common import make_mac_tests + + # A test vector contains multiple results, each one for a + # different hash algorithm. + # Here we expand each test vector into multiple ones, + # and add the relevant parameters that will be passed to new() + exp_test_data = [] + for row in test_data: + for modname in row[2].keys(): + t = list(row) + t[2] = row[2][modname] + t.append(dict(digestmod=globals()[modname])) + exp_test_data.append(t) + tests = make_mac_tests(HMAC, "HMAC", exp_test_data) + tests.append(HMAC_Module_and_Instance_Test(hash_modules)) + tests.append(HMAC_None()) + + tests.append(ByteArrayTests()) + tests.append(MemoryViewTests()) + + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_KMAC.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_KMAC.py new file mode 100644 index 000000000..8e9bf702a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_KMAC.py @@ -0,0 +1,346 @@ +import unittest +from binascii import unhexlify, hexlify + +from Crypto.Util.py3compat import tobytes +from Crypto.Util.strxor import strxor_c +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Hash import KMAC128, KMAC256 + + +class KMACTest(unittest.TestCase): + + def new(self, *args, **kwargs): + return self.KMAC.new(key=b'X' * (self.minimum_key_bits // 8), *args, **kwargs) + + def test_new_positive(self): + + key = b'X' * 32 + + h = self.new() + for new_func in self.KMAC.new, h.new: + + for dbytes in range(self.minimum_bytes, 128 + 1): + hobj = new_func(key=key, mac_len=dbytes) + self.assertEqual(hobj.digest_size, dbytes) + + digest1 = new_func(key=key, data=b"\x90").digest() + digest2 = new_func(key=key).update(b"\x90").digest() + self.assertEqual(digest1, digest2) + + new_func(data=b"A", key=key, custom=b"g") + + hobj = h.new(key=key) + self.assertEqual(hobj.digest_size, self.default_bytes) + + def test_new_negative(self): + + h = self.new() + for new_func in self.KMAC.new, h.new: + self.assertRaises(ValueError, new_func, key=b'X'*32, + mac_len=0) + self.assertRaises(ValueError, new_func, key=b'X'*32, + mac_len=self.minimum_bytes - 1) + self.assertRaises(TypeError, new_func, + key=u"string") + self.assertRaises(TypeError, new_func, + data=u"string") + + def test_default_digest_size(self): + digest = self.new(data=b'abc').digest() + self.assertEqual(len(digest), self.default_bytes) + + def test_update(self): + pieces = [b"\x0A" * 200, b"\x14" * 300] + h = self.new() + h.update(pieces[0]).update(pieces[1]) + digest = h.digest() + h = self.new() + h.update(pieces[0] + pieces[1]) + self.assertEqual(h.digest(), digest) + + def test_update_negative(self): + h = self.new() + self.assertRaises(TypeError, h.update, u"string") + + def test_digest(self): + h = self.new() + digest = h.digest() + + # hexdigest does not change the state + self.assertEqual(h.digest(), digest) + # digest returns a byte string + self.assertTrue(isinstance(digest, type(b"digest"))) + + def 
test_update_after_digest(self): + msg = b"rrrrttt" + + # Normally, update() cannot be done after digest() + h = self.new(mac_len=32, data=msg[:4]) + dig1 = h.digest() + self.assertRaises(TypeError, h.update, dig1) + + def test_hex_digest(self): + mac = self.new() + digest = mac.digest() + hexdigest = mac.hexdigest() + + # hexdigest is equivalent to digest + self.assertEqual(hexlify(digest), tobytes(hexdigest)) + # hexdigest does not change the state + self.assertEqual(mac.hexdigest(), hexdigest) + # hexdigest returns a string + self.assertTrue(isinstance(hexdigest, type("digest"))) + + def test_verify(self): + h = self.new() + mac = h.digest() + h.verify(mac) + wrong_mac = strxor_c(mac, 255) + self.assertRaises(ValueError, h.verify, wrong_mac) + + def test_hexverify(self): + h = self.new() + mac = h.hexdigest() + h.hexverify(mac) + self.assertRaises(ValueError, h.hexverify, "4556") + + def test_oid(self): + + oid = "2.16.840.1.101.3.4.2." + self.oid_variant + h = self.new() + self.assertEqual(h.oid, oid) + + def test_bytearray(self): + + key = b'0' * 32 + data = b"\x00\x01\x02" + + # Data and key can be a bytearray (during initialization) + key_ba = bytearray(key) + data_ba = bytearray(data) + + h1 = self.KMAC.new(data=data, key=key) + h2 = self.KMAC.new(data=data_ba, key=key_ba) + key_ba[:1] = b'\xFF' + data_ba[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a bytearray (during operation) + data_ba = bytearray(data) + + h1 = self.new() + h2 = self.new() + h1.update(data) + h2.update(data_ba) + data_ba[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + def test_memoryview(self): + + key = b'0' * 32 + data = b"\x00\x01\x02" + + def get_mv_ro(data): + return memoryview(data) + + def get_mv_rw(data): + return memoryview(bytearray(data)) + + for get_mv in (get_mv_ro, get_mv_rw): + + # Data and key can be a memoryview (during initialization) + key_mv = get_mv(key) + data_mv = get_mv(data) + + h1 = self.KMAC.new(data=data, key=key) + h2 = self.KMAC.new(data=data_mv, key=key_mv) + if not data_mv.readonly: + data_mv[:1] = b'\xFF' + key_mv[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + # Data can be a memoryview (during operation) + data_mv = get_mv(data) + + h1 = self.new() + h2 = self.new() + h1.update(data) + h2.update(data_mv) + if not data_mv.readonly: + data_mv[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + +class KMAC128Test(KMACTest): + + KMAC = KMAC128 + + minimum_key_bits = 128 + + minimum_bytes = 8 + default_bytes = 64 + + oid_variant = "19" + + +class KMAC256Test(KMACTest): + + KMAC = KMAC256 + + minimum_key_bits = 256 + + minimum_bytes = 8 + default_bytes = 64 + + oid_variant = "20" + + +class NISTExampleTestVectors(unittest.TestCase): + + # https://csrc.nist.gov/CSRC/media/Projects/Cryptographic-Standards-and-Guidelines/documents/examples/KMAC_samples.pdf + test_data = [ + ( + "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" + "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F", + "00 01 02 03", + "", + "E5 78 0B 0D 3E A6 F7 D3 A4 29 C5 70 6A A4 3A 00" + "FA DB D7 D4 96 28 83 9E 31 87 24 3F 45 6E E1 4E", + "Sample #1 NIST", + KMAC128 + ), + ( + "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" + "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F", + "00 01 02 03", + "My Tagged Application", + "3B 1F BA 96 3C D8 B0 B5 9E 8C 1A 6D 71 88 8B 71" + "43 65 1A F8 BA 0A 70 70 C0 97 9E 28 11 32 4A A5", + "Sample #2 NIST", + KMAC128 + ), + ( + "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" + "50 51 52 53 54 55 56 
57 58 59 5A 5B 5C 5D 5E 5F", + "00 01 02 03 04 05 06 07 08 09 0A 0B 0C 0D 0E 0F" + "10 11 12 13 14 15 16 17 18 19 1A 1B 1C 1D 1E 1F" + "20 21 22 23 24 25 26 27 28 29 2A 2B 2C 2D 2E 2F" + "30 31 32 33 34 35 36 37 38 39 3A 3B 3C 3D 3E 3F" + "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" + "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F" + "60 61 62 63 64 65 66 67 68 69 6A 6B 6C 6D 6E 6F" + "70 71 72 73 74 75 76 77 78 79 7A 7B 7C 7D 7E 7F" + "80 81 82 83 84 85 86 87 88 89 8A 8B 8C 8D 8E 8F" + "90 91 92 93 94 95 96 97 98 99 9A 9B 9C 9D 9E 9F" + "A0 A1 A2 A3 A4 A5 A6 A7 A8 A9 AA AB AC AD AE AF" + "B0 B1 B2 B3 B4 B5 B6 B7 B8 B9 BA BB BC BD BE BF" + "C0 C1 C2 C3 C4 C5 C6 C7", + "My Tagged Application", + "1F 5B 4E 6C CA 02 20 9E 0D CB 5C A6 35 B8 9A 15" + "E2 71 EC C7 60 07 1D FD 80 5F AA 38 F9 72 92 30", + "Sample #3 NIST", + KMAC128 + ), + ( + "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" + "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F", + "00 01 02 03", + "My Tagged Application", + "20 C5 70 C3 13 46 F7 03 C9 AC 36 C6 1C 03 CB 64" + "C3 97 0D 0C FC 78 7E 9B 79 59 9D 27 3A 68 D2 F7" + "F6 9D 4C C3 DE 9D 10 4A 35 16 89 F2 7C F6 F5 95" + "1F 01 03 F3 3F 4F 24 87 10 24 D9 C2 77 73 A8 DD", + "Sample #4 NIST", + KMAC256 + ), + ( + "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" + "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F", + "00 01 02 03 04 05 06 07 08 09 0A 0B 0C 0D 0E 0F" + "10 11 12 13 14 15 16 17 18 19 1A 1B 1C 1D 1E 1F" + "20 21 22 23 24 25 26 27 28 29 2A 2B 2C 2D 2E 2F" + "30 31 32 33 34 35 36 37 38 39 3A 3B 3C 3D 3E 3F" + "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" + "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F" + "60 61 62 63 64 65 66 67 68 69 6A 6B 6C 6D 6E 6F" + "70 71 72 73 74 75 76 77 78 79 7A 7B 7C 7D 7E 7F" + "80 81 82 83 84 85 86 87 88 89 8A 8B 8C 8D 8E 8F" + "90 91 92 93 94 95 96 97 98 99 9A 9B 9C 9D 9E 9F" + "A0 A1 A2 A3 A4 A5 A6 A7 A8 A9 AA AB AC AD AE AF" + "B0 B1 B2 B3 B4 B5 B6 B7 B8 B9 BA BB BC BD BE BF" + "C0 C1 C2 C3 C4 C5 C6 C7", + "", + "75 35 8C F3 9E 41 49 4E 94 97 07 92 7C EE 0A F2" + "0A 3F F5 53 90 4C 86 B0 8F 21 CC 41 4B CF D6 91" + "58 9D 27 CF 5E 15 36 9C BB FF 8B 9A 4C 2E B1 78" + "00 85 5D 02 35 FF 63 5D A8 25 33 EC 6B 75 9B 69", + "Sample #5 NIST", + KMAC256 + ), + ( + "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" + "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F", + "00 01 02 03 04 05 06 07 08 09 0A 0B 0C 0D 0E 0F" + "10 11 12 13 14 15 16 17 18 19 1A 1B 1C 1D 1E 1F" + "20 21 22 23 24 25 26 27 28 29 2A 2B 2C 2D 2E 2F" + "30 31 32 33 34 35 36 37 38 39 3A 3B 3C 3D 3E 3F" + "40 41 42 43 44 45 46 47 48 49 4A 4B 4C 4D 4E 4F" + "50 51 52 53 54 55 56 57 58 59 5A 5B 5C 5D 5E 5F" + "60 61 62 63 64 65 66 67 68 69 6A 6B 6C 6D 6E 6F" + "70 71 72 73 74 75 76 77 78 79 7A 7B 7C 7D 7E 7F" + "80 81 82 83 84 85 86 87 88 89 8A 8B 8C 8D 8E 8F" + "90 91 92 93 94 95 96 97 98 99 9A 9B 9C 9D 9E 9F" + "A0 A1 A2 A3 A4 A5 A6 A7 A8 A9 AA AB AC AD AE AF" + "B0 B1 B2 B3 B4 B5 B6 B7 B8 B9 BA BB BC BD BE BF" + "C0 C1 C2 C3 C4 C5 C6 C7", + "My Tagged Application", + "B5 86 18 F7 1F 92 E1 D5 6C 1B 8C 55 DD D7 CD 18" + "8B 97 B4 CA 4D 99 83 1E B2 69 9A 83 7D A2 E4 D9" + "70 FB AC FD E5 00 33 AE A5 85 F1 A2 70 85 10 C3" + "2D 07 88 08 01 BD 18 28 98 FE 47 68 76 FC 89 65", + "Sample #6 NIST", + KMAC256 + ), + ] + + def setUp(self): + td = [] + for key, data, custom, mac, text, module in self.test_data: + ni = ( + unhexlify(key.replace(" ", "")), + unhexlify(data.replace(" ", "")), + custom.encode(), + unhexlify(mac.replace(" ", "")), + text, + module + ) + 
td.append(ni) + self.test_data = td + + def runTest(self): + + for key, data, custom, mac, text, module in self.test_data: + h = module.new(data=data, key=key, custom=custom, mac_len=len(mac)) + mac_tag = h.digest() + self.assertEqual(mac_tag, mac, msg=text) + + +def get_tests(config={}): + tests = [] + + tests += list_test_cases(KMAC128Test) + tests += list_test_cases(KMAC256Test) + tests.append(NISTExampleTestVectors()) + + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_KangarooTwelve.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_KangarooTwelve.py new file mode 100644 index 000000000..fe161f06e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_KangarooTwelve.py @@ -0,0 +1,367 @@ +# =================================================================== +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +"""Self-test suite for Crypto.Hash.KangarooTwelve""" + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Hash import KangarooTwelve as K12 +from Crypto.Util.py3compat import b, bchr + + +class KangarooTwelveTest(unittest.TestCase): + + def test_length_encode(self): + self.assertEqual(K12._length_encode(0), b'\x00') + self.assertEqual(K12._length_encode(12), b'\x0C\x01') + self.assertEqual(K12._length_encode(65538), b'\x01\x00\x02\x03') + + def test_new_positive(self): + + xof1 = K12.new() + xof2 = K12.new(data=b("90")) + xof3 = K12.new().update(b("90")) + + self.assertNotEqual(xof1.read(10), xof2.read(10)) + xof3.read(10) + self.assertEqual(xof2.read(10), xof3.read(10)) + + xof1 = K12.new() + ref = xof1.read(10) + xof2 = K12.new(custom=b("")) + xof3 = K12.new(custom=b("foo")) + + self.assertEqual(ref, xof2.read(10)) + self.assertNotEqual(ref, xof3.read(10)) + + xof1 = K12.new(custom=b("foo")) + xof2 = K12.new(custom=b("foo"), data=b("90")) + xof3 = K12.new(custom=b("foo")).update(b("90")) + + self.assertNotEqual(xof1.read(10), xof2.read(10)) + xof3.read(10) + self.assertEqual(xof2.read(10), xof3.read(10)) + + def test_update(self): + pieces = [bchr(10) * 200, bchr(20) * 300] + h = K12.new() + h.update(pieces[0]).update(pieces[1]) + digest = h.read(10) + h = K12.new() + h.update(pieces[0] + pieces[1]) + self.assertEqual(h.read(10), digest) + + def test_update_negative(self): + h = K12.new() + self.assertRaises(TypeError, h.update, u"string") + + def test_digest(self): + h = K12.new() + digest = h.read(90) + + # read returns a byte string of the right length + self.assertTrue(isinstance(digest, type(b("digest")))) + self.assertEqual(len(digest), 90) + + def test_update_after_read(self): + mac = K12.new() + mac.update(b("rrrr")) + mac.read(90) + self.assertRaises(TypeError, mac.update, b("ttt")) + + +def txt2bin(txt): + clean = txt.replace(" ", "").replace("\n", "").replace("\r", "") + return unhexlify(clean) + + +def ptn(n): + res = bytearray(n) + pattern = b"".join([bchr(x) for x in range(0, 0xFB)]) + for base in range(0, n - 0xFB, 0xFB): + res[base:base + 0xFB] = pattern + remain = n % 0xFB + if remain: + base = (n // 0xFB) * 0xFB + res[base:] = pattern[:remain] + assert(len(res) == n) + return res + + +def chunked(source, size): + for i in range(0, len(source), size): + yield source[i:i+size] + + +class KangarooTwelveTV(unittest.TestCase): + + # https://github.com/XKCP/XKCP/blob/master/tests/TestVectors/KangarooTwelve.txt + + def test_zero_1(self): + tv = """1A C2 D4 50 FC 3B 42 05 D1 9D A7 BF CA 1B 37 51 + 3C 08 03 57 7A C7 16 7F 06 FE 2C E1 F0 EF 39 E5""" + + btv = txt2bin(tv) + res = K12.new().read(32) + self.assertEqual(res, btv) + + def test_zero_2(self): + tv = """1A C2 D4 50 FC 3B 42 05 D1 9D A7 BF CA 1B 37 51 + 3C 08 03 57 7A C7 16 7F 06 FE 2C E1 F0 EF 39 E5 + 42 69 C0 56 B8 C8 2E 48 27 60 38 B6 D2 92 96 6C + C0 7A 3D 46 45 27 2E 31 FF 38 50 81 39 EB 0A 71""" + + btv = txt2bin(tv) + res = K12.new().read(64) + self.assertEqual(res, btv) + + def test_zero_3(self): + tv = """E8 DC 56 36 42 F7 22 8C 84 68 4C 89 84 05 D3 A8 + 34 79 91 58 C0 79 B1 28 80 27 7A 1D 28 E2 FF 6D""" + + btv = txt2bin(tv) + res = K12.new().read(10032) + self.assertEqual(res[-32:], btv) + + def test_ptn_1(self): + tv = """2B DA 92 45 0E 8B 14 7F 8A 7C B6 29 E7 84 A0 58 + EF CA 7C F7 D8 21 8E 02 D3 45 DF AA 65 24 4A 1F""" + + btv = txt2bin(tv) + res = 
K12.new(data=ptn(1)).read(32) + self.assertEqual(res, btv) + + def test_ptn_17(self): + tv = """6B F7 5F A2 23 91 98 DB 47 72 E3 64 78 F8 E1 9B + 0F 37 12 05 F6 A9 A9 3A 27 3F 51 DF 37 12 28 88""" + + btv = txt2bin(tv) + res = K12.new(data=ptn(17)).read(32) + self.assertEqual(res, btv) + + def test_ptn_17_2(self): + tv = """0C 31 5E BC DE DB F6 14 26 DE 7D CF 8F B7 25 D1 + E7 46 75 D7 F5 32 7A 50 67 F3 67 B1 08 EC B6 7C""" + + btv = txt2bin(tv) + res = K12.new(data=ptn(17**2)).read(32) + self.assertEqual(res, btv) + + def test_ptn_17_3(self): + tv = """CB 55 2E 2E C7 7D 99 10 70 1D 57 8B 45 7D DF 77 + 2C 12 E3 22 E4 EE 7F E4 17 F9 2C 75 8F 0D 59 D0""" + + btv = txt2bin(tv) + res = K12.new(data=ptn(17**3)).read(32) + self.assertEqual(res, btv) + + def test_ptn_17_4(self): + tv = """87 01 04 5E 22 20 53 45 FF 4D DA 05 55 5C BB 5C + 3A F1 A7 71 C2 B8 9B AE F3 7D B4 3D 99 98 B9 FE""" + + btv = txt2bin(tv) + data = ptn(17**4) + + # All at once + res = K12.new(data=data).read(32) + self.assertEqual(res, btv) + + # Byte by byte + k12 = K12.new() + for x in data: + k12.update(bchr(x)) + res = k12.read(32) + self.assertEqual(res, btv) + + # Chunks of various prime sizes + for chunk_size in (13, 17, 19, 23, 31): + k12 = K12.new() + for x in chunked(data, chunk_size): + k12.update(x) + res = k12.read(32) + self.assertEqual(res, btv) + + def test_ptn_17_5(self): + tv = """84 4D 61 09 33 B1 B9 96 3C BD EB 5A E3 B6 B0 5C + C7 CB D6 7C EE DF 88 3E B6 78 A0 A8 E0 37 16 82""" + + btv = txt2bin(tv) + data = ptn(17**5) + + # All at once + res = K12.new(data=data).read(32) + self.assertEqual(res, btv) + + # Chunks + k12 = K12.new() + for chunk in chunked(data, 8192): + k12.update(chunk) + res = k12.read(32) + self.assertEqual(res, btv) + + def test_ptn_17_6(self): + tv = """3C 39 07 82 A8 A4 E8 9F A6 36 7F 72 FE AA F1 32 + 55 C8 D9 58 78 48 1D 3C D8 CE 85 F5 8E 88 0A F8""" + + btv = txt2bin(tv) + data = ptn(17**6) + + # All at once + res = K12.new(data=data).read(32) + self.assertEqual(res, btv) + + def test_ptn_c_1(self): + tv = """FA B6 58 DB 63 E9 4A 24 61 88 BF 7A F6 9A 13 30 + 45 F4 6E E9 84 C5 6E 3C 33 28 CA AF 1A A1 A5 83""" + + btv = txt2bin(tv) + custom = ptn(1) + + # All at once + res = K12.new(custom=custom).read(32) + self.assertEqual(res, btv) + + def test_ptn_c_41(self): + tv = """D8 48 C5 06 8C ED 73 6F 44 62 15 9B 98 67 FD 4C + 20 B8 08 AC C3 D5 BC 48 E0 B0 6B A0 A3 76 2E C4""" + + btv = txt2bin(tv) + custom = ptn(41) + + # All at once + res = K12.new(data=b'\xFF', custom=custom).read(32) + self.assertEqual(res, btv) + + def test_ptn_c_41_2(self): + tv = """C3 89 E5 00 9A E5 71 20 85 4C 2E 8C 64 67 0A C0 + 13 58 CF 4C 1B AF 89 44 7A 72 42 34 DC 7C ED 74""" + + btv = txt2bin(tv) + custom = ptn(41**2) + + # All at once + res = K12.new(data=b'\xFF' * 3, custom=custom).read(32) + self.assertEqual(res, btv) + + def test_ptn_c_41_3(self): + tv = """75 D2 F8 6A 2E 64 45 66 72 6B 4F BC FC 56 57 B9 + DB CF 07 0C 7B 0D CA 06 45 0A B2 91 D7 44 3B CF""" + + btv = txt2bin(tv) + custom = ptn(41**3) + + # All at once + res = K12.new(data=b'\xFF' * 7, custom=custom).read(32) + self.assertEqual(res, btv) + + # https://datatracker.ietf.org/doc/draft-irtf-cfrg-kangarootwelve/ + + def test_ptn_8191(self): + tv = """1B 57 76 36 F7 23 64 3E 99 0C C7 D6 A6 59 83 74 + 36 FD 6A 10 36 26 60 0E B8 30 1C D1 DB E5 53 D6""" + + btv = txt2bin(tv) + + # All at once + res = K12.new(data=ptn(8191)).read(32) + self.assertEqual(res, btv) + + def test_ptn_8192(self): + tv = """48 F2 56 F6 77 2F 9E DF B6 A8 B6 61 EC 92 DC 93 + B9 
5E BD 05 A0 8A 17 B3 9A E3 49 08 70 C9 26 C3""" + + btv = txt2bin(tv) + + # All at once + res = K12.new(data=ptn(8192)).read(32) + self.assertEqual(res, btv) + + def test_ptn_8192_8189(self): + tv = """3E D1 2F 70 FB 05 DD B5 86 89 51 0A B3 E4 D2 3C + 6C 60 33 84 9A A0 1E 1D 8C 22 0A 29 7F ED CD 0B""" + + btv = txt2bin(tv) + + # All at once + res = K12.new(data=ptn(8192), custom=ptn(8189)).read(32) + self.assertEqual(res, btv) + + def test_ptn_8192_8190(self): + tv = """6A 7C 1B 6A 5C D0 D8 C9 CA 94 3A 4A 21 6C C6 46 + 04 55 9A 2E A4 5F 78 57 0A 15 25 3D 67 BA 00 AE""" + + btv = txt2bin(tv) + + # All at once + res = K12.new(data=ptn(8192), custom=ptn(8190)).read(32) + self.assertEqual(res, btv) + + ### + + def test_1(self): + tv = "fd608f91d81904a9916e78a18f65c157a78d63f93d8f6367db0524526a5ea2bb" + + btv = txt2bin(tv) + res = K12.new(data=b'', custom=ptn(100)).read(32) + self.assertEqual(res, btv) + + def test_2(self): + tv4 = "5a4ec9a649f81916d4ce1553492962f7868abf8dd1ceb2f0cb3682ea95cda6a6" + tv3 = "441688fe4fe4ae9425eb3105eb445eb2b3a6f67b66eff8e74ebfbc49371f6d4c" + tv2 = "17269a57759af0214c84a0fd9bc851f4d95f80554cfed4e7da8a6ee1ff080131" + tv1 = "33826990c09dc712ba7224f0d9be319e2720de95a4c1afbd2211507dae1c703a" + tv0 = "9f4d3aba908ddc096e4d3a71da954f917b9752f05052b9d26d916a6fbc75bf3e" + + res = K12.new(data=b'A' * (8192 - 4), custom=b'B').read(32) + self.assertEqual(res, txt2bin(tv4)) + + res = K12.new(data=b'A' * (8192 - 3), custom=b'B').read(32) + self.assertEqual(res, txt2bin(tv3)) + + res = K12.new(data=b'A' * (8192 - 2), custom=b'B').read(32) + self.assertEqual(res, txt2bin(tv2)) + + res = K12.new(data=b'A' * (8192 - 1), custom=b'B').read(32) + self.assertEqual(res, txt2bin(tv1)) + + res = K12.new(data=b'A' * (8192 - 0), custom=b'B').read(32) + self.assertEqual(res, txt2bin(tv0)) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(KangarooTwelveTest) + tests += list_test_cases(KangarooTwelveTV) + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_MD2.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_MD2.py new file mode 100644 index 000000000..93751687f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_MD2.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/MD2.py: Self-test for the MD2 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.Hash.MD2""" + +from Crypto.Util.py3compat import * + +# This is a list of (expected_result, input[, description]) tuples. +test_data = [ + # Test vectors from RFC 1319 + ('8350e5a3e24c153df2275c9f80692773', '', "'' (empty string)"), + ('32ec01ec4a6dac72c0ab96fb34c0b5d1', 'a'), + ('da853b0d3f88d99b30283a69e6ded6bb', 'abc'), + ('ab4f496bfb2a530b219ff33031fe06b0', 'message digest'), + + ('4e8ddff3650292ab5a4108c3aa47940b', 'abcdefghijklmnopqrstuvwxyz', + 'a-z'), + + ('da33def2a42df13975352846c30338cd', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'A-Z, a-z, 0-9'), + + ('d5976f79d83d3a0dc9806c3c66f3efd8', + '1234567890123456789012345678901234567890123456' + + '7890123456789012345678901234567890', + "'1234567890' * 8"), +] + +def get_tests(config={}): + from Crypto.Hash import MD2 + from .common import make_hash_tests + return make_hash_tests(MD2, "MD2", test_data, + digest_size=16, + oid="1.2.840.113549.2.2") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_MD4.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_MD4.py new file mode 100644 index 000000000..17b48a778 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_MD4.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/MD4.py: Self-test for the MD4 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.MD4""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (expected_result, input[, description]) tuples. 
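+# A minimal sketch checking one RFC 1320 vector below directly against
+# Crypto.Hash.MD4; the helper name is purely illustrative.
+def _example_md4_abc():
+    from Crypto.Hash import MD4
+    assert MD4.new(b'abc').hexdigest() == 'a448017aaf21d8525fc10ae87aa6729d'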
+test_data = [ + # Test vectors from RFC 1320 + ('31d6cfe0d16ae931b73c59d7e0c089c0', '', "'' (empty string)"), + ('bde52cb31de33e46245e05fbdbd6fb24', 'a'), + ('a448017aaf21d8525fc10ae87aa6729d', 'abc'), + ('d9130a8164549fe818874806e1c7014b', 'message digest'), + + ('d79e1c308aa5bbcdeea8ed63df412da9', 'abcdefghijklmnopqrstuvwxyz', + 'a-z'), + + ('043f8582f241db351ce627e153e7f0e4', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'A-Z, a-z, 0-9'), + + ('e33b4ddc9c38f2199c3e7b164fcc0536', + '1234567890123456789012345678901234567890123456' + + '7890123456789012345678901234567890', + "'1234567890' * 8"), +] + +def get_tests(config={}): + from Crypto.Hash import MD4 + from .common import make_hash_tests + return make_hash_tests(MD4, "MD4", test_data, + digest_size=16, + oid="1.2.840.113549.2.4") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_MD5.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_MD5.py new file mode 100644 index 000000000..830ace731 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_MD5.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/MD5.py: Self-test for the MD5 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.MD5""" + +from Crypto.Util.py3compat import * +from Crypto.Hash import MD5 +from binascii import unhexlify +import unittest +from Crypto.SelfTest.st_common import list_test_cases + + +# This is a list of (expected_result, input[, description]) tuples. 
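+# A minimal sketch checking one RFC 1321 vector below directly against
+# Crypto.Hash.MD5; the helper name is purely illustrative.
+def _example_md5_abc():
+    from Crypto.Hash import MD5
+    assert MD5.new(b'abc').hexdigest() == '900150983cd24fb0d6963f7d28e17f72'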
+test_data = [ + # Test vectors from RFC 1321 + ('d41d8cd98f00b204e9800998ecf8427e', '', "'' (empty string)"), + ('0cc175b9c0f1b6a831c399e269772661', 'a'), + ('900150983cd24fb0d6963f7d28e17f72', 'abc'), + ('f96b697d7cb7938d525a2f31aaf161d0', 'message digest'), + + ('c3fcd3d76192e4007dfb496cca67e13b', 'abcdefghijklmnopqrstuvwxyz', + 'a-z'), + + ('d174ab98d277d9f5a5611c2c9f419d9f', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'A-Z, a-z, 0-9'), + + ('57edf4a22be3c955ac49da2e2107b67a', + '1234567890123456789012345678901234567890123456' + + '7890123456789012345678901234567890', + "'1234567890' * 8"), + + # https://www.cosic.esat.kuleuven.be/nessie/testvectors/hash/md5/Md5-128.unverified.test-vectors + ('57EDF4A22BE3C955AC49DA2E2107B67A', '1234567890' * 8, 'Set 1, vector #7'), + ('7707D6AE4E027C70EEA2A935C2296F21', 'a'*1000000, 'Set 1, vector #8'), +] + + +class Md5IterTest(unittest.TestCase): + + def runTest(self): + message = b("\x00") * 16 + result1 = "4AE71336E44BF9BF79D2752E234818A5".lower() + result2 = "1A83F51285E4D89403D00C46EF8508FE".lower() + + h = MD5.new(message) + message = h.digest() + self.assertEqual(h.hexdigest(), result1) + + for _ in range(99999): + h = MD5.new(message) + message = h.digest() + + self.assertEqual(h.hexdigest(), result2) + + +def get_tests(config={}): + from .common import make_hash_tests + + tests = make_hash_tests(MD5, "MD5", test_data, + digest_size=16, + oid="1.2.840.113549.2.5") + if config.get('slow_tests'): + tests += [ Md5IterTest() ] + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_Poly1305.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_Poly1305.py new file mode 100644 index 000000000..0612d4eda --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_Poly1305.py @@ -0,0 +1,542 @@ +# +# SelfTest/Hash/test_Poly1305.py: Self-test for the Poly1305 module +# +# =================================================================== +# +# Copyright (c) 2018, Helder Eijs +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +"""Self-test suite for Crypto.Hash._Poly1305""" + +import json +import unittest +from binascii import unhexlify, hexlify + +from .common import make_mac_tests +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Hash import Poly1305 +from Crypto.Cipher import AES, ChaCha20 + +from Crypto.Util.py3compat import tobytes +from Crypto.Util.strxor import strxor_c + +# This is a list of (r+s keypair, data, result, description, keywords) tuples. +test_data_basic = [ + ( + "85d6be7857556d337f4452fe42d506a80103808afb0db2fd4abff6af4149f51b", + hexlify(b"Cryptographic Forum Research Group").decode(), + "a8061dc1305136c6c22b8baf0c0127a9", + "RFC7539" + ), + ( + "746869732069732033322d62797465206b657920666f7220506f6c7931333035", + "0000000000000000000000000000000000000000000000000000000000000000", + "49ec78090e481ec6c26b33b91ccc0307", + "https://tools.ietf.org/html/draft-agl-tls-chacha20poly1305-00#section-7 A", + ), + ( + "746869732069732033322d62797465206b657920666f7220506f6c7931333035", + "48656c6c6f20776f726c6421", + "a6f745008f81c916a20dcc74eef2b2f0", + "https://tools.ietf.org/html/draft-agl-tls-chacha20poly1305-00#section-7 B", + ), + ( + "746869732069732033322d62797465206b657920666f7220506f6c7931333035", + "", + "6b657920666f7220506f6c7931333035", + "Generated with pure Python", + ), + ( + "746869732069732033322d62797465206b657920666f7220506f6c7931333035", + "FF", + "f7e4e0ef4c46d106219da3d1bdaeb3ff", + "Generated with pure Python", + ), + ( + "746869732069732033322d62797465206b657920666f7220506f6c7931333035", + "FF00", + "7471eceeb22988fc936da1d6e838b70e", + "Generated with pure Python", + ), + ( + "746869732069732033322d62797465206b657920666f7220506f6c7931333035", + "AA" * 17, + "32590bc07cb2afaccca3f67f122975fe", + "Generated with pure Python", + ), + ( + "00" * 32, + "00" * 64, + "00" * 16, + "RFC7539 A.3 #1", + ), + ( + "0000000000000000000000000000000036e5f6b5c5e06070f0efca96227a863e", + hexlify( + b"Any submission t" + b"o the IETF inten" + b"ded by the Contr" + b"ibutor for publi" + b"cation as all or" + b" part of an IETF" + b" Internet-Draft " + b"or RFC and any s" + b"tatement made wi" + b"thin the context" + b" of an IETF acti" + b"vity is consider" + b"ed an \"IETF Cont" + b"ribution\". Such " + b"statements inclu" + b"de oral statemen" + b"ts in IETF sessi" + b"ons, as well as " + b"written and elec" + b"tronic communica" + b"tions made at an" + b"y time or place," + b" which are addre" + b"ssed to").decode(), + "36e5f6b5c5e06070f0efca96227a863e", + "RFC7539 A.3 #2", + ), + ( + "36e5f6b5c5e06070f0efca96227a863e00000000000000000000000000000000", + hexlify( + b"Any submission t" + b"o the IETF inten" + b"ded by the Contr" + b"ibutor for publi" + b"cation as all or" + b" part of an IETF" + b" Internet-Draft " + b"or RFC and any s" + b"tatement made wi" + b"thin the context" + b" of an IETF acti" + b"vity is consider" + b"ed an \"IETF Cont" + b"ribution\". 
Such " + b"statements inclu" + b"de oral statemen" + b"ts in IETF sessi" + b"ons, as well as " + b"written and elec" + b"tronic communica" + b"tions made at an" + b"y time or place," + b" which are addre" + b"ssed to").decode(), + "f3477e7cd95417af89a6b8794c310cf0", + "RFC7539 A.3 #3", + ), + ( + "1c9240a5eb55d38af333888604f6b5f0473917c1402b80099dca5cbc207075c0", + "2754776173206272696c6c69672c2061" + "6e642074686520736c6974687920746f" + "7665730a446964206779726520616e64" + "2067696d626c6520696e207468652077" + "6162653a0a416c6c206d696d73792077" + "6572652074686520626f726f676f7665" + "732c0a416e6420746865206d6f6d6520" + "7261746873206f757467726162652e", + "4541669a7eaaee61e708dc7cbcc5eb62", + "RFC7539 A.3 #4", + ), + ( + "02" + "00" * 31, + "FF" * 16, + "03" + "00" * 15, + "RFC7539 A.3 #5", + ), + ( + "02" + "00" * 15 + "FF" * 16, + "02" + "00" * 15, + "03" + "00" * 15, + "RFC7539 A.3 #6", + ), + ( + "01" + "00" * 31, + "FF" * 16 + "F0" + "FF" * 15 + "11" + "00" * 15, + "05" + "00" * 15, + "RFC7539 A.3 #7", + ), + ( + "01" + "00" * 31, + "FF" * 16 + "FB" + "FE" * 15 + "01" * 16, + "00" * 16, + "RFC7539 A.3 #8", + ), + ( + "02" + "00" * 31, + "FD" + "FF" * 15, + "FA" + "FF" * 15, + "RFC7539 A.3 #9", + ), + ( + "01 00 00 00 00 00 00 00 04 00 00 00 00 00 00 00" + "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00", + "E3 35 94 D7 50 5E 43 B9 00 00 00 00 00 00 00 00" + "33 94 D7 50 5E 43 79 CD 01 00 00 00 00 00 00 00" + "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00" + "01 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00", + "14 00 00 00 00 00 00 00 55 00 00 00 00 00 00 00", + "RFC7539 A.3 #10", + ), + ( + "01 00 00 00 00 00 00 00 04 00 00 00 00 00 00 00" + "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00", + "E3 35 94 D7 50 5E 43 B9 00 00 00 00 00 00 00 00" + "33 94 D7 50 5E 43 79 CD 01 00 00 00 00 00 00 00" + "00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00", + "13" + "00" * 15, + "RFC7539 A.3 #11", + ), +] + +# This is a list of (key(k+r), data, result, description, keywords) tuples. 
+test_data_aes = [ + ( + "ec074c835580741701425b623235add6851fc40c3467ac0be05cc20404f3f700", + "f3f6", + "f4c633c3044fc145f84f335cb81953de", + "http://cr.yp.to/mac/poly1305-20050329.pdf", + { 'cipher':AES, 'nonce':unhexlify("fb447350c4e868c52ac3275cf9d4327e") } + ), + ( + "75deaa25c09f208e1dc4ce6b5cad3fbfa0f3080000f46400d0c7e9076c834403", + "", + "dd3fab2251f11ac759f0887129cc2ee7", + "http://cr.yp.to/mac/poly1305-20050329.pdf", + { 'cipher':AES, 'nonce':unhexlify("61ee09218d29b0aaed7e154a2c5509cc") } + ), + ( + "6acb5f61a7176dd320c5c1eb2edcdc7448443d0bb0d21109c89a100b5ce2c208", + "663cea190ffb83d89593f3f476b6bc24" + "d7e679107ea26adb8caf6652d0656136", + "0ee1c16bb73f0f4fd19881753c01cdbe", + "http://cr.yp.to/mac/poly1305-20050329.pdf", + { 'cipher':AES, 'nonce':unhexlify("ae212a55399729595dea458bc621ff0e") } + ), + ( + "e1a5668a4d5b66a5f68cc5424ed5982d12976a08c4426d0ce8a82407c4f48207", + "ab0812724a7f1e342742cbed374d94d1" + "36c6b8795d45b3819830f2c04491faf0" + "990c62e48b8018b2c3e4a0fa3134cb67" + "fa83e158c994d961c4cb21095c1bf9", + "5154ad0d2cb26e01274fc51148491f1b", + "http://cr.yp.to/mac/poly1305-20050329.pdf", + { 'cipher':AES, 'nonce':unhexlify("9ae831e743978d3a23527c7128149e3a") } + ), +] + +test_data_chacha20 = [ + ( + "00" * 32, + "FF" * 15, + "13cc5bbadc36b03a5163928f0bcb65aa", + "RFC7539 A.4 #1", + { 'cipher':ChaCha20, 'nonce':unhexlify("00" * 12) } + ), + ( + "00" * 31 + "01", + "FF" * 15, + "0baf33c1d6df211bdd50a6767e98e00a", + "RFC7539 A.4 #2", + { 'cipher':ChaCha20, 'nonce':unhexlify("00" * 11 + "02") } + ), + ( + "1c 92 40 a5 eb 55 d3 8a f3 33 88 86 04 f6 b5 f0" + "47 39 17 c1 40 2b 80 09 9d ca 5c bc 20 70 75 c0", + "FF" * 15, + "e8b4c6db226cd8939e65e02eebf834ce", + "RFC7539 A.4 #3", + { 'cipher':ChaCha20, 'nonce':unhexlify("00" * 11 + "02") } + ), + ( + "1c 92 40 a5 eb 55 d3 8a f3 33 88 86 04 f6 b5 f0" + "47 39 17 c1 40 2b 80 09 9d ca 5c bc 20 70 75 c0", + "f3 33 88 86 00 00 00 00 00 00 4e 91 00 00 00 00" + "64 a0 86 15 75 86 1a f4 60 f0 62 c7 9b e6 43 bd" + "5e 80 5c fd 34 5c f3 89 f1 08 67 0a c7 6c 8c b2" + "4c 6c fc 18 75 5d 43 ee a0 9e e9 4e 38 2d 26 b0" + "bd b7 b7 3c 32 1b 01 00 d4 f0 3b 7f 35 58 94 cf" + "33 2f 83 0e 71 0b 97 ce 98 c8 a8 4a bd 0b 94 81" + "14 ad 17 6e 00 8d 33 bd 60 f9 82 b1 ff 37 c8 55" + "97 97 a0 6e f4 f0 ef 61 c1 86 32 4e 2b 35 06 38" + "36 06 90 7b 6a 7c 02 b0 f9 f6 15 7b 53 c8 67 e4" + "b9 16 6c 76 7b 80 4d 46 a5 9b 52 16 cd e7 a4 e9" + "90 40 c5 a4 04 33 22 5e e2 82 a1 b0 a0 6c 52 3e" + "af 45 34 d7 f8 3f a1 15 5b 00 47 71 8c bc 54 6a" + "0d 07 2b 04 b3 56 4e ea 1b 42 22 73 f5 48 27 1a" + "0b b2 31 60 53 fa 76 99 19 55 eb d6 31 59 43 4e" + "ce bb 4e 46 6d ae 5a 10 73 a6 72 76 27 09 7a 10" + "49 e6 17 d9 1d 36 10 94 fa 68 f0 ff 77 98 71 30" + "30 5b ea ba 2e da 04 df 99 7b 71 4d 6c 6f 2c 29" + "a6 ad 5c b4 02 2b 02 70 9b 00 00 00 00 00 00 00" + "0c 00 00 00 00 00 00 00 09 01 00 00 00 00 00 00", + "ee ad 9d 67 89 0c bb 22 39 23 36 fe a1 85 1f 38", + "RFC7539 A.5", + { 'cipher':ChaCha20, 'nonce':unhexlify("000000000102030405060708") } + ), +] + + +class Poly1305Test_AES(unittest.TestCase): + + key = b'\x11' * 32 + + def test_new_positive(self): + + data = b'r' * 100 + + h1 = Poly1305.new(key=self.key, cipher=AES) + self.assertEqual(h1.digest_size, 16) + self.assertEqual(len(h1.nonce), 16) + d1 = h1.update(data).digest() + self.assertEqual(len(d1), 16) + + h2 = Poly1305.new(key=self.key, nonce=h1.nonce, data=data, cipher=AES) + d2 = h2.digest() + self.assertEqual(h1.nonce, h2.nonce) + self.assertEqual(d1, d2) + + def test_new_negative(self): + from 
Crypto.Cipher import DES3 + + self.assertRaises(ValueError, Poly1305.new, key=self.key[:31], cipher=AES) + self.assertRaises(ValueError, Poly1305.new, key=self.key, cipher=DES3) + self.assertRaises(ValueError, Poly1305.new, key=self.key, nonce=b'1' * 15, cipher=AES) + self.assertRaises(TypeError, Poly1305.new, key=u"2" * 32, cipher=AES) + self.assertRaises(TypeError, Poly1305.new, key=self.key, data=u"2" * 100, cipher=AES) + + def test_update(self): + pieces = [b"\x0A" * 200, b"\x14" * 300] + h1 = Poly1305.new(key=self.key, cipher=AES) + h1.update(pieces[0]).update(pieces[1]) + d1 = h1.digest() + + h2 = Poly1305.new(key=self.key, cipher=AES, nonce=h1.nonce) + h2.update(pieces[0] + pieces[1]) + d2 = h2.digest() + self.assertEqual(d1, d2) + + def test_update_negative(self): + h = Poly1305.new(key=self.key, cipher=AES) + self.assertRaises(TypeError, h.update, u"string") + + def test_digest(self): + h = Poly1305.new(key=self.key, cipher=AES) + digest = h.digest() + + # hexdigest does not change the state + self.assertEqual(h.digest(), digest) + # digest returns a byte string + self.assertTrue(isinstance(digest, type(b"digest"))) + + def test_update_after_digest(self): + msg=b"rrrrttt" + + # Normally, update() cannot be done after digest() + h = Poly1305.new(key=self.key, data=msg[:4], cipher=AES) + h.digest() + self.assertRaises(TypeError, h.update, msg[4:]) + + def test_hex_digest(self): + mac = Poly1305.new(key=self.key, cipher=AES) + digest = mac.digest() + hexdigest = mac.hexdigest() + + # hexdigest is equivalent to digest + self.assertEqual(hexlify(digest), tobytes(hexdigest)) + # hexdigest does not change the state + self.assertEqual(mac.hexdigest(), hexdigest) + # hexdigest returns a string + self.assertTrue(isinstance(hexdigest, type("digest"))) + + def test_verify(self): + h = Poly1305.new(key=self.key, cipher=AES) + mac = h.digest() + h.verify(mac) + wrong_mac = strxor_c(mac, 255) + self.assertRaises(ValueError, h.verify, wrong_mac) + + def test_hexverify(self): + h = Poly1305.new(key=self.key, cipher=AES) + mac = h.hexdigest() + h.hexverify(mac) + self.assertRaises(ValueError, h.hexverify, "4556") + + def test_bytearray(self): + + data = b"\x00\x01\x02" + h0 = Poly1305.new(key=self.key, data=data, cipher=AES) + d_ref = h0.digest() + + # Data and key can be a bytearray (during initialization) + key_ba = bytearray(self.key) + data_ba = bytearray(data) + + h1 = Poly1305.new(key=self.key, data=data, cipher=AES, nonce=h0.nonce) + h2 = Poly1305.new(key=key_ba, data=data_ba, cipher=AES, nonce=h0.nonce) + key_ba[:1] = b'\xFF' + data_ba[:1] = b'\xEE' + + self.assertEqual(h1.digest(), d_ref) + self.assertEqual(h2.digest(), d_ref) + + # Data can be a bytearray (during operation) + data_ba = bytearray(data) + + h1 = Poly1305.new(key=self.key, cipher=AES) + h2 = Poly1305.new(key=self.key, cipher=AES, nonce=h1.nonce) + h1.update(data) + h2.update(data_ba) + data_ba[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + def test_memoryview(self): + + data = b"\x00\x01\x02" + + def get_mv_ro(data): + return memoryview(data) + + def get_mv_rw(data): + return memoryview(bytearray(data)) + + for get_mv in (get_mv_ro, get_mv_rw): + + # Data and key can be a memoryview (during initialization) + key_mv = get_mv(self.key) + data_mv = get_mv(data) + + h1 = Poly1305.new(key=self.key, data=data, cipher=AES) + h2 = Poly1305.new(key=key_mv, data=data_mv, cipher=AES, + nonce=h1.nonce) + if not data_mv.readonly: + data_mv[:1] = b'\xFF' + key_mv[:1] = b'\xFF' + + self.assertEqual(h1.digest(), 
h2.digest()) + + # Data can be a memoryview (during operation) + data_mv = get_mv(data) + + h1 = Poly1305.new(key=self.key, cipher=AES) + h2 = Poly1305.new(key=self.key, cipher=AES, nonce=h1.nonce) + h1.update(data) + h2.update(data_mv) + if not data_mv.readonly: + data_mv[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + +class Poly1305Test_ChaCha20(unittest.TestCase): + + key = b'\x11' * 32 + + def test_new_positive(self): + data = b'r' * 100 + + h1 = Poly1305.new(key=self.key, cipher=ChaCha20) + self.assertEqual(h1.digest_size, 16) + self.assertEqual(len(h1.nonce), 12) + + h2 = Poly1305.new(key=self.key, cipher=ChaCha20, nonce = b'8' * 8) + self.assertEqual(len(h2.nonce), 8) + self.assertEqual(h2.nonce, b'8' * 8) + + def test_new_negative(self): + + self.assertRaises(ValueError, Poly1305.new, key=self.key, nonce=b'1' * 7, cipher=ChaCha20) + + +# +# make_mac_tests() expect a new() function with signature new(key, data, +# **kwargs), and we need to adapt Poly1305's, as it only uses keywords +# +class Poly1305_New(object): + + @staticmethod + def new(key, *data, **kwds): + _kwds = dict(kwds) + if len(data) == 1: + _kwds['data'] = data[0] + _kwds['key'] = key + return Poly1305.new(**_kwds) + + +class Poly1305_Basic(object): + + @staticmethod + def new(key, *data, **kwds): + from Crypto.Hash.Poly1305 import Poly1305_MAC + + if len(data) == 1: + msg = data[0] + else: + msg = None + + return Poly1305_MAC(key[:16], key[16:], msg) + + +class Poly1305AES_MC(unittest.TestCase): + + def runTest(self): + tag = unhexlify(b"fb447350c4e868c52ac3275cf9d4327e") + + msg = b'' + for msg_len in range(5000 + 1): + key = tag + strxor_c(tag, 0xFF) + nonce = tag[::-1] + if msg_len > 0: + msg = msg + tobytes(tag[0]) + auth = Poly1305.new(key=key, nonce=nonce, cipher=AES, data=msg) + tag = auth.digest() + + # Compare against output of original DJB's poly1305aes-20050218 + self.assertEqual("CDFA436DDD629C7DC20E1128530BAED2", auth.hexdigest().upper()) + + +def get_tests(config={}): + tests = make_mac_tests(Poly1305_Basic, "Poly1305", test_data_basic) + tests += make_mac_tests(Poly1305_New, "Poly1305", test_data_aes) + tests += make_mac_tests(Poly1305_New, "Poly1305", test_data_chacha20) + tests += [ Poly1305AES_MC() ] + tests += list_test_cases(Poly1305Test_AES) + tests += list_test_cases(Poly1305Test_ChaCha20) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_RIPEMD160.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_RIPEMD160.py new file mode 100644 index 000000000..153c5700f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_RIPEMD160.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_RIPEMD160.py: Self-test for the RIPEMD-160 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +#"""Self-test suite for Crypto.Hash.RIPEMD160""" + +from Crypto.Util.py3compat import * + +# This is a list of (expected_result, input[, description]) tuples. +test_data = [ + # Test vectors downloaded 2008-09-12 from + # http://homes.esat.kuleuven.be/~bosselae/ripemd160.html + ('9c1185a5c5e9fc54612808977ee8f548b2258d31', '', "'' (empty string)"), + ('0bdc9d2d256b3ee9daae347be6f4dc835a467ffe', 'a'), + ('8eb208f7e05d987a9b044a8e98c6b087f15a0bfc', 'abc'), + ('5d0689ef49d2fae572b881b123a85ffa21595f36', 'message digest'), + + ('f71c27109c692c1b56bbdceb5b9d2865b3708dbc', + 'abcdefghijklmnopqrstuvwxyz', + 'a-z'), + + ('12a053384a9c0c88e405a06c27dcf49ada62eb2b', + 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq', + 'abcdbcd...pnopq'), + + ('b0e20b6e3116640286ed3a87a5713079b21f5189', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'A-Z, a-z, 0-9'), + + ('9b752e45573d4b39f4dbd3323cab82bf63326bfb', + '1234567890' * 8, + "'1234567890' * 8"), + + ('52783243c1697bdbe16d37f97f68f08325dc1528', + 'a' * 10**6, + '"a" * 10**6'), +] + +def get_tests(config={}): + from Crypto.Hash import RIPEMD160 + from .common import make_hash_tests + return make_hash_tests(RIPEMD160, "RIPEMD160", test_data, + digest_size=20, + oid="1.3.36.3.2.1") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA1.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA1.py new file mode 100644 index 000000000..a883a44b5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA1.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/SHA1.py: Self-test for the SHA-1 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA""" + +from binascii import hexlify + +from Crypto.SelfTest.loader import load_test_vectors + +# Test vectors from various sources +# This is a list of (expected_result, input[, description]) tuples. +test_data_various = [ + # FIPS PUB 180-2, A.1 - "One-Block Message" + ('a9993e364706816aba3e25717850c26c9cd0d89d', 'abc'), + + # FIPS PUB 180-2, A.2 - "Multi-Block Message" + ('84983e441c3bd26ebaae4aa1f95129e5e54670f1', + 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), + + # FIPS PUB 180-2, A.3 - "Long Message" +# ('34aa973cd4c4daa4f61eeb2bdbad27316534016f', +# 'a' * 10**6, +# '"a" * 10**6'), + + # RFC 3174: Section 7.3, "TEST4" (multiple of 512 bits) + ('dea356a2cddd90c7a7ecedc5ebb563934f460452', + '01234567' * 80, + '"01234567" * 80'), +] + +def get_tests(config={}): + from Crypto.Hash import SHA1 + from .common import make_hash_tests + + tests = [] + + test_vectors = load_test_vectors(("Hash", "SHA1"), + "SHA1ShortMsg.rsp", + "KAT SHA-1", + { "len" : lambda x: int(x) } ) or [] + + test_data = test_data_various[:] + for tv in test_vectors: + try: + if tv.startswith('['): + continue + except AttributeError: + pass + if tv.len == 0: + tv.msg = b"" + test_data.append((hexlify(tv.md), tv.msg, tv.desc)) + + tests = make_hash_tests(SHA1, "SHA1", test_data, + digest_size=20, + oid="1.3.14.3.2.26") + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA224.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA224.py new file mode 100644 index 000000000..cf81ad980 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA224.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA224.py: Self-test for the SHA-224 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA224""" + +# Test vectors from various sources +# This is a list of (expected_result, input[, description]) tuples. 
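+# Each (expected_result, input[, description]) tuple below is consumed by
+# make_hash_tests() in get_tests(); conceptually every entry amounts to a
+# check of the following form (minimal sketch, using the first vector below):
+#
+#     from Crypto.Hash import SHA224
+#     assert SHA224.new(b"abc").hexdigest() == \
+#         "23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7"
+#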
+test_data = [ + + # RFC 3874: Section 3.1, "Test Vector #1 + ('23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7', 'abc'), + + # RFC 3874: Section 3.2, "Test Vector #2 + ('75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525', 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), + + # RFC 3874: Section 3.3, "Test Vector #3 + ('20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67', 'a' * 10**6, "'a' * 10**6"), + + # Examples from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f', ''), + + ('49b08defa65e644cbf8a2dd9270bdededabc741997d1dadd42026d7b', + 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), + + ('58911e7fccf2971a7d07f93162d8bd13568e71aa8fc86fc1fe9043d1', + 'Frank jagt im komplett verwahrlosten Taxi quer durch Bayern'), + +] + +def get_tests(config={}): + from Crypto.Hash import SHA224 + from .common import make_hash_tests + return make_hash_tests(SHA224, "SHA224", test_data, + digest_size=28, + oid='2.16.840.1.101.3.4.2.4') + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA256.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA256.py new file mode 100644 index 000000000..bb99326db --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA256.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA256.py: Self-test for the SHA-256 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA256""" + +import unittest +from Crypto.Util.py3compat import * + +class LargeSHA256Test(unittest.TestCase): + def runTest(self): + """SHA256: 512/520 MiB test""" + from Crypto.Hash import SHA256 + zeros = bchr(0x00) * (1024*1024) + + h = SHA256.new(zeros) + for i in range(511): + h.update(zeros) + + # This test vector is from PyCrypto's old testdata.py file. + self.assertEqual('9acca8e8c22201155389f65abbf6bc9723edc7384ead80503839f49dcc56d767', h.hexdigest()) # 512 MiB + + for i in range(8): + h.update(zeros) + + # This test vector is from PyCrypto's old testdata.py file. 
+ self.assertEqual('abf51ad954b246009dfe5a50ecd582fd5b8f1b8b27f30393853c3ef721e7fa6e', h.hexdigest()) # 520 MiB + +def get_tests(config={}): + # Test vectors from FIPS PUB 180-2 + # This is a list of (expected_result, input[, description]) tuples. + test_data = [ + # FIPS PUB 180-2, B.1 - "One-Block Message" + ('ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad', + 'abc'), + + # FIPS PUB 180-2, B.2 - "Multi-Block Message" + ('248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1', + 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), + + # FIPS PUB 180-2, B.3 - "Long Message" + ('cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0', + 'a' * 10**6, + '"a" * 10**6'), + + # Test for an old PyCrypto bug. + ('f7fd017a3c721ce7ff03f3552c0813adcc48b7f33f07e5e2ba71e23ea393d103', + 'This message is precisely 55 bytes long, to test a bug.', + 'Length = 55 (mod 64)'), + + # Example from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', ''), + + ('d32b568cd1b96d459e7291ebf4b25d007f275c9f13149beeb782fac0716613f8', + 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), + ] + + from Crypto.Hash import SHA256 + from .common import make_hash_tests + tests = make_hash_tests(SHA256, "SHA256", test_data, + digest_size=32, + oid="2.16.840.1.101.3.4.2.1") + + if config.get('slow_tests'): + tests += [LargeSHA256Test()] + + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA384.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA384.py new file mode 100644 index 000000000..c682eb439 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA384.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA.py: Self-test for the SHA-384 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA384""" + +# Test vectors from various sources +# This is a list of (expected_result, input[, description]) tuples. 
+test_data = [ + + # RFC 4634: Section Page 8.4, "Test 1" + ('cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7', 'abc'), + + # RFC 4634: Section Page 8.4, "Test 2.2" + ('09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712fcc7c71a557e2db966c3e9fa91746039', 'abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu'), + + # RFC 4634: Section Page 8.4, "Test 3" + ('9d0e1809716474cb086e834e310a4a1ced149e9c00f248527972cec5704c2a5b07b8b3dc38ecc4ebae97ddd87f3d8985', 'a' * 10**6, "'a' * 10**6"), + + # Taken from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b', ''), + + # Example from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('71e8383a4cea32d6fd6877495db2ee353542f46fa44bc23100bca48f3366b84e809f0708e81041f427c6d5219a286677', + 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), + +] + +def get_tests(config={}): + from Crypto.Hash import SHA384 + from .common import make_hash_tests + return make_hash_tests(SHA384, "SHA384", test_data, + digest_size=48, + oid='2.16.840.1.101.3.4.2.2') + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA3_224.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA3_224.py new file mode 100644 index 000000000..f92147a73 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA3_224.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA3_224.py: Self-test for the SHA-3/224 hash function +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA3_224""" + +import unittest +from binascii import hexlify + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Hash import SHA3_224 as SHA3 +from Crypto.Util.py3compat import b + + +class APITest(unittest.TestCase): + + def test_update_after_digest(self): + msg=b("rrrrttt") + + # Normally, update() cannot be done after digest() + h = SHA3.new(data=msg[:4]) + dig1 = h.digest() + self.assertRaises(TypeError, h.update, msg[4:]) + dig2 = SHA3.new(data=msg).digest() + + # With the proper flag, it is allowed + h = SHA3.new(data=msg[:4], update_after_digest=True) + self.assertEqual(h.digest(), dig1) + # ... and the subsequent digest applies to the entire message + # up to that point + h.update(msg[4:]) + self.assertEqual(h.digest(), dig2) + + +def get_tests(config={}): + from .common import make_hash_tests + + tests = [] + + test_vectors = load_test_vectors(("Hash", "SHA3"), + "ShortMsgKAT_SHA3-224.txt", + "KAT SHA-3 224", + { "len" : lambda x: int(x) } ) or [] + + test_data = [] + for tv in test_vectors: + if tv.len == 0: + tv.msg = b("") + test_data.append((hexlify(tv.md), tv.msg, tv.desc)) + + tests += make_hash_tests(SHA3, "SHA3_224", test_data, + digest_size=SHA3.digest_size, + oid="2.16.840.1.101.3.4.2.7") + tests += list_test_cases(APITest) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA3_256.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA3_256.py new file mode 100644 index 000000000..432c9321b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA3_256.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA3_256.py: Self-test for the SHA-3/256 hash function +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA3_256""" + +import unittest +from binascii import hexlify + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Hash import SHA3_256 as SHA3 +from Crypto.Util.py3compat import b + + +class APITest(unittest.TestCase): + + def test_update_after_digest(self): + msg=b("rrrrttt") + + # Normally, update() cannot be done after digest() + h = SHA3.new(data=msg[:4]) + dig1 = h.digest() + self.assertRaises(TypeError, h.update, msg[4:]) + dig2 = SHA3.new(data=msg).digest() + + # With the proper flag, it is allowed + h = SHA3.new(data=msg[:4], update_after_digest=True) + self.assertEqual(h.digest(), dig1) + # ... and the subsequent digest applies to the entire message + # up to that point + h.update(msg[4:]) + self.assertEqual(h.digest(), dig2) + + +def get_tests(config={}): + from .common import make_hash_tests + + tests = [] + + test_vectors = load_test_vectors(("Hash", "SHA3"), + "ShortMsgKAT_SHA3-256.txt", + "KAT SHA-3 256", + { "len" : lambda x: int(x) } ) or [] + + test_data = [] + for tv in test_vectors: + if tv.len == 0: + tv.msg = b("") + test_data.append((hexlify(tv.md), tv.msg, tv.desc)) + + + tests += make_hash_tests(SHA3, "SHA3_256", test_data, + digest_size=SHA3.digest_size, + oid="2.16.840.1.101.3.4.2.8") + tests += list_test_cases(APITest) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA3_384.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA3_384.py new file mode 100644 index 000000000..b0ba1bfee --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA3_384.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA3_384.py: Self-test for the SHA-3/384 hash function +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA3_384""" + +import unittest +from binascii import hexlify + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Hash import SHA3_384 as SHA3 +from Crypto.Util.py3compat import b + + +class APITest(unittest.TestCase): + + def test_update_after_digest(self): + msg=b("rrrrttt") + + # Normally, update() cannot be done after digest() + h = SHA3.new(data=msg[:4]) + dig1 = h.digest() + self.assertRaises(TypeError, h.update, msg[4:]) + dig2 = SHA3.new(data=msg).digest() + + # With the proper flag, it is allowed + h = SHA3.new(data=msg[:4], update_after_digest=True) + self.assertEqual(h.digest(), dig1) + # ... and the subsequent digest applies to the entire message + # up to that point + h.update(msg[4:]) + self.assertEqual(h.digest(), dig2) + + +def get_tests(config={}): + from .common import make_hash_tests + + tests = [] + + test_vectors = load_test_vectors(("Hash", "SHA3"), + "ShortMsgKAT_SHA3-384.txt", + "KAT SHA-3 384", + { "len" : lambda x: int(x) } ) or [] + + test_data = [] + for tv in test_vectors: + if tv.len == 0: + tv.msg = b("") + test_data.append((hexlify(tv.md), tv.msg, tv.desc)) + + tests += make_hash_tests(SHA3, "SHA3_384", test_data, + digest_size=SHA3.digest_size, + oid="2.16.840.1.101.3.4.2.9") + tests += list_test_cases(APITest) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA3_512.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA3_512.py new file mode 100644 index 000000000..7d1007a62 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA3_512.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA3_512.py: Self-test for the SHA-3/512 hash function +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA3_512""" + +import unittest +from binascii import hexlify + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Hash import SHA3_512 as SHA3 +from Crypto.Util.py3compat import b + + +class APITest(unittest.TestCase): + + def test_update_after_digest(self): + msg=b("rrrrttt") + + # Normally, update() cannot be done after digest() + h = SHA3.new(data=msg[:4]) + dig1 = h.digest() + self.assertRaises(TypeError, h.update, msg[4:]) + dig2 = SHA3.new(data=msg).digest() + + # With the proper flag, it is allowed + h = SHA3.new(data=msg[:4], update_after_digest=True) + self.assertEqual(h.digest(), dig1) + # ... and the subsequent digest applies to the entire message + # up to that point + h.update(msg[4:]) + self.assertEqual(h.digest(), dig2) + + +def get_tests(config={}): + from .common import make_hash_tests + + tests = [] + + test_vectors = load_test_vectors(("Hash", "SHA3"), + "ShortMsgKAT_SHA3-512.txt", + "KAT SHA-3 512", + { "len" : lambda x: int(x) } ) or [] + + test_data = [] + for tv in test_vectors: + if tv.len == 0: + tv.msg = b("") + test_data.append((hexlify(tv.md), tv.msg, tv.desc)) + + tests += make_hash_tests(SHA3, "SHA3_512", test_data, + digest_size=SHA3.digest_size, + oid="2.16.840.1.101.3.4.2.10") + tests += list_test_cases(APITest) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA512.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA512.py new file mode 100644 index 000000000..20961aca9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHA512.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA512.py: Self-test for the SHA-512 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA512""" + +from binascii import hexlify + +from Crypto.Hash import SHA512 +from .common import make_hash_tests +from Crypto.SelfTest.loader import load_test_vectors + +# Test vectors from various sources +# This is a list of (expected_result, input[, description]) tuples. 
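+# This module exercises plain SHA-512 as well as the truncated SHA-512/224
+# and SHA-512/256 variants; the variant is selected via the extra "truncate"
+# parameter that make_hash_tests() passes along. A minimal sketch of that
+# parameter (illustrative only):
+#
+#     from Crypto.Hash import SHA512
+#     h = SHA512.new(data=b"abc", truncate="256")
+#     assert h.digest_size == 32   # 256-bit (32-byte) digest
+#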
+test_data_512_other = [ + + # RFC 4634: Section Page 8.4, "Test 1" + ('ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f', 'abc'), + + # RFC 4634: Section Page 8.4, "Test 2.1" + ('8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909', 'abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu'), + + # RFC 4634: Section Page 8.4, "Test 3" + ('e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973ebde0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b', 'a' * 10**6, "'a' * 10**6"), + + # Taken from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e', ''), + + ('af9ed2de700433b803240a552b41b5a472a6ef3fe1431a722b2063c75e9f07451f67a28e37d09cde769424c96aea6f8971389db9e1993d6c565c3c71b855723c', 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), +] + + +def get_tests_SHA512(): + + test_vectors = load_test_vectors(("Hash", "SHA2"), + "SHA512ShortMsg.rsp", + "KAT SHA-512", + {"len": lambda x: int(x)}) or [] + + test_data = test_data_512_other[:] + for tv in test_vectors: + try: + if tv.startswith('['): + continue + except AttributeError: + pass + if tv.len == 0: + tv.msg = b"" + test_data.append((hexlify(tv.md), tv.msg, tv.desc)) + + tests = make_hash_tests(SHA512, "SHA512", test_data, + digest_size=64, + oid="2.16.840.1.101.3.4.2.3") + return tests + + +def get_tests_SHA512_224(): + + test_vectors = load_test_vectors(("Hash", "SHA2"), + "SHA512_224ShortMsg.rsp", + "KAT SHA-512/224", + {"len": lambda x: int(x)}) or [] + + test_data = [] + for tv in test_vectors: + try: + if tv.startswith('['): + continue + except AttributeError: + pass + if tv.len == 0: + tv.msg = b"" + test_data.append((hexlify(tv.md), tv.msg, tv.desc)) + + tests = make_hash_tests(SHA512, "SHA512/224", test_data, + digest_size=28, + oid="2.16.840.1.101.3.4.2.5", + extra_params={ "truncate" : "224" }) + return tests + + +def get_tests_SHA512_256(): + + test_vectors = load_test_vectors(("Hash", "SHA2"), + "SHA512_256ShortMsg.rsp", + "KAT SHA-512/256", + {"len": lambda x: int(x)}) or [] + + test_data = [] + for tv in test_vectors: + try: + if tv.startswith('['): + continue + except AttributeError: + pass + if tv.len == 0: + tv.msg = b"" + test_data.append((hexlify(tv.md), tv.msg, tv.desc)) + + tests = make_hash_tests(SHA512, "SHA512/256", test_data, + digest_size=32, + oid="2.16.840.1.101.3.4.2.6", + extra_params={ "truncate" : "256" }) + return tests + + +def get_tests(config={}): + + tests = [] + tests += get_tests_SHA512() + tests += get_tests_SHA512_224() + tests += get_tests_SHA512_256() + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHAKE.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHAKE.py new file mode 100644 index 000000000..29bd34ede --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_SHAKE.py @@ -0,0 +1,143 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHAKE128 and SHAKE256""" + +import unittest +from binascii import hexlify, unhexlify + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Hash import SHAKE128, SHAKE256 +from Crypto.Util.py3compat import b, bchr, bord, tobytes + +class SHAKETest(unittest.TestCase): + + def test_new_positive(self): + + xof1 = self.shake.new() + xof2 = self.shake.new(data=b("90")) + xof3 = self.shake.new().update(b("90")) + + self.assertNotEqual(xof1.read(10), xof2.read(10)) + xof3.read(10) + self.assertEqual(xof2.read(10), xof3.read(10)) + + def test_update(self): + pieces = [bchr(10) * 200, bchr(20) * 300] + h = self.shake.new() + h.update(pieces[0]).update(pieces[1]) + digest = h.read(10) + h = self.shake.new() + h.update(pieces[0] + pieces[1]) + self.assertEqual(h.read(10), digest) + + def test_update_negative(self): + h = self.shake.new() + self.assertRaises(TypeError, h.update, u"string") + + def test_digest(self): + h = self.shake.new() + digest = h.read(90) + + # read returns a byte string of the right length + self.assertTrue(isinstance(digest, type(b("digest")))) + self.assertEqual(len(digest), 90) + + def test_update_after_read(self): + mac = self.shake.new() + mac.update(b("rrrr")) + mac.read(90) + self.assertRaises(TypeError, mac.update, b("ttt")) + + +class SHAKE128Test(SHAKETest): + shake = SHAKE128 + + +class SHAKE256Test(SHAKETest): + shake = SHAKE256 + + +class SHAKEVectors(unittest.TestCase): + pass + + +test_vectors_128 = load_test_vectors(("Hash", "SHA3"), + "ShortMsgKAT_SHAKE128.txt", + "Short Messages KAT SHAKE128", + { "len" : lambda x: int(x) } ) or [] + +for idx, tv in enumerate(test_vectors_128): + if tv.len == 0: + data = b("") + else: + data = tobytes(tv.msg) + + def new_test(self, data=data, result=tv.md): + hobj = SHAKE128.new(data=data) + digest = hobj.read(len(result)) + self.assertEqual(digest, result) + + setattr(SHAKEVectors, "test_128_%d" % idx, new_test) + + +test_vectors_256 = load_test_vectors(("Hash", "SHA3"), + "ShortMsgKAT_SHAKE256.txt", + "Short Messages KAT SHAKE256", + { "len" : lambda x: int(x) } ) or [] 
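+# As with the SHAKE128 vectors above, each KAT entry is turned into its own
+# test method and attached to SHAKEVectors via setattr(). Each generated test
+# is essentially this sketch of the XOF read() API:
+#
+#     digest = SHAKE256.new(data=msg).read(len(expected))
+#     assert digest == expected
+#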
+ +for idx, tv in enumerate(test_vectors_256): + if tv.len == 0: + data = b("") + else: + data = tobytes(tv.msg) + + def new_test(self, data=data, result=tv.md): + hobj = SHAKE256.new(data=data) + digest = hobj.read(len(result)) + self.assertEqual(digest, result) + + setattr(SHAKEVectors, "test_256_%d" % idx, new_test) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(SHAKE128Test) + tests += list_test_cases(SHAKE256Test) + tests += list_test_cases(SHAKEVectors) + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_TupleHash.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_TupleHash.py new file mode 100644 index 000000000..f3fc3d2f9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_TupleHash.py @@ -0,0 +1,302 @@ +import unittest +from binascii import unhexlify, hexlify + +from Crypto.Util.py3compat import tobytes +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Hash import TupleHash128, TupleHash256 + + +class TupleHashTest(unittest.TestCase): + + def new(self, *args, **kwargs): + return self.TupleHash.new(*args, **kwargs) + + def test_new_positive(self): + + h = self.new() + for new_func in self.TupleHash.new, h.new: + + for dbits in range(64, 1024 + 1, 8): + hobj = new_func(digest_bits=dbits) + self.assertEqual(hobj.digest_size * 8, dbits) + + for dbytes in range(8, 128 + 1): + hobj = new_func(digest_bytes=dbytes) + self.assertEqual(hobj.digest_size, dbytes) + + hobj = h.new() + self.assertEqual(hobj.digest_size, self.default_bytes) + + def test_new_negative(self): + + h = self.new() + for new_func in self.TupleHash.new, h.new: + self.assertRaises(TypeError, new_func, + digest_bytes=self.minimum_bytes, + digest_bits=self.minimum_bits) + self.assertRaises(ValueError, new_func, digest_bytes=0) + self.assertRaises(ValueError, new_func, + digest_bits=self.minimum_bits + 7) + self.assertRaises(ValueError, new_func, + digest_bits=self.minimum_bits - 8) + self.assertRaises(ValueError, new_func, + digest_bits=self.minimum_bytes - 1) + + def test_default_digest_size(self): + digest = self.new().digest() + self.assertEqual(len(digest), self.default_bytes) + + def test_update(self): + h = self.new() + h.update(b'') + h.digest() + + h = self.new() + h.update(b'') + h.update(b'STRING1') + h.update(b'STRING2') + mac1 = h.digest() + + h = self.new() + h.update(b'STRING1') + h.update(b'STRING2') + mac2 = h.digest() + self.assertNotEqual(mac1, mac2) + + h = self.new() + h.update(b'STRING1', b'STRING2') + self.assertEqual(mac2, h.digest()) + + h = self.new() + t = b'STRING1', b'STRING2' + h.update(*t) + self.assertEqual(mac2, h.digest()) + + def test_update_negative(self): + h = self.new() + self.assertRaises(TypeError, h.update, u"string") + self.assertRaises(TypeError, h.update, None) + self.assertRaises(TypeError, h.update, (b'STRING1', b'STRING2')) + + def test_digest(self): + h = self.new() + digest = h.digest() + + # hexdigest does not change the state + self.assertEqual(h.digest(), digest) + # digest returns a byte string + self.assertTrue(isinstance(digest, type(b"digest"))) + + def test_update_after_digest(self): + msg = b"rrrrttt" + + # Normally, update() cannot be done after digest() + h = self.new() + h.update(msg) + dig1 = h.digest() + self.assertRaises(TypeError, h.update, dig1) + + def 
test_hex_digest(self): + mac = self.new() + digest = mac.digest() + hexdigest = mac.hexdigest() + + # hexdigest is equivalent to digest + self.assertEqual(hexlify(digest), tobytes(hexdigest)) + # hexdigest does not change the state + self.assertEqual(mac.hexdigest(), hexdigest) + # hexdigest returns a string + self.assertTrue(isinstance(hexdigest, type("digest"))) + + def test_bytearray(self): + + data = b"\x00\x01\x02" + + # Data can be a bytearray (during operation) + data_ba = bytearray(data) + + h1 = self.new() + h2 = self.new() + h1.update(data) + h2.update(data_ba) + data_ba[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + def test_memoryview(self): + + data = b"\x00\x01\x02" + + def get_mv_ro(data): + return memoryview(data) + + def get_mv_rw(data): + return memoryview(bytearray(data)) + + for get_mv in (get_mv_ro, get_mv_rw): + + # Data can be a memoryview (during operation) + data_mv = get_mv(data) + + h1 = self.new() + h2 = self.new() + h1.update(data) + h2.update(data_mv) + if not data_mv.readonly: + data_mv[:1] = b'\xFF' + + self.assertEqual(h1.digest(), h2.digest()) + + +class TupleHash128Test(TupleHashTest): + + TupleHash = TupleHash128 + + minimum_bytes = 8 + default_bytes = 64 + + minimum_bits = 64 + default_bits = 512 + + +class TupleHash256Test(TupleHashTest): + + TupleHash = TupleHash256 + + minimum_bytes = 8 + default_bytes = 64 + + minimum_bits = 64 + default_bits = 512 + + +class NISTExampleTestVectors(unittest.TestCase): + + # http://csrc.nist.gov/groups/ST/toolkit/documents/Examples/TupleHash_samples.pdf + test_data = [ + ( + ( + "00 01 02", + "10 11 12 13 14 15", + ), + "", + "C5 D8 78 6C 1A FB 9B 82 11 1A B3 4B 65 B2 C0 04" + "8F A6 4E 6D 48 E2 63 26 4C E1 70 7D 3F FC 8E D1", + "KMAC128 Sample #1 NIST", + TupleHash128 + ), + ( + ( + "00 01 02", + "10 11 12 13 14 15", + ), + "My Tuple App", + "75 CD B2 0F F4 DB 11 54 E8 41 D7 58 E2 41 60 C5" + "4B AE 86 EB 8C 13 E7 F5 F4 0E B3 55 88 E9 6D FB", + "KMAC128 Sample #2 NIST", + TupleHash128 + ), + ( + ( + "00 01 02", + "10 11 12 13 14 15", + "20 21 22 23 24 25 26 27 28", + ), + "My Tuple App", + "E6 0F 20 2C 89 A2 63 1E DA 8D 4C 58 8C A5 FD 07" + "F3 9E 51 51 99 8D EC CF 97 3A DB 38 04 BB 6E 84", + "KMAC128 Sample #3 NIST", + TupleHash128 + ), + ( + ( + "00 01 02", + "10 11 12 13 14 15", + ), + "", + "CF B7 05 8C AC A5 E6 68 F8 1A 12 A2 0A 21 95 CE" + "97 A9 25 F1 DB A3 E7 44 9A 56 F8 22 01 EC 60 73" + "11 AC 26 96 B1 AB 5E A2 35 2D F1 42 3B DE 7B D4" + "BB 78 C9 AE D1 A8 53 C7 86 72 F9 EB 23 BB E1 94", + "KMAC256 Sample #4 NIST", + TupleHash256 + ), + ( + ( + "00 01 02", + "10 11 12 13 14 15", + ), + "My Tuple App", + "14 7C 21 91 D5 ED 7E FD 98 DB D9 6D 7A B5 A1 16" + "92 57 6F 5F E2 A5 06 5F 3E 33 DE 6B BA 9F 3A A1" + "C4 E9 A0 68 A2 89 C6 1C 95 AA B3 0A EE 1E 41 0B" + "0B 60 7D E3 62 0E 24 A4 E3 BF 98 52 A1 D4 36 7E", + "KMAC256 Sample #5 NIST", + TupleHash256 + ), + ( + ( + "00 01 02", + "10 11 12 13 14 15", + "20 21 22 23 24 25 26 27 28", + ), + "My Tuple App", + "45 00 0B E6 3F 9B 6B FD 89 F5 47 17 67 0F 69 A9" + "BC 76 35 91 A4 F0 5C 50 D6 88 91 A7 44 BC C6 E7" + "D6 D5 B5 E8 2C 01 8D A9 99 ED 35 B0 BB 49 C9 67" + "8E 52 6A BD 8E 85 C1 3E D2 54 02 1D B9 E7 90 CE", + "KMAC256 Sample #6 NIST", + TupleHash256 + ), + + + + ] + + def setUp(self): + td = [] + for tv_in in self.test_data: + tv_out = [None] * len(tv_in) + + tv_out[0] = [] + for string in tv_in[0]: + tv_out[0].append(unhexlify(string.replace(" ", ""))) + + tv_out[1] = tobytes(tv_in[1]) # Custom + tv_out[2] = unhexlify(tv_in[2].replace(" 
", "")) + tv_out[3] = tv_in[3] + tv_out[4] = tv_in[4] + td.append(tv_out) + self.test_data = td + + def runTest(self): + + for data, custom, digest, text, module in self.test_data: + hd1 = module.new(custom=custom, digest_bytes=len(digest)) + hd2 = module.new(custom=custom, digest_bytes=len(digest)) + + # Call update() for each element + for string in data: + hd1.update(string) + + # One single update for all elements + hd2.update(*data) + + self.assertEqual(hd1.digest(), digest, msg=text) + self.assertEqual(hd2.digest(), digest, msg=text) + +def get_tests(config={}): + tests = [] + + tests += list_test_cases(TupleHash128Test) + tests += list_test_cases(TupleHash256Test) + tests.append(NISTExampleTestVectors()) + + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_TurboSHAKE.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_TurboSHAKE.py new file mode 100644 index 000000000..b37aee1a6 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_TurboSHAKE.py @@ -0,0 +1,468 @@ +"""Self-test suite for Crypto.Hash.TurboSHAKE128 and TurboSHAKE256""" + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Hash import TurboSHAKE128, TurboSHAKE256 +from Crypto.Util.py3compat import bchr + + +class TurboSHAKETest(unittest.TestCase): + + def test_new_positive(self): + + xof1 = self.TurboSHAKE.new() + xof1.update(b'90') + + xof2 = self.TurboSHAKE.new(domain=0x1F) + xof2.update(b'90') + + xof3 = self.TurboSHAKE.new(data=b'90') + + out1 = xof1.read(128) + out2 = xof2.read(128) + out3 = xof3.read(128) + + self.assertEqual(out1, out2) + self.assertEqual(out1, out3) + + def test_new_domain(self): + xof1 = self.TurboSHAKE.new(domain=0x1D) + xof2 = self.TurboSHAKE.new(domain=0x20) + self.assertNotEqual(xof1.read(128), xof2.read(128)) + + def test_update(self): + pieces = [bchr(10) * 200, bchr(20) * 300] + + xof1 = self.TurboSHAKE.new() + xof1.update(pieces[0]).update(pieces[1]) + digest1 = xof1.read(10) + + xof2 = self.TurboSHAKE.new() + xof2.update(pieces[0] + pieces[1]) + digest2 = xof2.read(10) + + self.assertEqual(digest1, digest2) + + def test_update_negative(self): + xof1 = self.TurboSHAKE.new() + self.assertRaises(TypeError, xof1.update, u"string") + + def test_read(self): + xof1 = self.TurboSHAKE.new() + digest = xof1.read(90) + + # read returns a byte string of the right length + self.assertTrue(isinstance(digest, bytes)) + self.assertEqual(len(digest), 90) + + def test_update_after_read(self): + xof1 = self.TurboSHAKE.new() + xof1.update(b"rrrr") + xof1.read(90) + self.assertRaises(TypeError, xof1.update, b"ttt") + + def test_new(self): + xof1 = self.TurboSHAKE.new(domain=0x07) + xof1.update(b'90') + digest1 = xof1.read(100) + + xof2 = xof1.new() + xof2.update(b'90') + digest2 = xof2.read(100) + + self.assertEqual(digest1, digest2) + + self.assertRaises(TypeError, xof1.new, domain=0x07) + + +class TurboSHAKE128Test(TurboSHAKETest): + TurboSHAKE = TurboSHAKE128 + + +class TurboSHAKE256Test(TurboSHAKETest): + TurboSHAKE = TurboSHAKE256 + + +def txt2bin(txt): + clean = txt.replace(" ", "").replace("\n", "").replace("\r", "") + return unhexlify(clean) + + +def ptn(n): + res = bytearray(n) + pattern = b"".join([bchr(x) for x in range(0, 0xFB)]) + for base in range(0, n - 0xFB, 0xFB): + 
res[base:base + 0xFB] = pattern + remain = n % 0xFB + if remain: + base = (n // 0xFB) * 0xFB + res[base:] = pattern[:remain] + assert len(res) == n + return res + + +def chunked(source, size): + for i in range(0, len(source), size): + yield source[i:i+size] + + +class TurboSHAKE128TV(unittest.TestCase): + + def test_zero_1(self): + tv = """1E 41 5F 1C 59 83 AF F2 16 92 17 27 7D 17 BB 53 + 8C D9 45 A3 97 DD EC 54 1F 1C E4 1A F2 C1 B7 4C""" + + btv = txt2bin(tv) + res = TurboSHAKE128.new().read(32) + self.assertEqual(res, btv) + + def test_zero_2(self): + tv = """1E 41 5F 1C 59 83 AF F2 16 92 17 27 7D 17 BB 53 + 8C D9 45 A3 97 DD EC 54 1F 1C E4 1A F2 C1 B7 4C + 3E 8C CA E2 A4 DA E5 6C 84 A0 4C 23 85 C0 3C 15 + E8 19 3B DF 58 73 73 63 32 16 91 C0 54 62 C8 DF""" + + btv = txt2bin(tv) + res = TurboSHAKE128.new().read(64) + self.assertEqual(res, btv) + + def test_zero_3(self): + tv = """A3 B9 B0 38 59 00 CE 76 1F 22 AE D5 48 E7 54 DA + 10 A5 24 2D 62 E8 C6 58 E3 F3 A9 23 A7 55 56 07""" + + btv = txt2bin(tv) + res = TurboSHAKE128.new().read(10032)[-32:] + self.assertEqual(res, btv) + + def test_ptn_1(self): + tv = """55 CE DD 6F 60 AF 7B B2 9A 40 42 AE 83 2E F3 F5 + 8D B7 29 9F 89 3E BB 92 47 24 7D 85 69 58 DA A9""" + + btv = txt2bin(tv) + res = TurboSHAKE128.new(data=ptn(1)).read(32) + self.assertEqual(res, btv) + + def test_ptn_17(self): + tv = """9C 97 D0 36 A3 BA C8 19 DB 70 ED E0 CA 55 4E C6 + E4 C2 A1 A4 FF BF D9 EC 26 9C A6 A1 11 16 12 33""" + + btv = txt2bin(tv) + res = TurboSHAKE128.new(data=ptn(17)).read(32) + self.assertEqual(res, btv) + + def test_ptn_17_2(self): + tv = """96 C7 7C 27 9E 01 26 F7 FC 07 C9 B0 7F 5C DA E1 + E0 BE 60 BD BE 10 62 00 40 E7 5D 72 23 A6 24 D2""" + + btv = txt2bin(tv) + res = TurboSHAKE128.new(data=ptn(17**2)).read(32) + self.assertEqual(res, btv) + + def test_ptn_17_3(self): + tv = """D4 97 6E B5 6B CF 11 85 20 58 2B 70 9F 73 E1 D6 + 85 3E 00 1F DA F8 0E 1B 13 E0 D0 59 9D 5F B3 72""" + + btv = txt2bin(tv) + res = TurboSHAKE128.new(data=ptn(17**3)).read(32) + self.assertEqual(res, btv) + + def test_ptn_17_4(self): + tv = """DA 67 C7 03 9E 98 BF 53 0C F7 A3 78 30 C6 66 4E + 14 CB AB 7F 54 0F 58 40 3B 1B 82 95 13 18 EE 5C""" + + btv = txt2bin(tv) + data = ptn(17**4) + + # All at once + res = TurboSHAKE128.new(data=data).read(32) + self.assertEqual(res, btv) + + # Byte by byte + xof = TurboSHAKE128.new() + for x in data: + xof.update(bchr(x)) + res = xof.read(32) + self.assertEqual(res, btv) + + # Chunks of various prime sizes + for chunk_size in (13, 17, 19, 23, 31): + xof = TurboSHAKE128.new() + for x in chunked(data, chunk_size): + xof.update(x) + res = xof.read(32) + self.assertEqual(res, btv) + + def test_ptn_17_5(self): + tv = """B9 7A 90 6F BF 83 EF 7C 81 25 17 AB F3 B2 D0 AE + A0 C4 F6 03 18 CE 11 CF 10 39 25 12 7F 59 EE CD""" + + btv = txt2bin(tv) + data = ptn(17**5) + + # All at once + res = TurboSHAKE128.new(data=data).read(32) + self.assertEqual(res, btv) + + # Chunks + xof = TurboSHAKE128.new() + for chunk in chunked(data, 8192): + xof.update(chunk) + res = xof.read(32) + self.assertEqual(res, btv) + + def test_ptn_17_6(self): + tv = """35 CD 49 4A DE DE D2 F2 52 39 AF 09 A7 B8 EF 0C + 4D 1C A4 FE 2D 1A C3 70 FA 63 21 6F E7 B4 C2 B1""" + + btv = txt2bin(tv) + data = ptn(17**6) + + res = TurboSHAKE128.new(data=data).read(32) + self.assertEqual(res, btv) + + def test_ffffff_d01(self): + tv = """BF 32 3F 94 04 94 E8 8E E1 C5 40 FE 66 0B E8 A0 + C9 3F 43 D1 5E C0 06 99 84 62 FA 99 4E ED 5D AB""" + + btv = txt2bin(tv) + res = 
TurboSHAKE128.new(data=b"\xff\xff\xff", domain=0x01).read(32) + self.assertEqual(res, btv) + + def test_ff_d06(self): + tv = """8E C9 C6 64 65 ED 0D 4A 6C 35 D1 35 06 71 8D 68 + 7A 25 CB 05 C7 4C CA 1E 42 50 1A BD 83 87 4A 67""" + + btv = txt2bin(tv) + res = TurboSHAKE128.new(data=b'\xFF', domain=0x06).read(32) + self.assertEqual(res, btv) + + def test_ffffff_d07(self): + tv = """B6 58 57 60 01 CA D9 B1 E5 F3 99 A9 F7 77 23 BB + A0 54 58 04 2D 68 20 6F 72 52 68 2D BA 36 63 ED""" + + btv = txt2bin(tv) + res = TurboSHAKE128.new(data=b'\xFF' * 3, domain=0x07).read(32) + self.assertEqual(res, btv) + + def test_ffffffffffff_d0b(self): + tv = """8D EE AA 1A EC 47 CC EE 56 9F 65 9C 21 DF A8 E1 + 12 DB 3C EE 37 B1 81 78 B2 AC D8 05 B7 99 CC 37""" + + btv = txt2bin(tv) + res = TurboSHAKE128.new(data=b'\xFF' * 7, domain=0x0B).read(32) + self.assertEqual(res, btv) + + def test_ff_d30(self): + tv = """55 31 22 E2 13 5E 36 3C 32 92 BE D2 C6 42 1F A2 + 32 BA B0 3D AA 07 C7 D6 63 66 03 28 65 06 32 5B""" + + btv = txt2bin(tv) + res = TurboSHAKE128.new(data=b'\xFF', domain=0x30).read(32) + self.assertEqual(res, btv) + + def test_ffffff_d7f(self): + tv = """16 27 4C C6 56 D4 4C EF D4 22 39 5D 0F 90 53 BD + A6 D2 8E 12 2A BA 15 C7 65 E5 AD 0E 6E AF 26 F9""" + + btv = txt2bin(tv) + res = TurboSHAKE128.new(data=b'\xFF' * 3, domain=0x7F).read(32) + self.assertEqual(res, btv) + + +class TurboSHAKE256TV(unittest.TestCase): + + def test_zero_1(self): + tv = """36 7A 32 9D AF EA 87 1C 78 02 EC 67 F9 05 AE 13 + C5 76 95 DC 2C 66 63 C6 10 35 F5 9A 18 F8 E7 DB + 11 ED C0 E1 2E 91 EA 60 EB 6B 32 DF 06 DD 7F 00 + 2F BA FA BB 6E 13 EC 1C C2 0D 99 55 47 60 0D B0""" + + btv = txt2bin(tv) + res = TurboSHAKE256.new().read(64) + self.assertEqual(res, btv) + + def test_zero_2(self): + tv = """AB EF A1 16 30 C6 61 26 92 49 74 26 85 EC 08 2F + 20 72 65 DC CF 2F 43 53 4E 9C 61 BA 0C 9D 1D 75""" + + btv = txt2bin(tv) + res = TurboSHAKE256.new().read(10032)[-32:] + self.assertEqual(res, btv) + + def test_ptn_1(self): + tv = """3E 17 12 F9 28 F8 EA F1 05 46 32 B2 AA 0A 24 6E + D8 B0 C3 78 72 8F 60 BC 97 04 10 15 5C 28 82 0E + 90 CC 90 D8 A3 00 6A A2 37 2C 5C 5E A1 76 B0 68 + 2B F2 2B AE 74 67 AC 94 F7 4D 43 D3 9B 04 82 E2""" + + btv = txt2bin(tv) + res = TurboSHAKE256.new(data=ptn(1)).read(64) + self.assertEqual(res, btv) + + def test_ptn_17(self): + tv = """B3 BA B0 30 0E 6A 19 1F BE 61 37 93 98 35 92 35 + 78 79 4E A5 48 43 F5 01 10 90 FA 2F 37 80 A9 E5 + CB 22 C5 9D 78 B4 0A 0F BF F9 E6 72 C0 FB E0 97 + 0B D2 C8 45 09 1C 60 44 D6 87 05 4D A5 D8 E9 C7""" + + btv = txt2bin(tv) + res = TurboSHAKE256.new(data=ptn(17)).read(64) + self.assertEqual(res, btv) + + def test_ptn_17_2(self): + tv = """66 B8 10 DB 8E 90 78 04 24 C0 84 73 72 FD C9 57 + 10 88 2F DE 31 C6 DF 75 BE B9 D4 CD 93 05 CF CA + E3 5E 7B 83 E8 B7 E6 EB 4B 78 60 58 80 11 63 16 + FE 2C 07 8A 09 B9 4A D7 B8 21 3C 0A 73 8B 65 C0""" + + btv = txt2bin(tv) + res = TurboSHAKE256.new(data=ptn(17**2)).read(64) + self.assertEqual(res, btv) + + def test_ptn_17_3(self): + tv = """C7 4E BC 91 9A 5B 3B 0D D1 22 81 85 BA 02 D2 9E + F4 42 D6 9D 3D 42 76 A9 3E FE 0B F9 A1 6A 7D C0 + CD 4E AB AD AB 8C D7 A5 ED D9 66 95 F5 D3 60 AB + E0 9E 2C 65 11 A3 EC 39 7D A3 B7 6B 9E 16 74 FB""" + + btv = txt2bin(tv) + res = TurboSHAKE256.new(data=ptn(17**3)).read(64) + self.assertEqual(res, btv) + + def test_ptn_17_4(self): + tv = """02 CC 3A 88 97 E6 F4 F6 CC B6 FD 46 63 1B 1F 52 + 07 B6 6C 6D E9 C7 B5 5B 2D 1A 23 13 4A 17 0A FD + AC 23 4E AB A9 A7 7C FF 88 C1 F0 20 B7 37 24 61 + 8C 56 87 B3 62 C4 30 
B2 48 CD 38 64 7F 84 8A 1D""" + + btv = txt2bin(tv) + data = ptn(17**4) + + # All at once + res = TurboSHAKE256.new(data=data).read(64) + self.assertEqual(res, btv) + + # Byte by byte + xof = TurboSHAKE256.new() + for x in data: + xof.update(bchr(x)) + res = xof.read(64) + self.assertEqual(res, btv) + + # Chunks of various prime sizes + for chunk_size in (13, 17, 19, 23, 31): + xof = TurboSHAKE256.new() + for x in chunked(data, chunk_size): + xof.update(x) + res = xof.read(64) + self.assertEqual(res, btv) + + def test_ptn_17_5(self): + tv = """AD D5 3B 06 54 3E 58 4B 58 23 F6 26 99 6A EE 50 + FE 45 ED 15 F2 02 43 A7 16 54 85 AC B4 AA 76 B4 + FF DA 75 CE DF 6D 8C DC 95 C3 32 BD 56 F4 B9 86 + B5 8B B1 7D 17 78 BF C1 B1 A9 75 45 CD F4 EC 9F""" + + btv = txt2bin(tv) + data = ptn(17**5) + + # All at once + res = TurboSHAKE256.new(data=data).read(64) + self.assertEqual(res, btv) + + # Chunks + xof = TurboSHAKE256.new() + for chunk in chunked(data, 8192): + xof.update(chunk) + res = xof.read(64) + self.assertEqual(res, btv) + + def test_ptn_17_6(self): + tv = """9E 11 BC 59 C2 4E 73 99 3C 14 84 EC 66 35 8E F7 + 1D B7 4A EF D8 4E 12 3F 78 00 BA 9C 48 53 E0 2C + FE 70 1D 9E 6B B7 65 A3 04 F0 DC 34 A4 EE 3B A8 + 2C 41 0F 0D A7 0E 86 BF BD 90 EA 87 7C 2D 61 04""" + + btv = txt2bin(tv) + data = ptn(17**6) + + res = TurboSHAKE256.new(data=data).read(64) + self.assertEqual(res, btv) + + def test_ffffff_d01(self): + tv = """D2 1C 6F BB F5 87 FA 22 82 F2 9A EA 62 01 75 FB + 02 57 41 3A F7 8A 0B 1B 2A 87 41 9C E0 31 D9 33 + AE 7A 4D 38 33 27 A8 A1 76 41 A3 4F 8A 1D 10 03 + AD 7D A6 B7 2D BA 84 BB 62 FE F2 8F 62 F1 24 24""" + + btv = txt2bin(tv) + res = TurboSHAKE256.new(data=b"\xff\xff\xff", domain=0x01).read(64) + self.assertEqual(res, btv) + + def test_ff_d06(self): + tv = """73 8D 7B 4E 37 D1 8B 7F 22 AD 1B 53 13 E3 57 E3 + DD 7D 07 05 6A 26 A3 03 C4 33 FA 35 33 45 52 80 + F4 F5 A7 D4 F7 00 EF B4 37 FE 6D 28 14 05 E0 7B + E3 2A 0A 97 2E 22 E6 3A DC 1B 09 0D AE FE 00 4B""" + + btv = txt2bin(tv) + res = TurboSHAKE256.new(data=b'\xFF', domain=0x06).read(64) + self.assertEqual(res, btv) + + def test_ffffff_d07(self): + tv = """18 B3 B5 B7 06 1C 2E 67 C1 75 3A 00 E6 AD 7E D7 + BA 1C 90 6C F9 3E FB 70 92 EA F2 7F BE EB B7 55 + AE 6E 29 24 93 C1 10 E4 8D 26 00 28 49 2B 8E 09 + B5 50 06 12 B8 F2 57 89 85 DE D5 35 7D 00 EC 67""" + + btv = txt2bin(tv) + res = TurboSHAKE256.new(data=b'\xFF' * 3, domain=0x07).read(64) + self.assertEqual(res, btv) + + def test_ffffffffffff_d0b(self): + tv = """BB 36 76 49 51 EC 97 E9 D8 5F 7E E9 A6 7A 77 18 + FC 00 5C F4 25 56 BE 79 CE 12 C0 BD E5 0E 57 36 + D6 63 2B 0D 0D FB 20 2D 1B BB 8F FE 3D D7 4C B0 + 08 34 FA 75 6C B0 34 71 BA B1 3A 1E 2C 16 B3 C0""" + + btv = txt2bin(tv) + res = TurboSHAKE256.new(data=b'\xFF' * 7, domain=0x0B).read(64) + self.assertEqual(res, btv) + + def test_ff_d30(self): + tv = """F3 FE 12 87 3D 34 BC BB 2E 60 87 79 D6 B7 0E 7F + 86 BE C7 E9 0B F1 13 CB D4 FD D0 C4 E2 F4 62 5E + 14 8D D7 EE 1A 52 77 6C F7 7F 24 05 14 D9 CC FC + 3B 5D DA B8 EE 25 5E 39 EE 38 90 72 96 2C 11 1A""" + + btv = txt2bin(tv) + res = TurboSHAKE256.new(data=b'\xFF', domain=0x30).read(64) + self.assertEqual(res, btv) + + def test_ffffff_d7f(self): + tv = """AB E5 69 C1 F7 7E C3 40 F0 27 05 E7 D3 7C 9A B7 + E1 55 51 6E 4A 6A 15 00 21 D7 0B 6F AC 0B B4 0C + 06 9F 9A 98 28 A0 D5 75 CD 99 F9 BA E4 35 AB 1A + CF 7E D9 11 0B A9 7C E0 38 8D 07 4B AC 76 87 76""" + + btv = txt2bin(tv) + res = TurboSHAKE256.new(data=b'\xFF' * 3, domain=0x7F).read(64) + self.assertEqual(res, btv) + + +def 
get_tests(config={}): + tests = [] + tests += list_test_cases(TurboSHAKE128Test) + tests += list_test_cases(TurboSHAKE256Test) + tests += list_test_cases(TurboSHAKE128TV) + tests += list_test_cases(TurboSHAKE256TV) + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_cSHAKE.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_cSHAKE.py new file mode 100644 index 000000000..72ad34115 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_cSHAKE.py @@ -0,0 +1,178 @@ +# =================================================================== +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +"""Self-test suite for Crypto.Hash.cSHAKE128 and cSHAKE256""" + +import unittest + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Hash import cSHAKE128, cSHAKE256, SHAKE128, SHAKE256 +from Crypto.Util.py3compat import b, bchr, tobytes + + +class cSHAKETest(unittest.TestCase): + + def test_left_encode(self): + from Crypto.Hash.cSHAKE128 import _left_encode + self.assertEqual(_left_encode(0), b'\x01\x00') + self.assertEqual(_left_encode(1), b'\x01\x01') + self.assertEqual(_left_encode(256), b'\x02\x01\x00') + + def test_bytepad(self): + from Crypto.Hash.cSHAKE128 import _bytepad + self.assertEqual(_bytepad(b'', 4), b'\x01\x04\x00\x00') + self.assertEqual(_bytepad(b'A', 4), b'\x01\x04A\x00') + self.assertEqual(_bytepad(b'AA', 4), b'\x01\x04AA') + self.assertEqual(_bytepad(b'AAA', 4), b'\x01\x04AAA\x00\x00\x00') + self.assertEqual(_bytepad(b'AAAA', 4), b'\x01\x04AAAA\x00\x00') + self.assertEqual(_bytepad(b'AAAAA', 4), b'\x01\x04AAAAA\x00') + self.assertEqual(_bytepad(b'AAAAAA', 4), b'\x01\x04AAAAAA') + self.assertEqual(_bytepad(b'AAAAAAA', 4), b'\x01\x04AAAAAAA\x00\x00\x00') + + def test_new_positive(self): + + xof1 = self.cshake.new() + xof2 = self.cshake.new(data=b("90")) + xof3 = self.cshake.new().update(b("90")) + + self.assertNotEqual(xof1.read(10), xof2.read(10)) + xof3.read(10) + self.assertEqual(xof2.read(10), xof3.read(10)) + + xof1 = self.cshake.new() + ref = xof1.read(10) + xof2 = self.cshake.new(custom=b("")) + xof3 = self.cshake.new(custom=b("foo")) + + self.assertEqual(ref, xof2.read(10)) + self.assertNotEqual(ref, xof3.read(10)) + + xof1 = self.cshake.new(custom=b("foo")) + xof2 = self.cshake.new(custom=b("foo"), data=b("90")) + xof3 = self.cshake.new(custom=b("foo")).update(b("90")) + + self.assertNotEqual(xof1.read(10), xof2.read(10)) + xof3.read(10) + self.assertEqual(xof2.read(10), xof3.read(10)) + + def test_update(self): + pieces = [bchr(10) * 200, bchr(20) * 300] + h = self.cshake.new() + h.update(pieces[0]).update(pieces[1]) + digest = h.read(10) + h = self.cshake.new() + h.update(pieces[0] + pieces[1]) + self.assertEqual(h.read(10), digest) + + def test_update_negative(self): + h = self.cshake.new() + self.assertRaises(TypeError, h.update, u"string") + + def test_digest(self): + h = self.cshake.new() + digest = h.read(90) + + # read returns a byte string of the right length + self.assertTrue(isinstance(digest, type(b("digest")))) + self.assertEqual(len(digest), 90) + + def test_update_after_read(self): + mac = self.cshake.new() + mac.update(b("rrrr")) + mac.read(90) + self.assertRaises(TypeError, mac.update, b("ttt")) + + def test_shake(self): + # When no customization string is passed, results must match SHAKE + for digest_len in range(64): + xof1 = self.cshake.new(b'TEST') + xof2 = self.shake.new(b'TEST') + self.assertEqual(xof1.read(digest_len), xof2.read(digest_len)) + + +class cSHAKE128Test(cSHAKETest): + cshake = cSHAKE128 + shake = SHAKE128 + + +class cSHAKE256Test(cSHAKETest): + cshake = cSHAKE256 + shake = SHAKE256 + + +class cSHAKEVectors(unittest.TestCase): + pass + + +vector_files = [("ShortMsgSamples_cSHAKE128.txt", "Short Message Samples cSHAKE128", "128_cshake", cSHAKE128), + ("ShortMsgSamples_cSHAKE256.txt", "Short Message Samples cSHAKE256", "256_cshake", cSHAKE256), + ("CustomMsgSamples_cSHAKE128.txt", "Custom Message Samples cSHAKE128", "custom_128_cshake", cSHAKE128), + 
("CustomMsgSamples_cSHAKE256.txt", "Custom Message Samples cSHAKE256", "custom_256_cshake", cSHAKE256), + ] + +for file, descr, tag, test_class in vector_files: + + test_vectors = load_test_vectors(("Hash", "SHA3"), file, descr, + {"len": lambda x: int(x), + "nlen": lambda x: int(x), + "slen": lambda x: int(x)}) or [] + + for idx, tv in enumerate(test_vectors): + if getattr(tv, "len", 0) == 0: + data = b("") + else: + data = tobytes(tv.msg) + assert(tv.len == len(tv.msg)*8) + if getattr(tv, "nlen", 0) != 0: + raise ValueError("Unsupported cSHAKE test vector") + if getattr(tv, "slen", 0) == 0: + custom = b("") + else: + custom = tobytes(tv.s) + assert(tv.slen == len(tv.s)*8) + + def new_test(self, data=data, result=tv.md, custom=custom, test_class=test_class): + hobj = test_class.new(data=data, custom=custom) + digest = hobj.read(len(result)) + self.assertEqual(digest, result) + + setattr(cSHAKEVectors, "test_%s_%d" % (tag, idx), new_test) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(cSHAKE128Test) + tests += list_test_cases(cSHAKE256Test) + tests += list_test_cases(cSHAKEVectors) + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_keccak.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_keccak.py new file mode 100644 index 000000000..54cdf27d8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Hash/test_keccak.py @@ -0,0 +1,250 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
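Before the keccak test body below, a brief illustrative sketch (not part of the vendored file) of the keccak.new() interface those tests exercise:

# Illustrative sketch only; behaviour taken from the tests that follow.
from Crypto.Hash import keccak

# digest_bits or digest_bytes must be given explicitly (see test_new_negative).
h = keccak.new(digest_bits=256)
h.update(b"hello ").update(b"world")
d1 = h.digest()

# update() after digest() raises TypeError unless update_after_digest=True,
# in which case the next digest() covers the whole message so far
# (see test_update_after_digest).
h2 = keccak.new(digest_bits=256, data=b"hello ", update_after_digest=True)
h2.digest()
h2.update(b"world")
assert h2.digest() == d1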
+# =================================================================== + +"""Self-test suite for Crypto.Hash.keccak""" + +import unittest +from binascii import hexlify, unhexlify + +from Crypto.SelfTest.loader import load_test_vectors +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Hash import keccak +from Crypto.Util.py3compat import b, tobytes, bchr + +class KeccakTest(unittest.TestCase): + + def test_new_positive(self): + + for digest_bits in (224, 256, 384, 512): + hobj = keccak.new(digest_bits=digest_bits) + self.assertEqual(hobj.digest_size, digest_bits // 8) + + hobj2 = hobj.new() + self.assertEqual(hobj2.digest_size, digest_bits // 8) + + for digest_bytes in (28, 32, 48, 64): + hobj = keccak.new(digest_bytes=digest_bytes) + self.assertEqual(hobj.digest_size, digest_bytes) + + hobj2 = hobj.new() + self.assertEqual(hobj2.digest_size, digest_bytes) + + def test_new_positive2(self): + + digest1 = keccak.new(data=b("\x90"), digest_bytes=64).digest() + digest2 = keccak.new(digest_bytes=64).update(b("\x90")).digest() + self.assertEqual(digest1, digest2) + + def test_new_negative(self): + + # keccak.new needs digest size + self.assertRaises(TypeError, keccak.new) + + h = keccak.new(digest_bits=512) + + # Either bits or bytes can be specified + self.assertRaises(TypeError, keccak.new, + digest_bytes=64, + digest_bits=512) + + # Range + self.assertRaises(ValueError, keccak.new, digest_bytes=0) + self.assertRaises(ValueError, keccak.new, digest_bytes=1) + self.assertRaises(ValueError, keccak.new, digest_bytes=65) + self.assertRaises(ValueError, keccak.new, digest_bits=0) + self.assertRaises(ValueError, keccak.new, digest_bits=1) + self.assertRaises(ValueError, keccak.new, digest_bits=513) + + def test_update(self): + pieces = [bchr(10) * 200, bchr(20) * 300] + h = keccak.new(digest_bytes=64) + h.update(pieces[0]).update(pieces[1]) + digest = h.digest() + h = keccak.new(digest_bytes=64) + h.update(pieces[0] + pieces[1]) + self.assertEqual(h.digest(), digest) + + def test_update_negative(self): + h = keccak.new(digest_bytes=64) + self.assertRaises(TypeError, h.update, u"string") + + def test_digest(self): + h = keccak.new(digest_bytes=64) + digest = h.digest() + + # hexdigest does not change the state + self.assertEqual(h.digest(), digest) + # digest returns a byte string + self.assertTrue(isinstance(digest, type(b("digest")))) + + def test_hex_digest(self): + mac = keccak.new(digest_bits=512) + digest = mac.digest() + hexdigest = mac.hexdigest() + + # hexdigest is equivalent to digest + self.assertEqual(hexlify(digest), tobytes(hexdigest)) + # hexdigest does not change the state + self.assertEqual(mac.hexdigest(), hexdigest) + # hexdigest returns a string + self.assertTrue(isinstance(hexdigest, type("digest"))) + + def test_update_after_digest(self): + msg=b("rrrrttt") + + # Normally, update() cannot be done after digest() + h = keccak.new(digest_bits=512, data=msg[:4]) + dig1 = h.digest() + self.assertRaises(TypeError, h.update, msg[4:]) + dig2 = keccak.new(digest_bits=512, data=msg).digest() + + # With the proper flag, it is allowed + h = keccak.new(digest_bits=512, data=msg[:4], update_after_digest=True) + self.assertEqual(h.digest(), dig1) + # ... 
and the subsequent digest applies to the entire message + # up to that point + h.update(msg[4:]) + self.assertEqual(h.digest(), dig2) + + +class KeccakVectors(unittest.TestCase): + pass + + # TODO: add ExtremelyLong tests + + +test_vectors_224 = load_test_vectors(("Hash", "keccak"), + "ShortMsgKAT_224.txt", + "Short Messages KAT 224", + {"len": lambda x: int(x)}) or [] + +test_vectors_224 += load_test_vectors(("Hash", "keccak"), + "LongMsgKAT_224.txt", + "Long Messages KAT 224", + {"len": lambda x: int(x)}) or [] + +for idx, tv in enumerate(test_vectors_224): + if tv.len == 0: + data = b("") + else: + data = tobytes(tv.msg) + + def new_test(self, data=data, result=tv.md): + hobj = keccak.new(digest_bits=224, data=data) + self.assertEqual(hobj.digest(), result) + + setattr(KeccakVectors, "test_224_%d" % idx, new_test) + +# --- + +test_vectors_256 = load_test_vectors(("Hash", "keccak"), + "ShortMsgKAT_256.txt", + "Short Messages KAT 256", + { "len" : lambda x: int(x) } ) or [] + +test_vectors_256 += load_test_vectors(("Hash", "keccak"), + "LongMsgKAT_256.txt", + "Long Messages KAT 256", + { "len" : lambda x: int(x) } ) or [] + +for idx, tv in enumerate(test_vectors_256): + if tv.len == 0: + data = b("") + else: + data = tobytes(tv.msg) + + def new_test(self, data=data, result=tv.md): + hobj = keccak.new(digest_bits=256, data=data) + self.assertEqual(hobj.digest(), result) + + setattr(KeccakVectors, "test_256_%d" % idx, new_test) + + +# --- + +test_vectors_384 = load_test_vectors(("Hash", "keccak"), + "ShortMsgKAT_384.txt", + "Short Messages KAT 384", + {"len": lambda x: int(x)}) or [] + +test_vectors_384 += load_test_vectors(("Hash", "keccak"), + "LongMsgKAT_384.txt", + "Long Messages KAT 384", + {"len": lambda x: int(x)}) or [] + +for idx, tv in enumerate(test_vectors_384): + if tv.len == 0: + data = b("") + else: + data = tobytes(tv.msg) + + def new_test(self, data=data, result=tv.md): + hobj = keccak.new(digest_bits=384, data=data) + self.assertEqual(hobj.digest(), result) + + setattr(KeccakVectors, "test_384_%d" % idx, new_test) + +# --- + +test_vectors_512 = load_test_vectors(("Hash", "keccak"), + "ShortMsgKAT_512.txt", + "Short Messages KAT 512", + {"len": lambda x: int(x)}) or [] + +test_vectors_512 += load_test_vectors(("Hash", "keccak"), + "LongMsgKAT_512.txt", + "Long Messages KAT 512", + {"len": lambda x: int(x)}) or [] + +for idx, tv in enumerate(test_vectors_512): + if tv.len == 0: + data = b("") + else: + data = tobytes(tv.msg) + + def new_test(self, data=data, result=tv.md): + hobj = keccak.new(digest_bits=512, data=data) + self.assertEqual(hobj.digest(), result) + + setattr(KeccakVectors, "test_512_%d" % idx, new_test) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(KeccakTest) + tests += list_test_cases(KeccakVectors) + return tests + + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/IO/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/IO/__init__.py new file mode 100644 index 000000000..c04a2a780 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/IO/__init__.py @@ -0,0 +1,47 @@ +# +# SelfTest/IO/__init__.py: Self-test for input/output module +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +"""Self-test for I/O""" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.IO import test_PKCS8; tests += test_PKCS8.get_tests(config=config) + from Crypto.SelfTest.IO import test_PBES; tests += test_PBES.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/IO/test_PBES.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/IO/test_PBES.py new file mode 100644 index 000000000..029e824a4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/IO/test_PBES.py @@ -0,0 +1,118 @@ +# +# SelfTest/IO/test_PBES.py: Self-test for the _PBES module +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +"""Self-tests for Crypto.IO._PBES module""" + +import unittest + +from Crypto.IO._PBES import PBES2 + + +class TestPBES2(unittest.TestCase): + + def setUp(self): + self.ref = b"Test data" + self.passphrase = b"Passphrase" + + def test1(self): + ct = PBES2.encrypt(self.ref, self.passphrase, + 'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC') + pt = PBES2.decrypt(ct, self.passphrase) + self.assertEqual(self.ref, pt) + + def test2(self): + ct = PBES2.encrypt(self.ref, self.passphrase, + 'PBKDF2WithHMAC-SHA224AndAES128-CBC') + pt = PBES2.decrypt(ct, self.passphrase) + self.assertEqual(self.ref, pt) + + def test3(self): + ct = PBES2.encrypt(self.ref, self.passphrase, + 'PBKDF2WithHMAC-SHA256AndAES192-CBC') + pt = PBES2.decrypt(ct, self.passphrase) + self.assertEqual(self.ref, pt) + + def test4(self): + ct = PBES2.encrypt(self.ref, self.passphrase, + 'PBKDF2WithHMAC-SHA384AndAES256-CBC') + pt = PBES2.decrypt(ct, self.passphrase) + self.assertEqual(self.ref, pt) + + def test5(self): + ct = PBES2.encrypt(self.ref, self.passphrase, + 'PBKDF2WithHMAC-SHA512AndAES128-GCM') + pt = PBES2.decrypt(ct, self.passphrase) + self.assertEqual(self.ref, pt) + + def test6(self): + ct = PBES2.encrypt(self.ref, self.passphrase, + 'PBKDF2WithHMAC-SHA512-224AndAES192-GCM') + pt = PBES2.decrypt(ct, self.passphrase) + self.assertEqual(self.ref, pt) + + def test7(self): + ct = PBES2.encrypt(self.ref, self.passphrase, + 'PBKDF2WithHMAC-SHA3-256AndAES256-GCM') + pt = PBES2.decrypt(ct, self.passphrase) + self.assertEqual(self.ref, pt) + + def test8(self): + ct = PBES2.encrypt(self.ref, self.passphrase, + 'scryptAndAES128-CBC') + pt = PBES2.decrypt(ct, self.passphrase) + self.assertEqual(self.ref, pt) + + def test9(self): + ct = PBES2.encrypt(self.ref, self.passphrase, + 'scryptAndAES192-CBC') + pt = PBES2.decrypt(ct, self.passphrase) + self.assertEqual(self.ref, pt) + + def test10(self): + ct = PBES2.encrypt(self.ref, self.passphrase, + 'scryptAndAES256-CBC') + pt = PBES2.decrypt(ct, self.passphrase) + self.assertEqual(self.ref, pt) + + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + listTests = [] + listTests += list_test_cases(TestPBES2) + return listTests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/IO/test_PKCS8.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/IO/test_PKCS8.py new file mode 100644 index 000000000..3525796be --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/IO/test_PKCS8.py @@ -0,0 +1,459 @@ +# +# SelfTest/IO/test_PKCS8.py: Self-test for the PKCS8 module +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +"""Self-tests for Crypto.IO.PKCS8 module""" + +import unittest +from binascii import unhexlify + +from Crypto.Util.py3compat import * +from Crypto.IO import PKCS8 + +from Crypto.Util.asn1 import DerNull + +oid_key = '1.2.840.113549.1.1.1' + +# Original RSA key (in DER format) +# hexdump -v -e '32/1 "%02x" "\n"' key.der +clear_key=""" +308201ab020100025a00b94a7f7075ab9e79e8196f47be707781e80dd965cf16 +0c951a870b71783b6aaabbd550c0e65e5a3dfe15b8620009f6d7e5efec42a3f0 +6fe20faeebb0c356e79cdec6db4dd427e82d8ae4a5b90996227b8ba54ccfc4d2 +5c08050203010001025a00afa09c70d528299b7552fe766b5d20f9a221d66938 +c3b68371d48515359863ff96f0978d700e08cd6fd3d8a3f97066fc2e0d5f78eb +3a50b8e17ba297b24d1b8e9cdfd18d608668198d724ad15863ef0329195dee89 +3f039395022d0ebe0518df702a8b25954301ec60a97efdcec8eaa4f2e76ca7e8 +8dfbc3f7e0bb83f9a0e8dc47c0f8c746e9df6b022d0c9195de13f09b7be1fdd7 +1f56ae7d973e08bd9fd2c3dfd8936bb05be9cc67bd32d663c7f00d70932a0be3 +c24f022d0ac334eb6cabf1933633db007b763227b0d9971a9ea36aca8b669ec9 +4fcf16352f6b3dcae28e4bd6137db4ddd3022d0400a09f15ee7b351a2481cb03 +09920905c236d09c87afd3022f3afc2a19e3b746672b635238956ee7e6dd62d5 +022d0cd88ed14fcfbda5bbf0257f700147137bbab9c797af7df866704b889aa3 +7e2e93df3ff1a0fd3490111dcdbc4c +""" + +# Same key as above, wrapped in PKCS#8 but w/o password +# +# openssl pkcs8 -topk8 -inform DER -nocrypt -in key.der -outform DER -out keyp8.der +# hexdump -v -e '32/1 "%02x" "\n"' keyp8.der +wrapped_clear_key=""" +308201c5020100300d06092a864886f70d0101010500048201af308201ab0201 +00025a00b94a7f7075ab9e79e8196f47be707781e80dd965cf160c951a870b71 +783b6aaabbd550c0e65e5a3dfe15b8620009f6d7e5efec42a3f06fe20faeebb0 +c356e79cdec6db4dd427e82d8ae4a5b90996227b8ba54ccfc4d25c0805020301 +0001025a00afa09c70d528299b7552fe766b5d20f9a221d66938c3b68371d485 +15359863ff96f0978d700e08cd6fd3d8a3f97066fc2e0d5f78eb3a50b8e17ba2 +97b24d1b8e9cdfd18d608668198d724ad15863ef0329195dee893f039395022d +0ebe0518df702a8b25954301ec60a97efdcec8eaa4f2e76ca7e88dfbc3f7e0bb +83f9a0e8dc47c0f8c746e9df6b022d0c9195de13f09b7be1fdd71f56ae7d973e +08bd9fd2c3dfd8936bb05be9cc67bd32d663c7f00d70932a0be3c24f022d0ac3 +34eb6cabf1933633db007b763227b0d9971a9ea36aca8b669ec94fcf16352f6b 
+3dcae28e4bd6137db4ddd3022d0400a09f15ee7b351a2481cb0309920905c236 +d09c87afd3022f3afc2a19e3b746672b635238956ee7e6dd62d5022d0cd88ed1 +4fcfbda5bbf0257f700147137bbab9c797af7df866704b889aa37e2e93df3ff1 +a0fd3490111dcdbc4c +""" + +### +# +# The key above will now be encrypted with different algorithms. +# The password is always 'TestTest'. +# +# Each item in the wrapped_enc_keys list contains: +# * wrap algorithm +# * iteration count +# * Salt +# * IV +# * Expected result +### +wrapped_enc_keys = [] + +# +# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der -outform DER -out keyenc.der -v2 des3 +# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der +# +wrapped_enc_keys.append(( +'PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC', +2048, +"47EA7227D8B22E2F", # IV +"E3F7A838AB911A4D", # Salt +""" +30820216304006092a864886f70d01050d3033301b06092a864886f70d01050c +300e0408e3f7a838ab911a4d02020800301406082a864886f70d0307040847ea +7227d8b22e2f048201d0ea388b374d2d0e4ceb7a5139f850fdff274884a6e6c0 +64326e09d00dbba9018834edb5a51a6ae3d1806e6e91eebf33788ce71fee0637 +a2ebf58859dd32afc644110c390274a6128b50c39b8d907823810ec471bada86 +6f5b75d8ea04ad310fad2e73621696db8e426cd511ee93ec1714a1a7db45e036 +4bf20d178d1f16bbb250b32c2d200093169d588de65f7d99aad9ddd0104b44f1 +326962e1520dfac3c2a800e8a14f678dff2b3d0bb23f69da635bf2a643ac934e +219a447d2f4460b67149e860e54f365da130763deefa649c72b0dcd48966a2d3 +4a477444782e3e66df5a582b07bbb19778a79bd355074ce331f4a82eb966b0c4 +52a09eab6116f2722064d314ae433b3d6e81d2436e93fdf446112663cde93b87 +9c8be44beb45f18e2c78fee9b016033f01ecda51b9b142091fa69f65ab784d2c +5ad8d34be6f7f1464adfc1e0ef3f7848f40d3bdea4412758f2fcb655c93d8f4d +f6fa48fc5aa4b75dd1c017ab79ac9d737233a6d668f5364ccf47786debd37334 +9c10c9e6efbe78430a61f71c89948aa32cdc3cc7338cf994147819ce7ab23450 +c8f7d9b94c3bb377d17a3fa204b601526317824b142ff6bc843fa7815ece89c0 +839573f234dac8d80cc571a045353d61db904a4398d8ef3df5ac +""" +)) + +# +# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der -outform DER -out keyenc.der +# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der +# +wrapped_enc_keys.append(( +'skip encryption', # pbeWithMD5AndDES-CBC, only decoding is supported +-1, +"", +"", +""" +308201f1301b06092a864886f70d010503300e0408f9b990c89af1d41b020208 +00048201d0c6267fe8592903891933d559e71a7ca68b2e39150f19daca0f7921 +52f97e249d72f670d5140e9150433310ed7c7ee51927693fd39884cb9551cea5 +a7b746f7edf199f8787d4787a35dad930d7db057b2118851211b645ac8b90fa6 +b0e7d49ac8567cbd5fff226e87aa9129a0f52c45e9307752e8575c3b0ff756b7 +31fda6942d15ecb6b27ea19370ccc79773f47891e80d22b440d81259c4c28eac +e0ca839524116bcf52d8c566e49a95ddb0e5493437279a770a39fd333f3fca91 +55884fad0ba5aaf273121f893059d37dd417da7dcfd0d6fa7494968f13b2cc95 +65633f2c891340193e5ec00e4ee0b0e90b3b93da362a4906360845771ade1754 +9df79140be5993f3424c012598eadd3e7c7c0b4db2c72cf103d7943a5cf61420 +93370b9702386c3dd4eb0a47f34b579624a46a108b2d13921fa1b367495fe345 +6aa128aa70f8ca80ae13eb301e96c380724ce67c54380bbea2316c1faf4d058e +b4ca2e23442047606b9bc4b3bf65b432cb271bea4eb35dd3eb360d3be8612a87 +a50e96a2264490aeabdc07c6e78e5dbf4fe3388726d0e2a228346bf3c2907d68 +2a6276b22ae883fb30fa611f4e4193e7a08480fcd7db48308bacbd72bf4807aa +11fd394859f97d22982f7fe890b2e2a0f7e7ffb693 +""" +)) + +# +# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der +# -outform DER -out keyenc.der -v1 PBE-SHA1-RC2-64 +# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der +# +wrapped_enc_keys.append(( +'skip encryption', # pbeWithSHA1AndRC2-CBC, only decoding is supported +-1, +"", +"", +""" 
+308201f1301b06092a864886f70d01050b300e04083ee943bdae185008020208 +00048201d0e4614d9371d3ff10ceabc2f6a7a13a0f449f9a714144e46518ea55 +e3e6f0cde24031d01ef1f37ec40081449ef01914faf45983dde0d2bc496712de +8dd15a5527dff4721d9016c13f34fb93e3ce68577e30146266d71b539f854e56 +753a192cf126ed4812734d86f81884374f1100772f78d0646e9946407637c565 +d070acab413c55952f7237437f2e48cae7fa0ff8d370de2bf446dd08049a3663 +d9c813ac197468c02e2b687e7ca994cf7f03f01b6eca87dbfed94502c2094157 +ea39f73fe4e591df1a68b04d19d9adab90bb9898467c1464ad20bf2b8fb9a5ff +d3ec91847d1c67fd768a4b9cfb46572eccc83806601372b6fad0243f58f623b7 +1c5809dea0feb8278fe27e5560eed8448dc93f5612f546e5dd7c5f6404365eb2 +5bf3396814367ae8b15c5c432b57eaed1f882c05c7f6517ee9e42b87b7b8d071 +9d6125d1b52f7b2cca1f6bd5f584334bf90bce1a7d938274cafe27b68e629698 +b16e27ae528db28593af9adcfccbebb3b9e1f2af5cd5531b51968389caa6c091 +e7de1f1b96f0d258e54e540d961a7c0ef51fda45d6da5fddd33e9bbfd3a5f8d7 +d7ab2e971de495cddbc86d38444fee9f0ac097b00adaf7802dabe0cff5b43b45 +4f26b7b547016f89be52676866189911c53e2f2477""" +)) + +# +# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der +# -outform DER -out keyenc.der -v1 PBE-MD5-RC2-64 +# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der +# +wrapped_enc_keys.append(( +'skip encryption', # pbeWithMD5AndRC2-CBC, only decoding is supported +-1, +"", +"", +""" +308201f1301b06092a864886f70d010506300e0408f5cd2fee56d9b4b8020208 +00048201d086454942d6166a19d6b108465bd111e7080911f573d54b1369c676 +df28600e84936bfec04f91023ff16499e2e07178c340904f12ffa6886ab66228 +32bf43c2bff5a0ed14e765918cf5fc543ad49566246f7eb3fc044fa5a9c25f40 +8fc8c8296b91658d3bb1067c0aba008c4fefd9e2bcdbbbd63fdc8085482bccf4 +f150cec9a084259ad441a017e5d81a1034ef2484696a7a50863836d0eeda45cd +8cee8ecabfed703f8d9d4bbdf3a767d32a0ccdc38550ee2928d7fe3fa27eda5b +5c7899e75ad55d076d2c2d3c37d6da3d95236081f9671dab9a99afdb1cbc890e +332d1a91105d9a8ce08b6027aa07367bd1daec3059cb51f5d896124da16971e4 +0ca4bcadb06c854bdf39f42dd24174011414e51626d198775eff3449a982df7b +ace874e77e045eb6d7c3faef0750792b29a068a6291f7275df1123fac5789c51 +27ace42836d81633faf9daf38f6787fff0394ea484bbcd465b57d4dbee3cf8df +b77d1db287b3a6264c466805be5a4fe85cfbca180699859280f2dd8e2c2c10b5 +7a7d2ac670c6039d41952fbb0e4f99b560ebe1d020e1b96d02403283819c00cc +529c51f0b0101555e4c58002ba3c6e3c12e3fde1aec94382792e96d9666a2b33 +3dc397b22ecab67ee38a552fec29a1d4ff8719c748""" +)) + +# +# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der +# -outform DER -out keyenc.der -v1 PBE-SHA1-DES +# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der +# +wrapped_enc_keys.append(( +'skip encryption', # pbeWithSHA1AndDES-CBC, only decoding is supported +-1, +"", +"", +""" +308201f1301b06092a864886f70d01050a300e04089bacc9cf1e8f734e020208 +00048201d03e502f3ceafe8fd19ab2939576bfdded26d719b2441db1459688f5 +9673218b41ec1f739edf1e460bd927bc28470c87b2d4fc8ea02ba17b47a63c49 +c5c1bee40529dadfd3ef8b4472c730bc136678c78abfb34670ec9d7dcd17ee3f +892f93f2629e6e0f4b24ecb9f954069bf722f466dece3913bb6abbd2c471d9a5 +c5eea89b14aaccda43d30b0dd0f6eb6e9850d9747aa8aa8414c383ad01c374ee +26d3552abec9ba22669cc9622ccf2921e3d0c8ecd1a70e861956de0bec6104b5 +b649ac994970c83f8a9e84b14a7dff7843d4ca3dd4af87cea43b5657e15ae0b5 +a940ce5047f006ab3596506600724764f23757205fe374fee04911336d655acc +03e159ec27789191d1517c4f3f9122f5242d44d25eab8f0658cafb928566ca0e +8f6589aa0c0ab13ca7a618008ae3eafd4671ee8fe0b562e70b3623b0e2a16eee +97fd388087d2e03530c9fe7db6e52eccc7c48fd701ede35e08922861a9508d12 +bc8bbf24f0c6bee6e63dbcb489b603d4c4a78ce45bf2eab1d5d10456c42a65a8 
+3a606f4e4b9b46eb13b57f2624b651859d3d2d5192b45dbd5a2ead14ff20ca76 +48f321309aa56d8c0c4a192b580821cc6c70c75e6f19d1c5414da898ec4dd39d +b0eb93d6ba387a80702dfd2db610757ba340f63230 +""" +)) + +# +# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der +# -outform DER -out keyenc.der -v2 aes128 +# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der +# +wrapped_enc_keys.append(( +'PBKDF2WithHMAC-SHA1AndAES128-CBC', +2048, +"4F66EE5D3BCD531FE6EBF4B4E73016B8", # IV +"479F25156176C53A", # Salt +""" +3082021f304906092a864886f70d01050d303c301b06092a864886f70d01050c +300e0408479f25156176c53a02020800301d060960864801650304010204104f +66ee5d3bcd531fe6ebf4b4e73016b8048201d0e33cfa560423f589d097d21533 +3b880a5ebac5b2ac58b4e73b0d787aee7764f034fe34ca1d1bd845c0a7c3316f +afbfb2129e03dcaf5a5031394206492828dacef1e04639bee5935e0f46114202 +10bc6c37182f4889be11c5d0486c398f4be952e5740f65de9d8edeb275e2b406 +e19bc29ad5ebb97fa536344fc3d84c7e755696f12b810898de4e6f069b8a81c8 +0aab0d45d7d062303aaa4a10c2ce84fdb5a03114039cfe138e38bb15b2ced717 +93549cdad85e730b14d9e2198b663dfdc8d04a4349eb3de59b076ad40b116d4a +25ed917c576bc7c883c95ef0f1180e28fc9981bea069594c309f1aa1b253ceab +a2f0313bb1372bcb51a745056be93d77a1f235a762a45e8856512d436b2ca0f7 +dd60fbed394ba28978d2a2b984b028529d0a58d93aba46c6bbd4ac1e4013cbaa +63b00988bc5f11ccc40141c346762d2b28f64435d4be98ec17c1884985e3807e +e550db606600993efccf6de0dfc2d2d70b5336a3b018fa415d6bdd59f5777118 +16806b7bc17c4c7e20ad7176ebfa5a1aa3f6bc10f04b77afd443944642ac9cca +d740e082b4a3bbb8bafdd34a0b3c5f2f3c2aceccccdccd092b78994b845bfa61 +706c3b9df5165ed1dbcbf1244fe41fc9bf993f52f7658e2f87e1baaeacb0f562 +9d905c +""" +)) + +# +# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der +# -outform DER -out keyenc.der -v2 aes192 +# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der +# +wrapped_enc_keys.append(( +'PBKDF2WithHMAC-SHA1AndAES192-CBC', +2048, +"5CFC2A4FF7B63201A4A8A5B021148186", # IV +"D718541C264944CE", # Salt +""" +3082021f304906092a864886f70d01050d303c301b06092a864886f70d01050c +300e0408d718541c264944ce02020800301d060960864801650304011604105c +fc2a4ff7b63201a4a8a5b021148186048201d08e74aaa21b8bcfb15b9790fe95 +b0e09ddb0f189b6fb1682fdb9f122b804650ddec3c67a1df093a828b3e5fbcc6 +286abbcc5354c482fd796d972e919ca8a5eba1eaa2293af1d648013ddad72106 +75622264dfba55dafdda39e338f058f1bdb9846041ffff803797d3fdf3693135 +8a192729ea8346a7e5e58e925a2e2e4af0818581859e8215d87370eb4194a5ff +bae900857d4c591dbc651a241865a817eaede9987c9f9ae4f95c0bf930eea88c +4d7596e535ffb7ca369988aba75027a96b9d0bc9c8b0b75f359067fd145a378b +02aaa15e9db7a23176224da48a83249005460cc6e429168657f2efa8b1af7537 +d7d7042f2d683e8271b21d591090963eeb57aea6172f88da139e1614d6a7d1a2 +1002d5a7a93d6d21156e2b4777f6fc069287a85a1538c46b7722ccde591ab55c +630e1ceeb1ac42d1b41f3f654e9da86b5efced43775ea68b2594e50e4005e052 +0fe753c0898120c2c07265367ff157f6538a1e4080d6f9d1ca9eb51939c9574e +f2e4e1e87c1434affd5808563cddd376776dbbf790c6a40028f311a8b58dafa2 +0970ed34acd6e3e89d063987893b2b9570ddb8cc032b05a723bba9444933ebf3 +c624204be72f4190e0245197d0cb772bec933fd8442445f9a28bd042d5a3a1e9 +9a8a07 +""" +)) + +# +# openssl pkcs8 -topk8 -passin pass:TestTest -inform DER -in key.der +# -outform DER -out keyenc.der -v2 aes192 +# hexdump -v -e '32/1 "%02x" "\n"' keyenc.der +# +wrapped_enc_keys.append(( +'PBKDF2WithHMAC-SHA1AndAES256-CBC', +2048, +"323351F94462AC563E053A056252C2C4", # IV +"02A6CD0D12E727B5", # Salt +""" +3082021f304906092a864886f70d01050d303c301b06092a864886f70d01050c +300e040802a6cd0d12e727b502020800301d060960864801650304012a041032 
+3351f94462ac563e053a056252c2c4048201d07f4ef1c7be21aae738a20c5632 +b8bdbbb9083b6e7f68822267b1f481fd27fdafd61a90660de6e4058790e4c912 +bf3f319a7c37e6eb3d956daaa143865020d554bf6215e8d7492359aaeef45d6e +d85a686ed26c0bf7c18d071d827a86f0b73e1db0c0e7f3d42201544093302a90 +551ad530692468c47ac15c69500b8ca67d4a17b64d15cecc035ae50b768a36cf +07c395afa091e9e6f86f665455fbdc1b21ad79c0908b73da5de75a9b43508d5d +44dc97a870cd3cd9f01ca24452e9b11c1b4982946702cfcbfda5b2fcc0203fb5 +0b52a115760bd635c94d4c95ac2c640ee9a04ffaf6ccff5a8d953dd5d88ca478 +c377811c521f2191639c643d657a9e364af88bb7c14a356c2b0b4870a23c2f54 +d41f8157afff731471dccc6058b15e1151bcf84b39b5e622a3a1d65859c912a5 +591b85e034a1f6af664f030a6bfc8c3d20c70f32b54bcf4da9c2da83cef49cf8 +e9a74f0e5d358fe50b88acdce6a9db9a7ad61536212fc5f877ebfc7957b8bda4 +b1582a0f10d515a20ee06cf768db9c977aa6fbdca7540d611ff953012d009dac +e8abd059f8e8ffea637c9c7721f817aaf0bb23403e26a0ef0ff0e2037da67d41 +af728481f53443551a9bff4cea023164e9622b5441a309e1f4bff98e5bf76677 +8d7cd9 +""" +)) + +# hexdump -v -e '32/1 "%02x" "\n"' botan_scrypt.der +botan_scrypt = """ +3081f1305206092a864886f70d01050d3045302806092b06010401da47040b30 +1b040c316c5c7a847276a838a668280202200002010102010102012030190609 +60864801650304012e040c293e9bcddc0d59d64e060cb604819ab92318063480 +16148081a3123bb092b636ec0cc3b964628e181504c13eaf94987e6fb9f171d4 +9c45baeeb79c1d805d5a762d9bfd6d1995669df60a2cd0174b6d204693964de7 +05bc3fdc3a4ce5db01f30a994c82b0aac786e4f8655138c952f1cf2cc6093f90 +b5e5ca507beb539ff497b7b6370ba7f31f4928d3385dbe8bcd2395813ba1324e +6795e81a8518aff0f0a9e01396539f937b8b7b08 +""" + +# hexdump -v -e '32/1 "%02x" "\n"' botan_pbkdf2.der +botan_pbkdf2 = """ +3081f3305e06092a864886f70d01050d3051303006092a864886f70d01050c30 +23040cc91c89b368db578d2ec4c32002020fa0020118300c06082a864886f70d +02090500301d060960864801650304011604102a7147289e7c914a7d8257e4a1 +a2135b048190a648955fc96ecae56dcb4d0ab19edc5b7ef1219c88c7c3b2d0ed +b21e25d2559447f53e20b90b2f20e72456d943561c4925aad6067a4c720afb3d +691e14dfffa10ef77898e21d134f19136d35088a7aac508b296fd00d5742ad69 +8c693293b6a591e3660b130d718724d23d696f4da9bf4031475fafb682d7955c +996363f37032e10ac85afebb7cc1cbfc0e5d4c60a4c2 +""" + +def txt2bin(inputs): + s = b('').join([b(x) for x in inputs if not (x in '\n\r\t ')]) + return unhexlify(s) + +class Rng: + def __init__(self, output): + self.output=output + self.idx=0 + def __call__(self, n): + output = self.output[self.idx:self.idx+n] + self.idx += n + return output + +class PKCS8_Decrypt(unittest.TestCase): + + def setUp(self): + self.oid_key = oid_key + self.clear_key = txt2bin(clear_key) + self.wrapped_clear_key = txt2bin(wrapped_clear_key) + self.wrapped_enc_keys = [] + for t in wrapped_enc_keys: + self.wrapped_enc_keys.append(( + t[0], + t[1], + txt2bin(t[2]), + txt2bin(t[3]), + txt2bin(t[4]) + )) + + ### NO ENCRYTION + + def test1(self): + """Verify unwrapping w/o encryption""" + res1, res2, res3 = PKCS8.unwrap(self.wrapped_clear_key) + self.assertEqual(res1, self.oid_key) + self.assertEqual(res2, self.clear_key) + + def test2(self): + """Verify wrapping w/o encryption""" + wrapped = PKCS8.wrap(self.clear_key, self.oid_key) + res1, res2, res3 = PKCS8.unwrap(wrapped) + self.assertEqual(res1, self.oid_key) + self.assertEqual(res2, self.clear_key) + + ## ENCRYPTION + + def test3(self): + """Verify unwrapping with encryption""" + + for t in self.wrapped_enc_keys: + res1, res2, res3 = PKCS8.unwrap(t[4], b"TestTest") + self.assertEqual(res1, self.oid_key) + self.assertEqual(res2, self.clear_key) + + def test4(self): + """Verify 
wrapping with encryption""" + + for t in self.wrapped_enc_keys: + if t[0] == 'skip encryption': + continue + rng = Rng(t[2]+t[3]) + params = { 'iteration_count':t[1] } + wrapped = PKCS8.wrap( + self.clear_key, + self.oid_key, + b("TestTest"), + protection=t[0], + prot_params=params, + key_params=DerNull(), + randfunc=rng) + self.assertEqual(wrapped, t[4]) + + def test_import_botan_keys(self): + botan_scrypt_der = txt2bin(botan_scrypt) + key1 = PKCS8.unwrap(botan_scrypt_der, + b'your_password') + botan_pbkdf2_der = txt2bin(botan_pbkdf2) + key2 = PKCS8.unwrap(botan_pbkdf2_der, + b'your_password') + self.assertEqual(key1, key2) + + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + listTests = [] + listTests += list_test_cases(PKCS8_Decrypt) + return listTests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Math/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Math/__init__.py new file mode 100644 index 000000000..c72d7dc32 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Math/__init__.py @@ -0,0 +1,51 @@ +# +# SelfTest/Math/__init__.py: Self-test for math module +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
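For orientation on the PKCS8 tests added above: they round-trip PKCS#8 wrapping and unwrapping under the PBES2 protection schemes listed in the vector comments. A minimal sketch of that flow follows (illustrative only; the key bytes are a hypothetical placeholder, and key_params=DerNull() mirrors what the tests pass):

# Illustrative sketch only, not part of the vendored files.
from Crypto.IO import PKCS8
from Crypto.Util.asn1 import DerNull

rsa_oid = '1.2.840.113549.1.1.1'          # same OID the tests use
raw_key = b'hypothetical DER-encoded key'  # placeholder; the tests use clear_key

wrapped = PKCS8.wrap(raw_key, rsa_oid, b'TestTest',
                     protection='PBKDF2WithHMAC-SHA1AndAES128-CBC',
                     key_params=DerNull())

# unwrap() returns (algorithm OID, raw key, key parameters).
oid, key, params = PKCS8.unwrap(wrapped, b'TestTest')
assert oid == rsa_oid and key == raw_key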
+# =================================================================== + +"""Self-test for Math""" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Math import test_Numbers + from Crypto.SelfTest.Math import test_Primality + from Crypto.SelfTest.Math import test_modexp + from Crypto.SelfTest.Math import test_modmult + tests += test_Numbers.get_tests(config=config) + tests += test_Primality.get_tests(config=config) + tests += test_modexp.get_tests(config=config) + tests += test_modmult.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Math/test_Numbers.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Math/test_Numbers.py new file mode 100644 index 000000000..7609485a4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Math/test_Numbers.py @@ -0,0 +1,825 @@ +# +# SelfTest/Math/test_Numbers.py: Self-test for Numbers module +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
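The Integer test battery that follows checks a large arithmetic surface (conversions, modular arithmetic, gcd/lcm, Jacobi symbols). A compact illustrative sketch, not part of the vendored file, using values taken directly from the tests themselves:

# Illustrative sketch only; every asserted value appears in the tests below.
from Crypto.Math._IntegerNative import IntegerNative as Integer

a = Integer(0xFFFE)
assert a.to_bytes(3) == b'\x00\xff\xfe'      # big-endian, zero-padded
assert Integer.from_bytes(b'\x00\x01') == 1

assert pow(Integer(23), 5, 17) == 7          # modular exponentiation
assert Integer(2).inverse(5) == 3            # modular inverse
assert Integer(6).gcd(10) == 2
assert Integer(6).lcm(10) == 30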
+# =================================================================== + +"""Self-test for Math.Numbers""" + +import sys +import unittest + +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Util.py3compat import * + +from Crypto.Math._IntegerNative import IntegerNative + + +class TestIntegerBase(unittest.TestCase): + + def setUp(self): + raise NotImplementedError("To be implemented") + + def Integers(self, *arg): + return map(self.Integer, arg) + + def test_init_and_equality(self): + Integer = self.Integer + + v1 = Integer(23) + v2 = Integer(v1) + v3 = Integer(-9) + self.assertRaises(ValueError, Integer, 1.0) + + v4 = Integer(10**10) + v5 = Integer(-10**10) + + v6 = Integer(0xFFFF) + v7 = Integer(0xFFFFFFFF) + v8 = Integer(0xFFFFFFFFFFFFFFFF) + + self.assertEqual(v1, v1) + self.assertEqual(v1, 23) + self.assertEqual(v1, v2) + self.assertEqual(v3, -9) + self.assertEqual(v4, 10 ** 10) + self.assertEqual(v5, -10 ** 10) + self.assertEqual(v6, 0xFFFF) + self.assertEqual(v7, 0xFFFFFFFF) + self.assertEqual(v8, 0xFFFFFFFFFFFFFFFF) + + self.assertFalse(v1 == v4) + + # Init and comparison between Integer's + v6 = Integer(v1) + self.assertEqual(v1, v6) + + self.assertFalse(Integer(0) == None) + + def test_conversion_to_int(self): + v1, v2 = self.Integers(-23, 2 ** 1000) + self.assertEqual(int(v1), -23) + self.assertEqual(int(v2), 2 ** 1000) + + def test_equality_with_ints(self): + v1, v2, v3 = self.Integers(23, -89, 2 ** 1000) + self.assertTrue(v1 == 23) + self.assertTrue(v2 == -89) + self.assertFalse(v1 == 24) + self.assertTrue(v3 == 2 ** 1000) + + def test_conversion_to_str(self): + v1, v2, v3, v4 = self.Integers(20, 0, -20, 2 ** 1000) + self.assertTrue(str(v1) == "20") + self.assertTrue(str(v2) == "0") + self.assertTrue(str(v3) == "-20") + self.assertTrue(str(v4) == "10715086071862673209484250490600018105614048117055336074437503883703510511249361224931983788156958581275946729175531468251871452856923140435984577574698574803934567774824230985421074605062371141877954182153046474983581941267398767559165543946077062914571196477686542167660429831652624386837205668069376") + + def test_repr(self): + v1, v2 = self.Integers(-1, 2**80) + self.assertEqual(repr(v1), "Integer(-1)") + self.assertEqual(repr(v2), "Integer(1208925819614629174706176)") + + def test_conversion_to_bytes(self): + Integer = self.Integer + + v1 = Integer(0x17) + self.assertEqual(b("\x17"), v1.to_bytes()) + + v2 = Integer(0xFFFE) + self.assertEqual(b("\xFF\xFE"), v2.to_bytes()) + self.assertEqual(b("\x00\xFF\xFE"), v2.to_bytes(3)) + self.assertRaises(ValueError, v2.to_bytes, 1) + + self.assertEqual(b("\xFE\xFF"), v2.to_bytes(byteorder='little')) + self.assertEqual(b("\xFE\xFF\x00"), v2.to_bytes(3, byteorder='little')) + + v3 = Integer(-90) + self.assertRaises(ValueError, v3.to_bytes) + self.assertRaises(ValueError, v3.to_bytes, byteorder='bittle') + + def test_conversion_from_bytes(self): + Integer = self.Integer + + v1 = Integer.from_bytes(b"\x00") + self.assertTrue(isinstance(v1, Integer)) + self.assertEqual(0, v1) + + v2 = Integer.from_bytes(b"\x00\x01") + self.assertEqual(1, v2) + + v3 = Integer.from_bytes(b"\xFF\xFF") + self.assertEqual(0xFFFF, v3) + + v4 = Integer.from_bytes(b"\x00\x01", 'big') + self.assertEqual(1, v4) + + v5 = Integer.from_bytes(b"\x00\x01", byteorder='big') + self.assertEqual(1, v5) + + v6 = Integer.from_bytes(b"\x00\x01", byteorder='little') + self.assertEqual(0x0100, v6) + + self.assertRaises(ValueError, Integer.from_bytes, b'\x09', 'bittle') + + def test_inequality(self): + # Test 
Integer!=Integer and Integer!=int + v1, v2, v3, v4 = self.Integers(89, 89, 90, -8) + self.assertTrue(v1 != v3) + self.assertTrue(v1 != 90) + self.assertFalse(v1 != v2) + self.assertFalse(v1 != 89) + self.assertTrue(v1 != v4) + self.assertTrue(v4 != v1) + self.assertTrue(self.Integer(0) != None) + + def test_less_than(self): + # Test IntegerInteger and Integer>int + v1, v2, v3, v4, v5 = self.Integers(13, 13, 14, -8, 2 ** 10) + self.assertTrue(v3 > v1) + self.assertTrue(v3 > 13) + self.assertFalse(v1 > v1) + self.assertFalse(v1 > v2) + self.assertFalse(v1 > 13) + self.assertTrue(v1 > v4) + self.assertFalse(v4 > v1) + self.assertTrue(v5 > v1) + self.assertFalse(v1 > v5) + + def test_more_than_or_equal(self): + # Test Integer>=Integer and Integer>=int + v1, v2, v3, v4 = self.Integers(13, 13, 14, -4) + self.assertTrue(v3 >= v1) + self.assertTrue(v3 >= 13) + self.assertTrue(v1 >= v2) + self.assertTrue(v1 >= v1) + self.assertTrue(v1 >= 13) + self.assertFalse(v4 >= v1) + + def test_bool(self): + v1, v2, v3, v4 = self.Integers(0, 10, -9, 2 ** 10) + self.assertFalse(v1) + self.assertFalse(bool(v1)) + self.assertTrue(v2) + self.assertTrue(bool(v2)) + self.assertTrue(v3) + self.assertTrue(v4) + + def test_is_negative(self): + v1, v2, v3, v4, v5 = self.Integers(-3 ** 100, -3, 0, 3, 3**100) + self.assertTrue(v1.is_negative()) + self.assertTrue(v2.is_negative()) + self.assertFalse(v4.is_negative()) + self.assertFalse(v5.is_negative()) + + def test_addition(self): + # Test Integer+Integer and Integer+int + v1, v2, v3 = self.Integers(7, 90, -7) + self.assertTrue(isinstance(v1 + v2, self.Integer)) + self.assertEqual(v1 + v2, 97) + self.assertEqual(v1 + 90, 97) + self.assertEqual(v1 + v3, 0) + self.assertEqual(v1 + (-7), 0) + self.assertEqual(v1 + 2 ** 10, 2 ** 10 + 7) + + def test_subtraction(self): + # Test Integer-Integer and Integer-int + v1, v2, v3 = self.Integers(7, 90, -7) + self.assertTrue(isinstance(v1 - v2, self.Integer)) + self.assertEqual(v2 - v1, 83) + self.assertEqual(v2 - 7, 83) + self.assertEqual(v2 - v3, 97) + self.assertEqual(v1 - (-7), 14) + self.assertEqual(v1 - 2 ** 10, 7 - 2 ** 10) + + def test_multiplication(self): + # Test Integer-Integer and Integer-int + v1, v2, v3, v4 = self.Integers(4, 5, -2, 2 ** 10) + self.assertTrue(isinstance(v1 * v2, self.Integer)) + self.assertEqual(v1 * v2, 20) + self.assertEqual(v1 * 5, 20) + self.assertEqual(v1 * -2, -8) + self.assertEqual(v1 * 2 ** 10, 4 * (2 ** 10)) + + def test_floor_div(self): + v1, v2, v3 = self.Integers(3, 8, 2 ** 80) + self.assertTrue(isinstance(v1 // v2, self.Integer)) + self.assertEqual(v2 // v1, 2) + self.assertEqual(v2 // 3, 2) + self.assertEqual(v2 // -3, -3) + self.assertEqual(v3 // 2 ** 79, 2) + self.assertRaises(ZeroDivisionError, lambda: v1 // 0) + + def test_remainder(self): + # Test Integer%Integer and Integer%int + v1, v2, v3 = self.Integers(23, 5, -4) + self.assertTrue(isinstance(v1 % v2, self.Integer)) + self.assertEqual(v1 % v2, 3) + self.assertEqual(v1 % 5, 3) + self.assertEqual(v3 % 5, 1) + self.assertEqual(v1 % 2 ** 10, 23) + self.assertRaises(ZeroDivisionError, lambda: v1 % 0) + self.assertRaises(ValueError, lambda: v1 % -6) + + def test_simple_exponentiation(self): + v1, v2, v3 = self.Integers(4, 3, -2) + self.assertTrue(isinstance(v1 ** v2, self.Integer)) + self.assertEqual(v1 ** v2, 64) + self.assertEqual(pow(v1, v2), 64) + self.assertEqual(v1 ** 3, 64) + self.assertEqual(pow(v1, 3), 64) + self.assertEqual(v3 ** 2, 4) + self.assertEqual(v3 ** 3, -8) + + self.assertRaises(ValueError, pow, v1, -3) + + def 
test_modular_exponentiation(self): + v1, v2, v3 = self.Integers(23, 5, 17) + + self.assertTrue(isinstance(pow(v1, v2, v3), self.Integer)) + self.assertEqual(pow(v1, v2, v3), 7) + self.assertEqual(pow(v1, 5, v3), 7) + self.assertEqual(pow(v1, v2, 17), 7) + self.assertEqual(pow(v1, 5, 17), 7) + self.assertEqual(pow(v1, 0, 17), 1) + self.assertEqual(pow(v1, 1, 2 ** 80), 23) + self.assertEqual(pow(v1, 2 ** 80, 89298), 17689) + + self.assertRaises(ZeroDivisionError, pow, v1, 5, 0) + self.assertRaises(ValueError, pow, v1, 5, -4) + self.assertRaises(ValueError, pow, v1, -3, 8) + + def test_inplace_exponentiation(self): + v1 = self.Integer(4) + v1.inplace_pow(2) + self.assertEqual(v1, 16) + + v1 = self.Integer(4) + v1.inplace_pow(2, 15) + self.assertEqual(v1, 1) + + def test_abs(self): + v1, v2, v3, v4, v5 = self.Integers(-2 ** 100, -2, 0, 2, 2 ** 100) + self.assertEqual(abs(v1), 2 ** 100) + self.assertEqual(abs(v2), 2) + self.assertEqual(abs(v3), 0) + self.assertEqual(abs(v4), 2) + self.assertEqual(abs(v5), 2 ** 100) + + def test_sqrt(self): + v1, v2, v3, v4 = self.Integers(-2, 0, 49, 10**100) + + self.assertRaises(ValueError, v1.sqrt) + self.assertEqual(v2.sqrt(), 0) + self.assertEqual(v3.sqrt(), 7) + self.assertEqual(v4.sqrt(), 10**50) + + def test_sqrt_module(self): + + # Invalid modulus (non positive) + self.assertRaises(ValueError, self.Integer(5).sqrt, 0) + self.assertRaises(ValueError, self.Integer(5).sqrt, -1) + + # Simple cases + assert self.Integer(0).sqrt(5) == 0 + assert self.Integer(1).sqrt(5) in (1, 4) + + # Test with all quadratic residues in several fields + for p in (11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53): + for i in range(0, p): + square = i**2 % p + res = self.Integer(square).sqrt(p) + assert res in (i, p - i) + + # 2 is a non-quadratic reside in Z_11 + self.assertRaises(ValueError, self.Integer(2).sqrt, 11) + + # 10 is not a prime + self.assertRaises(ValueError, self.Integer(4).sqrt, 10) + + # 5 is square residue of 4 and 7 + assert self.Integer(5 - 11).sqrt(11) in (4, 7) + assert self.Integer(5 + 11).sqrt(11) in (4, 7) + + def test_in_place_add(self): + v1, v2 = self.Integers(10, 20) + + v1 += v2 + self.assertEqual(v1, 30) + v1 += 10 + self.assertEqual(v1, 40) + v1 += -1 + self.assertEqual(v1, 39) + v1 += 2 ** 1000 + self.assertEqual(v1, 39 + 2 ** 1000) + + def test_in_place_sub(self): + v1, v2 = self.Integers(10, 20) + + v1 -= v2 + self.assertEqual(v1, -10) + v1 -= -100 + self.assertEqual(v1, 90) + v1 -= 90000 + self.assertEqual(v1, -89910) + v1 -= -100000 + self.assertEqual(v1, 10090) + + def test_in_place_mul(self): + v1, v2 = self.Integers(3, 5) + + v1 *= v2 + self.assertEqual(v1, 15) + v1 *= 2 + self.assertEqual(v1, 30) + v1 *= -2 + self.assertEqual(v1, -60) + v1 *= 2 ** 1000 + self.assertEqual(v1, -60 * (2 ** 1000)) + + def test_in_place_modulus(self): + v1, v2 = self.Integers(20, 7) + + v1 %= v2 + self.assertEqual(v1, 6) + v1 %= 2 ** 1000 + self.assertEqual(v1, 6) + v1 %= 2 + self.assertEqual(v1, 0) + def t(): + v3 = self.Integer(9) + v3 %= 0 + self.assertRaises(ZeroDivisionError, t) + + def test_and(self): + v1, v2, v3 = self.Integers(0xF4, 0x31, -0xF) + self.assertTrue(isinstance(v1 & v2, self.Integer)) + self.assertEqual(v1 & v2, 0x30) + self.assertEqual(v1 & 0x31, 0x30) + self.assertEqual(v1 & v3, 0xF0) + self.assertEqual(v1 & -0xF, 0xF0) + self.assertEqual(v3 & -0xF, -0xF) + self.assertEqual(v2 & (2 ** 1000 + 0x31), 0x31) + + def test_or(self): + v1, v2, v3 = self.Integers(0x40, 0x82, -0xF) + self.assertTrue(isinstance(v1 | v2, self.Integer)) + 
self.assertEqual(v1 | v2, 0xC2) + self.assertEqual(v1 | 0x82, 0xC2) + self.assertEqual(v2 | v3, -0xD) + self.assertEqual(v2 | 2 ** 1000, 2 ** 1000 + 0x82) + + def test_right_shift(self): + v1, v2, v3 = self.Integers(0x10, 1, -0x10) + self.assertEqual(v1 >> 0, v1) + self.assertTrue(isinstance(v1 >> v2, self.Integer)) + self.assertEqual(v1 >> v2, 0x08) + self.assertEqual(v1 >> 1, 0x08) + self.assertRaises(ValueError, lambda: v1 >> -1) + self.assertEqual(v1 >> (2 ** 1000), 0) + + self.assertEqual(v3 >> 1, -0x08) + self.assertEqual(v3 >> (2 ** 1000), -1) + + def test_in_place_right_shift(self): + v1, v2, v3 = self.Integers(0x10, 1, -0x10) + v1 >>= 0 + self.assertEqual(v1, 0x10) + v1 >>= 1 + self.assertEqual(v1, 0x08) + v1 >>= v2 + self.assertEqual(v1, 0x04) + v3 >>= 1 + self.assertEqual(v3, -0x08) + def l(): + v4 = self.Integer(0x90) + v4 >>= -1 + self.assertRaises(ValueError, l) + def m1(): + v4 = self.Integer(0x90) + v4 >>= 2 ** 1000 + return v4 + self.assertEqual(0, m1()) + def m2(): + v4 = self.Integer(-1) + v4 >>= 2 ** 1000 + return v4 + self.assertEqual(-1, m2()) + + def _test_left_shift(self): + v1, v2, v3 = self.Integers(0x10, 1, -0x10) + self.assertEqual(v1 << 0, v1) + self.assertTrue(isinstance(v1 << v2, self.Integer)) + self.assertEqual(v1 << v2, 0x20) + self.assertEqual(v1 << 1, 0x20) + self.assertEqual(v3 << 1, -0x20) + self.assertRaises(ValueError, lambda: v1 << -1) + self.assertRaises(ValueError, lambda: v1 << (2 ** 1000)) + + def test_in_place_left_shift(self): + v1, v2, v3 = self.Integers(0x10, 1, -0x10) + v1 <<= 0 + self.assertEqual(v1, 0x10) + v1 <<= 1 + self.assertEqual(v1, 0x20) + v1 <<= v2 + self.assertEqual(v1, 0x40) + v3 <<= 1 + self.assertEqual(v3, -0x20) + def l(): + v4 = self.Integer(0x90) + v4 <<= -1 + self.assertRaises(ValueError, l) + def m(): + v4 = self.Integer(0x90) + v4 <<= 2 ** 1000 + self.assertRaises(ValueError, m) + + + def test_get_bit(self): + v1, v2, v3 = self.Integers(0x102, -3, 1) + self.assertEqual(v1.get_bit(0), 0) + self.assertEqual(v1.get_bit(1), 1) + self.assertEqual(v1.get_bit(v3), 1) + self.assertEqual(v1.get_bit(8), 1) + self.assertEqual(v1.get_bit(9), 0) + + self.assertRaises(ValueError, v1.get_bit, -1) + self.assertEqual(v1.get_bit(2 ** 1000), 0) + + self.assertRaises(ValueError, v2.get_bit, -1) + self.assertRaises(ValueError, v2.get_bit, 0) + self.assertRaises(ValueError, v2.get_bit, 1) + self.assertRaises(ValueError, v2.get_bit, 2 * 1000) + + def test_odd_even(self): + v1, v2, v3, v4, v5 = self.Integers(0, 4, 17, -4, -17) + + self.assertTrue(v1.is_even()) + self.assertTrue(v2.is_even()) + self.assertFalse(v3.is_even()) + self.assertTrue(v4.is_even()) + self.assertFalse(v5.is_even()) + + self.assertFalse(v1.is_odd()) + self.assertFalse(v2.is_odd()) + self.assertTrue(v3.is_odd()) + self.assertFalse(v4.is_odd()) + self.assertTrue(v5.is_odd()) + + def test_size_in_bits(self): + v1, v2, v3, v4 = self.Integers(0, 1, 0x100, -90) + self.assertEqual(v1.size_in_bits(), 1) + self.assertEqual(v2.size_in_bits(), 1) + self.assertEqual(v3.size_in_bits(), 9) + self.assertRaises(ValueError, v4.size_in_bits) + + def test_size_in_bytes(self): + v1, v2, v3, v4, v5, v6 = self.Integers(0, 1, 0xFF, 0x1FF, 0x10000, -9) + self.assertEqual(v1.size_in_bytes(), 1) + self.assertEqual(v2.size_in_bytes(), 1) + self.assertEqual(v3.size_in_bytes(), 1) + self.assertEqual(v4.size_in_bytes(), 2) + self.assertEqual(v5.size_in_bytes(), 3) + self.assertRaises(ValueError, v6.size_in_bits) + + def test_perfect_square(self): + + 
self.assertFalse(self.Integer(-9).is_perfect_square()) + self.assertTrue(self.Integer(0).is_perfect_square()) + self.assertTrue(self.Integer(1).is_perfect_square()) + self.assertFalse(self.Integer(2).is_perfect_square()) + self.assertFalse(self.Integer(3).is_perfect_square()) + self.assertTrue(self.Integer(4).is_perfect_square()) + self.assertTrue(self.Integer(39*39).is_perfect_square()) + self.assertFalse(self.Integer(39*39+1).is_perfect_square()) + + for x in range(100, 1000): + self.assertFalse(self.Integer(x**2+1).is_perfect_square()) + self.assertTrue(self.Integer(x**2).is_perfect_square()) + + def test_fail_if_divisible_by(self): + v1, v2, v3 = self.Integers(12, -12, 4) + + # No failure expected + v1.fail_if_divisible_by(7) + v2.fail_if_divisible_by(7) + v2.fail_if_divisible_by(2 ** 80) + + # Failure expected + self.assertRaises(ValueError, v1.fail_if_divisible_by, 4) + self.assertRaises(ValueError, v1.fail_if_divisible_by, v3) + + def test_multiply_accumulate(self): + v1, v2, v3 = self.Integers(4, 3, 2) + v1.multiply_accumulate(v2, v3) + self.assertEqual(v1, 10) + v1.multiply_accumulate(v2, 2) + self.assertEqual(v1, 16) + v1.multiply_accumulate(3, v3) + self.assertEqual(v1, 22) + v1.multiply_accumulate(1, -2) + self.assertEqual(v1, 20) + v1.multiply_accumulate(-2, 1) + self.assertEqual(v1, 18) + v1.multiply_accumulate(1, 2 ** 1000) + self.assertEqual(v1, 18 + 2 ** 1000) + v1.multiply_accumulate(2 ** 1000, 1) + self.assertEqual(v1, 18 + 2 ** 1001) + + def test_set(self): + v1, v2 = self.Integers(3, 6) + v1.set(v2) + self.assertEqual(v1, 6) + v1.set(9) + self.assertEqual(v1, 9) + v1.set(-2) + self.assertEqual(v1, -2) + v1.set(2 ** 1000) + self.assertEqual(v1, 2 ** 1000) + + def test_inverse(self): + v1, v2, v3, v4, v5, v6 = self.Integers(2, 5, -3, 0, 723872, 3433) + + self.assertTrue(isinstance(v1.inverse(v2), self.Integer)) + self.assertEqual(v1.inverse(v2), 3) + self.assertEqual(v1.inverse(5), 3) + self.assertEqual(v3.inverse(5), 3) + self.assertEqual(v5.inverse(92929921), 58610507) + self.assertEqual(v6.inverse(9912), 5353) + + self.assertRaises(ValueError, v2.inverse, 10) + self.assertRaises(ValueError, v1.inverse, -3) + self.assertRaises(ValueError, v4.inverse, 10) + self.assertRaises(ZeroDivisionError, v2.inverse, 0) + + def test_inplace_inverse(self): + v1, v2 = self.Integers(2, 5) + + v1.inplace_inverse(v2) + self.assertEqual(v1, 3) + + def test_gcd(self): + v1, v2, v3, v4 = self.Integers(6, 10, 17, -2) + self.assertTrue(isinstance(v1.gcd(v2), self.Integer)) + self.assertEqual(v1.gcd(v2), 2) + self.assertEqual(v1.gcd(10), 2) + self.assertEqual(v1.gcd(v3), 1) + self.assertEqual(v1.gcd(-2), 2) + self.assertEqual(v4.gcd(6), 2) + + def test_lcm(self): + v1, v2, v3, v4, v5 = self.Integers(6, 10, 17, -2, 0) + self.assertTrue(isinstance(v1.lcm(v2), self.Integer)) + self.assertEqual(v1.lcm(v2), 30) + self.assertEqual(v1.lcm(10), 30) + self.assertEqual(v1.lcm(v3), 102) + self.assertEqual(v1.lcm(-2), 6) + self.assertEqual(v4.lcm(6), 6) + self.assertEqual(v1.lcm(0), 0) + self.assertEqual(v5.lcm(0), 0) + + def test_jacobi_symbol(self): + + data = ( + (1001, 1, 1), + (19, 45, 1), + (8, 21, -1), + (5, 21, 1), + (610, 987, -1), + (1001, 9907, -1), + (5, 3439601197, -1) + ) + + js = self.Integer.jacobi_symbol + + # Jacobi symbol is always 1 for k==1 or n==1 + for k in range(1, 30): + self.assertEqual(js(k, 1), 1) + for n in range(1, 30, 2): + self.assertEqual(js(1, n), 1) + + # Fail if n is not positive odd + self.assertRaises(ValueError, js, 6, -2) + self.assertRaises(ValueError, js, 6, -1) 
+ self.assertRaises(ValueError, js, 6, 0) + self.assertRaises(ValueError, js, 0, 0) + self.assertRaises(ValueError, js, 6, 2) + self.assertRaises(ValueError, js, 6, 4) + self.assertRaises(ValueError, js, 6, 6) + self.assertRaises(ValueError, js, 6, 8) + + for tv in data: + self.assertEqual(js(tv[0], tv[1]), tv[2]) + self.assertEqual(js(self.Integer(tv[0]), tv[1]), tv[2]) + self.assertEqual(js(tv[0], self.Integer(tv[1])), tv[2]) + + def test_jacobi_symbol_wikipedia(self): + + # Test vectors from https://en.wikipedia.org/wiki/Jacobi_symbol + tv = [ + (3, [(1, 1), (2, -1), (3, 0), (4, 1), (5, -1), (6, 0), (7, 1), (8, -1), (9, 0), (10, 1), (11, -1), (12, 0), (13, 1), (14, -1), (15, 0), (16, 1), (17, -1), (18, 0), (19, 1), (20, -1), (21, 0), (22, 1), (23, -1), (24, 0), (25, 1), (26, -1), (27, 0), (28, 1), (29, -1), (30, 0)]), + (5, [(1, 1), (2, -1), (3, -1), (4, 1), (5, 0), (6, 1), (7, -1), (8, -1), (9, 1), (10, 0), (11, 1), (12, -1), (13, -1), (14, 1), (15, 0), (16, 1), (17, -1), (18, -1), (19, 1), (20, 0), (21, 1), (22, -1), (23, -1), (24, 1), (25, 0), (26, 1), (27, -1), (28, -1), (29, 1), (30, 0)]), + (7, [(1, 1), (2, 1), (3, -1), (4, 1), (5, -1), (6, -1), (7, 0), (8, 1), (9, 1), (10, -1), (11, 1), (12, -1), (13, -1), (14, 0), (15, 1), (16, 1), (17, -1), (18, 1), (19, -1), (20, -1), (21, 0), (22, 1), (23, 1), (24, -1), (25, 1), (26, -1), (27, -1), (28, 0), (29, 1), (30, 1)]), + (9, [(1, 1), (2, 1), (3, 0), (4, 1), (5, 1), (6, 0), (7, 1), (8, 1), (9, 0), (10, 1), (11, 1), (12, 0), (13, 1), (14, 1), (15, 0), (16, 1), (17, 1), (18, 0), (19, 1), (20, 1), (21, 0), (22, 1), (23, 1), (24, 0), (25, 1), (26, 1), (27, 0), (28, 1), (29, 1), (30, 0)]), + (11, [(1, 1), (2, -1), (3, 1), (4, 1), (5, 1), (6, -1), (7, -1), (8, -1), (9, 1), (10, -1), (11, 0), (12, 1), (13, -1), (14, 1), (15, 1), (16, 1), (17, -1), (18, -1), (19, -1), (20, 1), (21, -1), (22, 0), (23, 1), (24, -1), (25, 1), (26, 1), (27, 1), (28, -1), (29, -1), (30, -1)]), + (13, [(1, 1), (2, -1), (3, 1), (4, 1), (5, -1), (6, -1), (7, -1), (8, -1), (9, 1), (10, 1), (11, -1), (12, 1), (13, 0), (14, 1), (15, -1), (16, 1), (17, 1), (18, -1), (19, -1), (20, -1), (21, -1), (22, 1), (23, 1), (24, -1), (25, 1), (26, 0), (27, 1), (28, -1), (29, 1), (30, 1)]), + (15, [(1, 1), (2, 1), (3, 0), (4, 1), (5, 0), (6, 0), (7, -1), (8, 1), (9, 0), (10, 0), (11, -1), (12, 0), (13, -1), (14, -1), (15, 0), (16, 1), (17, 1), (18, 0), (19, 1), (20, 0), (21, 0), (22, -1), (23, 1), (24, 0), (25, 0), (26, -1), (27, 0), (28, -1), (29, -1), (30, 0)]), + (17, [(1, 1), (2, 1), (3, -1), (4, 1), (5, -1), (6, -1), (7, -1), (8, 1), (9, 1), (10, -1), (11, -1), (12, -1), (13, 1), (14, -1), (15, 1), (16, 1), (17, 0), (18, 1), (19, 1), (20, -1), (21, 1), (22, -1), (23, -1), (24, -1), (25, 1), (26, 1), (27, -1), (28, -1), (29, -1), (30, 1)]), + (19, [(1, 1), (2, -1), (3, -1), (4, 1), (5, 1), (6, 1), (7, 1), (8, -1), (9, 1), (10, -1), (11, 1), (12, -1), (13, -1), (14, -1), (15, -1), (16, 1), (17, 1), (18, -1), (19, 0), (20, 1), (21, -1), (22, -1), (23, 1), (24, 1), (25, 1), (26, 1), (27, -1), (28, 1), (29, -1), (30, 1)]), + (21, [(1, 1), (2, -1), (3, 0), (4, 1), (5, 1), (6, 0), (7, 0), (8, -1), (9, 0), (10, -1), (11, -1), (12, 0), (13, -1), (14, 0), (15, 0), (16, 1), (17, 1), (18, 0), (19, -1), (20, 1), (21, 0), (22, 1), (23, -1), (24, 0), (25, 1), (26, 1), (27, 0), (28, 0), (29, -1), (30, 0)]), + (23, [(1, 1), (2, 1), (3, 1), (4, 1), (5, -1), (6, 1), (7, -1), (8, 1), (9, 1), (10, -1), (11, -1), (12, 1), (13, 1), (14, -1), (15, -1), (16, 1), (17, -1), (18, 1), (19, -1), (20, -1), 
(21, -1), (22, -1), (23, 0), (24, 1), (25, 1), (26, 1), (27, 1), (28, -1), (29, 1), (30, -1)]), + (25, [(1, 1), (2, 1), (3, 1), (4, 1), (5, 0), (6, 1), (7, 1), (8, 1), (9, 1), (10, 0), (11, 1), (12, 1), (13, 1), (14, 1), (15, 0), (16, 1), (17, 1), (18, 1), (19, 1), (20, 0), (21, 1), (22, 1), (23, 1), (24, 1), (25, 0), (26, 1), (27, 1), (28, 1), (29, 1), (30, 0)]), + (27, [(1, 1), (2, -1), (3, 0), (4, 1), (5, -1), (6, 0), (7, 1), (8, -1), (9, 0), (10, 1), (11, -1), (12, 0), (13, 1), (14, -1), (15, 0), (16, 1), (17, -1), (18, 0), (19, 1), (20, -1), (21, 0), (22, 1), (23, -1), (24, 0), (25, 1), (26, -1), (27, 0), (28, 1), (29, -1), (30, 0)]), + (29, [(1, 1), (2, -1), (3, -1), (4, 1), (5, 1), (6, 1), (7, 1), (8, -1), (9, 1), (10, -1), (11, -1), (12, -1), (13, 1), (14, -1), (15, -1), (16, 1), (17, -1), (18, -1), (19, -1), (20, 1), (21, -1), (22, 1), (23, 1), (24, 1), (25, 1), (26, -1), (27, -1), (28, 1), (29, 0), (30, 1)]), + ] + + js = self.Integer.jacobi_symbol + + for n, kj in tv: + for k, j in kj: + self.assertEqual(js(k, n), j) + + def test_hex(self): + v1, = self.Integers(0x10) + self.assertEqual(hex(v1), "0x10") + + def test_mult_modulo_bytes(self): + modmult = self.Integer._mult_modulo_bytes + + res = modmult(4, 5, 19) + self.assertEqual(res, b'\x01') + + res = modmult(4 - 19, 5, 19) + self.assertEqual(res, b'\x01') + + res = modmult(4, 5 - 19, 19) + self.assertEqual(res, b'\x01') + + res = modmult(4 + 19, 5, 19) + self.assertEqual(res, b'\x01') + + res = modmult(4, 5 + 19, 19) + self.assertEqual(res, b'\x01') + + modulus = 2**512 - 1 # 64 bytes + t1 = 13**100 + t2 = 17**100 + expect = b"\xfa\xb2\x11\x87\xc3(y\x07\xf8\xf1n\xdepq\x0b\xca\xf3\xd3B,\xef\xf2\xfbf\xcc)\x8dZ*\x95\x98r\x96\xa8\xd5\xc3}\xe2q:\xa2'z\xf48\xde%\xef\t\x07\xbc\xc4[C\x8bUE2\x90\xef\x81\xaa:\x08" + self.assertEqual(expect, modmult(t1, t2, modulus)) + + self.assertRaises(ZeroDivisionError, modmult, 4, 5, 0) + self.assertRaises(ValueError, modmult, 4, 5, -1) + self.assertRaises(ValueError, modmult, 4, 5, 4) + + +class TestIntegerInt(TestIntegerBase): + + def setUp(self): + self.Integer = IntegerNative + + +class testIntegerRandom(unittest.TestCase): + + def test_random_exact_bits(self): + + for _ in range(1000): + a = IntegerNative.random(exact_bits=8) + self.assertFalse(a < 128) + self.assertFalse(a >= 256) + + for bits_value in range(1024, 1024 + 8): + a = IntegerNative.random(exact_bits=bits_value) + self.assertFalse(a < 2**(bits_value - 1)) + self.assertFalse(a >= 2**bits_value) + + def test_random_max_bits(self): + + flag = False + for _ in range(1000): + a = IntegerNative.random(max_bits=8) + flag = flag or a < 128 + self.assertFalse(a>=256) + self.assertTrue(flag) + + for bits_value in range(1024, 1024 + 8): + a = IntegerNative.random(max_bits=bits_value) + self.assertFalse(a >= 2**bits_value) + + def test_random_bits_custom_rng(self): + + class CustomRNG(object): + def __init__(self): + self.counter = 0 + + def __call__(self, size): + self.counter += size + return bchr(0) * size + + custom_rng = CustomRNG() + a = IntegerNative.random(exact_bits=32, randfunc=custom_rng) + self.assertEqual(custom_rng.counter, 4) + + def test_random_range(self): + + func = IntegerNative.random_range + + for x in range(200): + a = func(min_inclusive=1, max_inclusive=15) + self.assertTrue(1 <= a <= 15) + + for x in range(200): + a = func(min_inclusive=1, max_exclusive=15) + self.assertTrue(1 <= a < 15) + + self.assertRaises(ValueError, func, min_inclusive=1, max_inclusive=2, + max_exclusive=3) + self.assertRaises(ValueError, func, 
max_inclusive=2, max_exclusive=3) + +def get_tests(config={}): + tests = [] + tests += list_test_cases(TestIntegerInt) + + try: + from Crypto.Math._IntegerGMP import IntegerGMP + + class TestIntegerGMP(TestIntegerBase): + def setUp(self): + self.Integer = IntegerGMP + + tests += list_test_cases(TestIntegerGMP) + except (ImportError, OSError) as e: + if sys.platform == "win32": + sys.stdout.write("Skipping GMP tests on Windows\n") + else: + sys.stdout.write("Skipping GMP tests (%s)\n" % str(e) ) + + try: + from Crypto.Math._IntegerCustom import IntegerCustom + + class TestIntegerCustomModexp(TestIntegerBase): + def setUp(self): + self.Integer = IntegerCustom + + tests += list_test_cases(TestIntegerCustomModexp) + except (ImportError, OSError) as e: + sys.stdout.write("Skipping custom modexp tests (%s)\n" % str(e) ) + + tests += list_test_cases(testIntegerRandom) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Math/test_Primality.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Math/test_Primality.py new file mode 100644 index 000000000..38344f35b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Math/test_Primality.py @@ -0,0 +1,118 @@ +# +# SelfTest/Math/test_Primality.py: Self-test for Primality module +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
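# --- Editor's aside: illustrative sketch, not part of the vendored file. ---
# The self-test added below (test_Primality.py) drives miller_rabin_test and
# lucas_test from Crypto.Math.Primality.  As a reference for what those checks
# decide, a plain-Python Miller-Rabin with random bases (not the library's
# implementation) looks roughly like this:
import random

def miller_rabin_sketch(candidate, rounds=10):
    if candidate in (2, 3):
        return True
    if candidate < 2 or candidate % 2 == 0:
        return False
    d, s = candidate - 1, 0
    while d % 2 == 0:                        # candidate - 1 = d * 2**s, d odd
        d //= 2
        s += 1
    for _ in range(rounds):
        a = random.randrange(2, candidate - 1)
        x = pow(a, d, candidate)
        if x in (1, candidate - 1):
            continue
        for _ in range(s - 1):
            x = (x * x) % candidate
            if x == candidate - 1:
                break
        else:
            return False                     # witness found: surely composite
    return True                              # no witness: probably prime

assert miller_rabin_sketch(2**127 - 1)       # a prime used in the vectors below
# Composites such as 9746347772161 (also in the vectors) are rejected with
# probability at least 1 - 4**-rounds per call.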
+# =================================================================== + +"""Self-test for Math.Numbers""" + +import unittest + +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Util.py3compat import * + +from Crypto.Math.Numbers import Integer +from Crypto.Math.Primality import ( + PROBABLY_PRIME, COMPOSITE, + miller_rabin_test, lucas_test, + test_probable_prime, + generate_probable_prime, + generate_probable_safe_prime, + ) + + +class TestPrimality(unittest.TestCase): + + primes = (1, 2, 3, 5, 7, 11, 13, 17, 19, 23, 2**127-1, 175637383534939453397801320455508570374088202376942372758907369518414308188137781042871856139027160010343454418881888953150175357127346872102307696660678617989191485418582475696230580407111841072614783095326672517315988762029036079794994990250662362650625650262324085116467511357592728695033227611029693067539) + composites = (0, 4, 6, 8, 9, 10, 12, 14, 15, 16, 18, 20, 21, 7*23, (2**19-1)*(2**67-1), 9746347772161,) + + def test_miller_rabin(self): + for prime in self.primes: + self.assertEqual(miller_rabin_test(prime, 3), PROBABLY_PRIME) + for composite in self.composites: + self.assertEqual(miller_rabin_test(composite, 3), COMPOSITE) + self.assertRaises(ValueError, miller_rabin_test, -1, 3) + + def test_lucas(self): + for prime in self.primes: + res = lucas_test(prime) + self.assertEqual(res, PROBABLY_PRIME) + for composite in self.composites: + res = lucas_test(composite) + self.assertEqual(res, COMPOSITE) + self.assertRaises(ValueError, lucas_test, -1) + + def test_is_prime(self): + primes = (170141183460469231731687303715884105727, + 19175002942688032928599, + 1363005552434666078217421284621279933627102780881053358473, + 2 ** 521 - 1) + for p in primes: + self.assertEqual(test_probable_prime(p), PROBABLY_PRIME) + + not_primes = ( + 4754868377601046732119933839981363081972014948522510826417784001, + 1334733877147062382486934807105197899496002201113849920496510541601, + 260849323075371835669784094383812120359260783810157225730623388382401, + ) + for np in not_primes: + self.assertEqual(test_probable_prime(np), COMPOSITE) + + from Crypto.Util.number import sieve_base + for p in sieve_base[:100]: + res = test_probable_prime(p) + self.assertEqual(res, PROBABLY_PRIME) + + def test_generate_prime_bit_size(self): + p = generate_probable_prime(exact_bits=512) + self.assertEqual(p.size_in_bits(), 512) + + def test_generate_prime_filter(self): + def ending_with_one(number): + return number % 10 == 1 + + for x in range(20): + q = generate_probable_prime(exact_bits=160, + prime_filter=ending_with_one) + self.assertEqual(q % 10, 1) + + def test_generate_safe_prime(self): + p = generate_probable_safe_prime(exact_bits=161) + self.assertEqual(p.size_in_bits(), 161) + +def get_tests(config={}): + tests = [] + tests += list_test_cases(TestPrimality) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Math/test_modexp.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Math/test_modexp.py new file mode 100644 index 000000000..b9eb86982 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Math/test_modexp.py @@ -0,0 +1,201 @@ +# +# SelfTest/Math/test_modexp.py: Self-test for module exponentiation +# +# =================================================================== +# +# Copyright (c) 2017, Helder Eijs +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +"""Self-test for the custom module exponentiation""" + +import unittest + +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Util.number import long_to_bytes, bytes_to_long + +from Crypto.Util.py3compat import * + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, + create_string_buffer, + get_raw_buffer, + c_size_t, + c_ulonglong) + +from Crypto.Hash import SHAKE128 +from Crypto.Math.Numbers import Integer +from Crypto.Math._IntegerCustom import _raw_montgomery + +from Crypto.Random.random import StrongRandom + + +def create_rng(tag): + rng = StrongRandom(SHAKE128.new(data=tag)) + return rng + +class ExceptionModulus(ValueError): + pass + +def monty_pow(base, exp, modulus): + max_len = len(long_to_bytes(max(base, exp, modulus))) + + base_b, exp_b, modulus_b = [ long_to_bytes(x, max_len) for x in + (base, exp, modulus) ] + + out = create_string_buffer(max_len) + error = _raw_montgomery.monty_pow( + out, + base_b, + exp_b, + modulus_b, + c_size_t(max_len), + c_ulonglong(32) + ) + + if error == 17: + raise ExceptionModulus() + if error: + raise ValueError("monty_pow failed with error: %d" % error) + + result = bytes_to_long(get_raw_buffer(out)) + return result + +exponent1 = 0x2ce0af628901460a419a08ef950d498b9fd6f271a1a52ac293b86fe5c60efe8e8ba93fa1ebe1eb3d614d2e7b328cb60a2591440e163441a190ecf101ceec245f600fffdcf3f5b3a17a7baeacb96a424db1d7ec985e8ec998bb479fecfffed6a75f9a90fc97062fd973303bce855ad7b8d8272a94025e8532be9aabd54a183f303538d2a7e621b4131d59e823a4625f39bd7d518d7784f7c3a8f19061da74974ff42fa1c063dec2db97d461e291a7d6e721708a5229de166c1246363372854e27f3f08ae274bc16bfd205b028a4d81386494433d516dfbb35f495acba5e4e1d1843cb3c3129b6642a85fc7244ce5845fac071c7f622e4ee12ac43fabeeaa0cd01 +modulus1 = 
0xd66691b20071be4d66d4b71032b37fa007cfabf579fcb91e50bfc2753b3f0ce7be74e216aef7e26d4ae180bc20d7bd3ea88a6cbf6f87380e613c8979b5b043b200a8ff8856a3b12875e36e98a7569f3852d028e967551000b02c19e9fa52e83115b89309aabb1e1cf1e2cb6369d637d46775ce4523ea31f64ad2794cbc365dd8a35e007ed3b57695877fbf102dbeb8b3212491398e494314e93726926e1383f8abb5889bea954eb8c0ca1c62c8e9d83f41888095c5e645ed6d32515fe0c58c1368cad84694e18da43668c6f43e61d7c9bca633ddcda7aef5b79bc396d4a9f48e2a9abe0836cc455e435305357228e93d25aaed46b952defae0f57339bf26f5a9 + + +class TestModExp(unittest.TestCase): + + def test_small(self): + self.assertEqual(1, monty_pow(11,12,19)) + + def test_large_1(self): + base = 0xfffffffffffffffffffffffffffffffffffffffffffffffffff + expected = pow(base, exponent1, modulus1) + result = monty_pow(base, exponent1, modulus1) + self.assertEqual(result, expected) + + def test_zero_exp(self): + base = 0xfffffffffffffffffffffffffffffffffffffffffffffffffff + result = monty_pow(base, 0, modulus1) + self.assertEqual(result, 1) + + def test_zero_base(self): + result = monty_pow(0, exponent1, modulus1) + self.assertEqual(result, 0) + + def test_zero_modulus(self): + base = 0xfffffffffffffffffffffffffffffffffffffffffffffffff + self.assertRaises(ExceptionModulus, monty_pow, base, exponent1, 0) + self.assertRaises(ExceptionModulus, monty_pow, 0, 0, 0) + + def test_larger_exponent(self): + base = modulus1 - 0xFFFFFFF + expected = pow(base, modulus1<<64, modulus1) + result = monty_pow(base, modulus1<<64, modulus1) + self.assertEqual(result, expected) + + def test_even_modulus(self): + base = modulus1 >> 4 + self.assertRaises(ExceptionModulus, monty_pow, base, exponent1, modulus1-1) + + def test_several_lengths(self): + prng = SHAKE128.new().update(b('Test')) + for length in range(1, 100): + modulus2 = Integer.from_bytes(prng.read(length)) | 1 + base = Integer.from_bytes(prng.read(length)) % modulus2 + exponent2 = Integer.from_bytes(prng.read(length)) + + expected = pow(base, exponent2, modulus2) + result = monty_pow(base, exponent2, modulus2) + self.assertEqual(result, expected) + + def test_variable_exponent(self): + prng = create_rng(b('Test variable exponent')) + for i in range(20): + for j in range(7): + modulus = prng.getrandbits(8*30) | 1 + base = prng.getrandbits(8*30) % modulus + exponent = prng.getrandbits(i*8+j) + + expected = pow(base, exponent, modulus) + result = monty_pow(base, exponent, modulus) + self.assertEqual(result, expected) + + exponent ^= (1 << (i*8+j)) - 1 + + expected = pow(base, exponent, modulus) + result = monty_pow(base, exponent, modulus) + self.assertEqual(result, expected) + + def test_stress_63(self): + prng = create_rng(b('Test 63')) + length = 63 + for _ in range(2000): + modulus = prng.getrandbits(8*length) | 1 + base = prng.getrandbits(8*length) % modulus + exponent = prng.getrandbits(8*length) + + expected = pow(base, exponent, modulus) + result = monty_pow(base, exponent, modulus) + self.assertEqual(result, expected) + + def test_stress_64(self): + prng = create_rng(b('Test 64')) + length = 64 + for _ in range(2000): + modulus = prng.getrandbits(8*length) | 1 + base = prng.getrandbits(8*length) % modulus + exponent = prng.getrandbits(8*length) + + expected = pow(base, exponent, modulus) + result = monty_pow(base, exponent, modulus) + self.assertEqual(result, expected) + + def test_stress_65(self): + prng = create_rng(b('Test 65')) + length = 65 + for _ in range(2000): + modulus = prng.getrandbits(8*length) | 1 + base = prng.getrandbits(8*length) % modulus + exponent = 
prng.getrandbits(8*length) + + expected = pow(base, exponent, modulus) + result = monty_pow(base, exponent, modulus) + self.assertEqual(result, expected) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(TestModExp) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Math/test_modmult.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Math/test_modmult.py new file mode 100644 index 000000000..66aa3cd18 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Math/test_modmult.py @@ -0,0 +1,120 @@ +# +# SelfTest/Math/test_modmult.py: Self-test for custom modular multiplication +# +# =================================================================== +# +# Copyright (c) 2023, Helder Eijs +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
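# --- Editor's aside: illustrative sketch, not part of the vendored file. ---
# The self-test added below drives the C-level monty_multiply primitive.  For
# orientation, Montgomery multiplication in plain Python (odd modulus only;
# pow(x, -1, m) needs Python 3.8+) can be sketched as:
def monty_mult_sketch(a, b, n):
    k = n.bit_length()
    R = 1 << k                               # R = 2**k > n, gcd(R, n) = 1
    n_prime = (-pow(n, -1, R)) % R           # n * n_prime == -1 (mod R)

    def redc(t):                             # t * R**-1 mod n, for t < n * R
        m = (t * n_prime) & (R - 1)
        u = (t + m * n) >> k
        return u - n if u >= n else u

    a_bar = (a * R) % n                      # into Montgomery form
    b_bar = (b * R) % n
    return redc(redc(a_bar * b_bar))         # multiply, then leave Montgomery form

# Matches ordinary modular arithmetic, e.g. the small case tested below:
assert monty_mult_sketch(5, 6, 29) == (5 * 6) % 29 == 1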
+# =================================================================== + +"""Self-test for the custom modular multiplication""" + +import unittest + +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Util.number import long_to_bytes, bytes_to_long + +from Crypto.Util._raw_api import (create_string_buffer, + get_raw_buffer, + c_size_t) + +from Crypto.Math._IntegerCustom import _raw_montgomery + + +class ExceptionModulus(ValueError): + pass + + +def monty_mult(term1, term2, modulus): + + if term1 >= modulus: + term1 %= modulus + if term2 >= modulus: + term2 %= modulus + + modulus_b = long_to_bytes(modulus) + numbers_len = len(modulus_b) + term1_b = long_to_bytes(term1, numbers_len) + term2_b = long_to_bytes(term2, numbers_len) + + out = create_string_buffer(numbers_len) + error = _raw_montgomery.monty_multiply( + out, + term1_b, + term2_b, + modulus_b, + c_size_t(numbers_len) + ) + + if error == 17: + raise ExceptionModulus() + if error: + raise ValueError("monty_multiply() failed with error: %d" % error) + + return get_raw_buffer(out) + + +modulus1 = 0xd66691b20071be4d66d4b71032b37fa007cfabf579fcb91e50bfc2753b3f0ce7be74e216aef7e26d4ae180bc20d7bd3ea88a6cbf6f87380e613c8979b5b043b200a8ff8856a3b12875e36e98a7569f3852d028e967551000b02c19e9fa52e83115b89309aabb1e1cf1e2cb6369d637d46775ce4523ea31f64ad2794cbc365dd8a35e007ed3b57695877fbf102dbeb8b3212491398e494314e93726926e1383f8abb5889bea954eb8c0ca1c62c8e9d83f41888095c5e645ed6d32515fe0c58c1368cad84694e18da43668c6f43e61d7c9bca633ddcda7aef5b79bc396d4a9f48e2a9abe0836cc455e435305357228e93d25aaed46b952defae0f57339bf26f5a9 + + +class TestModMultiply(unittest.TestCase): + + def test_small(self): + self.assertEqual(b"\x01", monty_mult(5, 6, 29)) + + def test_large(self): + numbers_len = (modulus1.bit_length() + 7) // 8 + + t1 = modulus1 // 2 + t2 = modulus1 - 90 + expect = b'\x00' * (numbers_len - 1) + b'\x2d' + self.assertEqual(expect, monty_mult(t1, t2, modulus1)) + + def test_zero_term(self): + numbers_len = (modulus1.bit_length() + 7) // 8 + expect = b'\x00' * numbers_len + self.assertEqual(expect, monty_mult(0x100, 0, modulus1)) + self.assertEqual(expect, monty_mult(0, 0x100, modulus1)) + + def test_larger_term(self): + t1 = 2**2047 + expect_int = 0x8edf4071f78e3d7ba622cdbbbef74612e301d69186776ae6bf87ff38c320d9aebaa64889c2f67de2324e6bccd2b10ad89e91fd21ba4bb523904d033eff5e70e62f01a84f41fa90a4f248ef249b82e1d2729253fdfc2a3b5b740198123df8bfbf7057d03e15244ad5f26eb9a099763b5c5972121ec076b0bf899f59bd95f7cc129abddccf24217bce52ca0f3a44c9ccc504765dbb89734205f3ae6a8cc560494a60ea84b27d8e00fa24bdd5b4f1d4232edb61e47d3d984c1fa50a3820a2e580fbc3fc8bc11e99df53b9efadf5a40ac75d384e400905aa6f1d88950cd53b1c54dc2222115ad84a27260fa4d978155c1434c551de1ee7361a17a2f79d4388f78a5d + res = bytes_to_long(monty_mult(t1, t1, modulus1)) + self.assertEqual(res, expect_int) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(TestModMultiply) + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Protocol/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Protocol/__init__.py new file mode 100644 index 000000000..7cb24413a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Protocol/__init__.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Protocol/__init__.py: Self-tests for Crypto.Protocol +# +# Written in 2008 by 
Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test for Crypto.Protocol""" + +__revision__ = "$Id$" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Protocol import test_rfc1751; tests += test_rfc1751.get_tests(config=config) + from Crypto.SelfTest.Protocol import test_KDF; tests += test_KDF.get_tests(config=config) + from Crypto.SelfTest.Protocol import test_ecdh; tests += test_ecdh.get_tests(config=config) + + from Crypto.SelfTest.Protocol import test_SecretSharing; + tests += test_SecretSharing.get_tests(config=config) + + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Protocol/test_KDF.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Protocol/test_KDF.py new file mode 100644 index 000000000..55dd19e2c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Protocol/test_KDF.py @@ -0,0 +1,807 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Protocol/test_KDF.py: Self-test for key derivation functions +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
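# --- Editor's aside: illustrative sketch, not part of the vendored file. ---
# The self-test added below (test_KDF.py) covers PBKDF1/PBKDF2, S2V, HKDF,
# scrypt, bcrypt and SP800-108.  Its PBKDF2 and HKDF vectors come from
# RFC 6070 and RFC 5869 (the file's "RFC 6050" comment is an upstream typo)
# and can be reproduced with the standard library alone; a cross-check of two
# of them, written by the editor rather than taken from the package:
import hashlib
import hmac

# RFC 6070, 4096 iterations -- same vector as in the PBKDF2 test data below.
dk = hashlib.pbkdf2_hmac("sha1", b"password", b"salt", 4096, dklen=20)
assert dk.hex() == "4b007901b765489abead49d926f721d065a429c1"

def hkdf_sha256_sketch(ikm, length, salt=b"", info=b""):
    # Minimal HKDF-SHA256 (RFC 5869 extract-then-expand), illustrative only.
    prk = hmac.new(salt or b"\x00" * 32, ikm, hashlib.sha256).digest()
    okm, t, counter = b"", b"", 1
    while len(okm) < length:
        t = hmac.new(prk, t + info + bytes([counter]), hashlib.sha256).digest()
        okm += t
        counter += 1
    return okm[:length]

# RFC 5869 test case 1 -- same vector as in the HKDF test data below.
okm = hkdf_sha256_sketch(b"\x0b" * 22, 42,
                         salt=bytes.fromhex("000102030405060708090a0b0c"),
                         info=bytes.fromhex("f0f1f2f3f4f5f6f7f8f9"))
assert okm.hex().startswith("3cb25f25faacd57a90434f64d0362f2a")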
+# =================================================================== + +import re +import unittest +from binascii import unhexlify + +from Crypto.Util.py3compat import b, bchr + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors, load_test_vectors_wycheproof +from Crypto.Hash import SHA1, HMAC, SHA256, MD5, SHA224, SHA384, SHA512 +from Crypto.Cipher import AES, DES3 + +from Crypto.Protocol.KDF import (PBKDF1, PBKDF2, _S2V, HKDF, scrypt, + bcrypt, bcrypt_check, + SP800_108_Counter) + +from Crypto.Protocol.KDF import _bcrypt_decode + + +def t2b(t): + if t is None: + return None + t2 = t.replace(" ", "").replace("\n", "") + return unhexlify(b(t2)) + + +class TestVector(object): + pass + + +class PBKDF1_Tests(unittest.TestCase): + + # List of tuples with test data. + # Each tuple is made up by: + # Item #0: a pass phrase + # Item #1: salt (8 bytes encoded in hex) + # Item #2: output key length + # Item #3: iterations to use + # Item #4: expected result (encoded in hex) + _testData = ( + # From http://www.di-mgt.com.au/cryptoKDFs.html#examplespbkdf + ("password", "78578E5A5D63CB06", 16, 1000, "DC19847E05C64D2FAF10EBFB4A3D2A20"), + ) + + def test1(self): + v = self._testData[0] + res = PBKDF1(v[0], t2b(v[1]), v[2], v[3], SHA1) + self.assertEqual(res, t2b(v[4])) + + +class PBKDF2_Tests(unittest.TestCase): + + # List of tuples with test data. + # Each tuple is made up by: + # Item #0: a pass phrase + # Item #1: salt (encoded in hex) + # Item #2: output key length + # Item #3: iterations to use + # Item #4: hash module + # Item #5: expected result (encoded in hex) + _testData = ( + # From http://www.di-mgt.com.au/cryptoKDFs.html#examplespbkdf + ("password","78578E5A5D63CB06",24,2048, SHA1, "BFDE6BE94DF7E11DD409BCE20A0255EC327CB936FFE93643"), + # From RFC 6050 + ("password","73616c74", 20, 1, SHA1, "0c60c80f961f0e71f3a9b524af6012062fe037a6"), + ("password","73616c74", 20, 2, SHA1, "ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957"), + ("password","73616c74", 20, 4096, SHA1, "4b007901b765489abead49d926f721d065a429c1"), + ("passwordPASSWORDpassword","73616c7453414c5473616c7453414c5473616c7453414c5473616c7453414c5473616c74", + 25, 4096, SHA1, "3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038"), + ( 'pass\x00word',"7361006c74",16,4096, SHA1, "56fa6aa75548099dcc37d7f03425e0c3"), + # From draft-josefsson-scrypt-kdf-01, Chapter 10 + ( 'passwd', '73616c74', 64, 1, SHA256, "55ac046e56e3089fec1691c22544b605f94185216dde0465e68b9d57c20dacbc49ca9cccf179b645991664b39d77ef317c71b845b1e30bd509112041d3a19783"), + ( 'Password', '4e61436c', 64, 80000, SHA256, "4ddcd8f60b98be21830cee5ef22701f9641a4418d04c0414aeff08876b34ab56a1d425a1225833549adb841b51c9b3176a272bdebba1d078478f62b397f33c8d"), + ) + + def test1(self): + # Test only for HMAC-SHA1 as PRF + + def prf_SHA1(p,s): + return HMAC.new(p,s,SHA1).digest() + + def prf_SHA256(p,s): + return HMAC.new(p,s,SHA256).digest() + + for i in range(len(self._testData)): + v = self._testData[i] + password = v[0] + salt = t2b(v[1]) + out_len = v[2] + iters = v[3] + hash_mod = v[4] + expected = t2b(v[5]) + + if hash_mod is SHA1: + res = PBKDF2(password, salt, out_len, iters) + self.assertEqual(res, expected) + + res = PBKDF2(password, salt, out_len, iters, prf_SHA1) + self.assertEqual(res, expected) + else: + res = PBKDF2(password, salt, out_len, iters, prf_SHA256) + self.assertEqual(res, expected) + + def test2(self): + # Verify that prf and hmac_hash_module are mutual exclusive + def prf_SHA1(p,s): + return 
HMAC.new(p,s,SHA1).digest() + + self.assertRaises(ValueError, PBKDF2, b("xxx"), b("yyy"), 16, 100, + prf=prf_SHA1, hmac_hash_module=SHA1) + + def test3(self): + # Verify that hmac_hash_module works like prf + + password = b("xxx") + salt = b("yyy") + + for hashmod in (MD5, SHA1, SHA224, SHA256, SHA384, SHA512): + + pr1 = PBKDF2(password, salt, 16, 100, + prf=lambda p, s: HMAC.new(p,s,hashmod).digest()) + pr2 = PBKDF2(password, salt, 16, 100, hmac_hash_module=hashmod) + + self.assertEqual(pr1, pr2) + + def test4(self): + # Verify that PBKDF2 can take bytes or strings as password or salt + k1 = PBKDF2("xxx", b("yyy"), 16, 10) + k2 = PBKDF2(b("xxx"), b("yyy"), 16, 10) + self.assertEqual(k1, k2) + + k1 = PBKDF2(b("xxx"), "yyy", 16, 10) + k2 = PBKDF2(b("xxx"), b("yyy"), 16, 10) + self.assertEqual(k1, k2) + + +class S2V_Tests(unittest.TestCase): + + # Sequence of test vectors. + # Each test vector is made up by: + # Item #0: a tuple of strings + # Item #1: an AES key + # Item #2: the result + # Item #3: the cipher module S2V is based on + # Everything is hex encoded + _testData = [ + + # RFC5297, A.1 + ( + ( '101112131415161718191a1b1c1d1e1f2021222324252627', + '112233445566778899aabbccddee' ), + 'fffefdfcfbfaf9f8f7f6f5f4f3f2f1f0', + '85632d07c6e8f37f950acd320a2ecc93', + AES + ), + + # RFC5297, A.2 + ( + ( '00112233445566778899aabbccddeeffdeaddadadeaddadaffeeddcc'+ + 'bbaa99887766554433221100', + '102030405060708090a0', + '09f911029d74e35bd84156c5635688c0', + '7468697320697320736f6d6520706c61'+ + '696e7465787420746f20656e63727970'+ + '74207573696e67205349562d414553'), + '7f7e7d7c7b7a79787776757473727170', + '7bdb6e3b432667eb06f4d14bff2fbd0f', + AES + ), + + ] + + def test1(self): + """Verify correctness of test vector""" + for tv in self._testData: + s2v = _S2V.new(t2b(tv[1]), tv[3]) + for s in tv[0]: + s2v.update(t2b(s)) + result = s2v.derive() + self.assertEqual(result, t2b(tv[2])) + + def test2(self): + """Verify that no more than 127(AES) and 63(TDES) + components are accepted.""" + key = bchr(0) * 8 + bchr(255) * 8 + for module in (AES, DES3): + s2v = _S2V.new(key, module) + max_comps = module.block_size*8-1 + for i in range(max_comps): + s2v.update(b("XX")) + self.assertRaises(TypeError, s2v.update, b("YY")) + + +class HKDF_Tests(unittest.TestCase): + + # Test vectors from RFC5869, Appendix A + # Each tuple is made up by: + # Item #0: hash module + # Item #1: secret + # Item #2: salt + # Item #3: context + # Item #4: expected result + _test_vector = ( + ( + SHA256, + "0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b", + "000102030405060708090a0b0c", + "f0f1f2f3f4f5f6f7f8f9", + 42, + "3cb25f25faacd57a90434f64d0362f2a" + + "2d2d0a90cf1a5a4c5db02d56ecc4c5bf" + + "34007208d5b887185865" + ), + ( + SHA256, + "000102030405060708090a0b0c0d0e0f" + + "101112131415161718191a1b1c1d1e1f" + + "202122232425262728292a2b2c2d2e2f" + + "303132333435363738393a3b3c3d3e3f" + + "404142434445464748494a4b4c4d4e4f", + "606162636465666768696a6b6c6d6e6f" + + "707172737475767778797a7b7c7d7e7f" + + "808182838485868788898a8b8c8d8e8f" + + "909192939495969798999a9b9c9d9e9f" + + "a0a1a2a3a4a5a6a7a8a9aaabacadaeaf", + "b0b1b2b3b4b5b6b7b8b9babbbcbdbebf" + + "c0c1c2c3c4c5c6c7c8c9cacbcccdcecf" + + "d0d1d2d3d4d5d6d7d8d9dadbdcdddedf" + + "e0e1e2e3e4e5e6e7e8e9eaebecedeeef" + + "f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff", + 82, + "b11e398dc80327a1c8e7f78c596a4934" + + "4f012eda2d4efad8a050cc4c19afa97c" + + "59045a99cac7827271cb41c65e590e09" + + "da3275600c2f09b8367793a9aca3db71" + + "cc30c58179ec3e87c14c01d5c1f3434f" + + "1d87" + ), + ( + 
SHA256, + "0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b", + None, + None, + 42, + "8da4e775a563c18f715f802a063c5a31" + + "b8a11f5c5ee1879ec3454e5f3c738d2d" + + "9d201395faa4b61a96c8" + ), + ( + SHA1, + "0b0b0b0b0b0b0b0b0b0b0b", + "000102030405060708090a0b0c", + "f0f1f2f3f4f5f6f7f8f9", + 42, + "085a01ea1b10f36933068b56efa5ad81" + + "a4f14b822f5b091568a9cdd4f155fda2" + + "c22e422478d305f3f896" + ), + ( + SHA1, + "000102030405060708090a0b0c0d0e0f" + + "101112131415161718191a1b1c1d1e1f" + + "202122232425262728292a2b2c2d2e2f" + + "303132333435363738393a3b3c3d3e3f" + + "404142434445464748494a4b4c4d4e4f", + "606162636465666768696a6b6c6d6e6f" + + "707172737475767778797a7b7c7d7e7f" + + "808182838485868788898a8b8c8d8e8f" + + "909192939495969798999a9b9c9d9e9f" + + "a0a1a2a3a4a5a6a7a8a9aaabacadaeaf", + "b0b1b2b3b4b5b6b7b8b9babbbcbdbebf" + + "c0c1c2c3c4c5c6c7c8c9cacbcccdcecf" + + "d0d1d2d3d4d5d6d7d8d9dadbdcdddedf" + + "e0e1e2e3e4e5e6e7e8e9eaebecedeeef" + + "f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff", + 82, + "0bd770a74d1160f7c9f12cd5912a06eb" + + "ff6adcae899d92191fe4305673ba2ffe" + + "8fa3f1a4e5ad79f3f334b3b202b2173c" + + "486ea37ce3d397ed034c7f9dfeb15c5e" + + "927336d0441f4c4300e2cff0d0900b52" + + "d3b4" + ), + ( + SHA1, + "0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b", + "", + "", + 42, + "0ac1af7002b3d761d1e55298da9d0506" + + "b9ae52057220a306e07b6b87e8df21d0" + + "ea00033de03984d34918" + ), + ( + SHA1, + "0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c", + None, + "", + 42, + "2c91117204d745f3500d636a62f64f0a" + + "b3bae548aa53d423b0d1f27ebba6f5e5" + + "673a081d70cce7acfc48" + ) + ) + + def test1(self): + for tv in self._test_vector: + secret, salt, info, exp = [ t2b(tv[x]) for x in (1,2,3,5) ] + key_len, hashmod = [ tv[x] for x in (4,0) ] + + output = HKDF(secret, key_len, salt, hashmod, 1, info) + self.assertEqual(output, exp) + + def test2(self): + ref = HKDF(b("XXXXXX"), 12, b("YYYY"), SHA1) + + # Same output, but this time split over 2 keys + key1, key2 = HKDF(b("XXXXXX"), 6, b("YYYY"), SHA1, 2) + self.assertEqual((ref[:6], ref[6:]), (key1, key2)) + + # Same output, but this time split over 3 keys + key1, key2, key3 = HKDF(b("XXXXXX"), 4, b("YYYY"), SHA1, 3) + self.assertEqual((ref[:4], ref[4:8], ref[8:]), (key1, key2, key3)) + + +class scrypt_Tests(unittest.TestCase): + + # Test vectors taken from + # https://tools.ietf.org/html/rfc7914 + # - password + # - salt + # - N + # - r + # - p + data = ( + ( + "", + "", + 16, # 2K + 1, + 1, + """ + 77 d6 57 62 38 65 7b 20 3b 19 ca 42 c1 8a 04 97 + f1 6b 48 44 e3 07 4a e8 df df fa 3f ed e2 14 42 + fc d0 06 9d ed 09 48 f8 32 6a 75 3a 0f c8 1f 17 + e8 d3 e0 fb 2e 0d 36 28 cf 35 e2 0c 38 d1 89 06 + """ + ), + ( + "password", + "NaCl", + 1024, # 1M + 8, + 16, + """ + fd ba be 1c 9d 34 72 00 78 56 e7 19 0d 01 e9 fe + 7c 6a d7 cb c8 23 78 30 e7 73 76 63 4b 37 31 62 + 2e af 30 d9 2e 22 a3 88 6f f1 09 27 9d 98 30 da + c7 27 af b9 4a 83 ee 6d 83 60 cb df a2 cc 06 40 + """ + ), + ( + "pleaseletmein", + "SodiumChloride", + 16384, # 16M + 8, + 1, + """ + 70 23 bd cb 3a fd 73 48 46 1c 06 cd 81 fd 38 eb + fd a8 fb ba 90 4f 8e 3e a9 b5 43 f6 54 5d a1 f2 + d5 43 29 55 61 3f 0f cf 62 d4 97 05 24 2a 9a f9 + e6 1e 85 dc 0d 65 1e 40 df cf 01 7b 45 57 58 87 + """ + ), + ( + "pleaseletmein", + "SodiumChloride", + 1048576, # 1G + 8, + 1, + """ + 21 01 cb 9b 6a 51 1a ae ad db be 09 cf 70 f8 81 + ec 56 8d 57 4a 2f fd 4d ab e5 ee 98 20 ad aa 47 + 8e 56 fd 8f 4b a5 d0 9f fa 1c 6d 92 7c 40 f4 c3 + 37 30 40 49 e8 a9 52 fb cb f4 5c 6f a7 7a 41 a4 + """ + ), + ) + + def setUp(self): + 
new_test_vectors = [] + for tv in self.data: + new_tv = TestVector() + new_tv.P = b(tv[0]) + new_tv.S = b(tv[1]) + new_tv.N = tv[2] + new_tv.r = tv[3] + new_tv.p = tv[4] + new_tv.output = t2b(tv[5]) + new_tv.dkLen = len(new_tv.output) + new_test_vectors.append(new_tv) + self.data = new_test_vectors + + def test2(self): + + for tv in self.data: + try: + output = scrypt(tv.P, tv.S, tv.dkLen, tv.N, tv.r, tv.p) + except ValueError as e: + if " 2 " in str(e) and tv.N >= 1048576: + import warnings + warnings.warn("Not enough memory to unit test scrypt() with N=1048576", RuntimeWarning) + continue + else: + raise e + self.assertEqual(output, tv.output) + + def test3(self): + ref = scrypt(b("password"), b("salt"), 12, 16, 1, 1) + + # Same output, but this time split over 2 keys + key1, key2 = scrypt(b("password"), b("salt"), 6, 16, 1, 1, 2) + self.assertEqual((ref[:6], ref[6:]), (key1, key2)) + + # Same output, but this time split over 3 keys + key1, key2, key3 = scrypt(b("password"), b("salt"), 4, 16, 1, 1, 3) + self.assertEqual((ref[:4], ref[4:8], ref[8:]), (key1, key2, key3)) + + +class bcrypt_Tests(unittest.TestCase): + + def test_negative_cases(self): + self.assertRaises(ValueError, bcrypt, b"1" * 73, 10) + self.assertRaises(ValueError, bcrypt, b"1" * 10, 3) + self.assertRaises(ValueError, bcrypt, b"1" * 10, 32) + self.assertRaises(ValueError, bcrypt, b"1" * 10, 4, salt=b"") + self.assertRaises(ValueError, bcrypt, b"1" * 10, 4, salt=b"1") + self.assertRaises(ValueError, bcrypt, b"1" * 10, 4, salt=b"1" * 17) + self.assertRaises(ValueError, bcrypt, b"1\x00" * 10, 4) + + def test_bytearray_mismatch(self): + ref = bcrypt("pwd", 4) + bcrypt_check("pwd", ref) + bref = bytearray(ref) + bcrypt_check("pwd", bref) + + wrong = ref[:-1] + bchr(bref[-1] ^ 0x01) + self.assertRaises(ValueError, bcrypt_check, "pwd", wrong) + + wrong = b"x" + ref[1:] + self.assertRaises(ValueError, bcrypt_check, "pwd", wrong) + + # https://github.com/patrickfav/bcrypt/wiki/Published-Test-Vectors + + def test_empty_password(self): + # password, cost, salt, bcrypt hash + tvs = [ + (b"", 4, b"zVHmKQtGGQob.b/Nc7l9NO", b"$2a$04$zVHmKQtGGQob.b/Nc7l9NO8UlrYcW05FiuCj/SxsFO/ZtiN9.mNzy"), + (b"", 5, b"zVHmKQtGGQob.b/Nc7l9NO", b"$2a$05$zVHmKQtGGQob.b/Nc7l9NOWES.1hkVBgy5IWImh9DOjKNU8atY4Iy"), + (b"", 6, b"zVHmKQtGGQob.b/Nc7l9NO", b"$2a$06$zVHmKQtGGQob.b/Nc7l9NOjOl7l4oz3WSh5fJ6414Uw8IXRAUoiaO"), + (b"", 7, b"zVHmKQtGGQob.b/Nc7l9NO", b"$2a$07$zVHmKQtGGQob.b/Nc7l9NOBsj1dQpBA1HYNGpIETIByoNX9jc.hOi"), + (b"", 8, b"zVHmKQtGGQob.b/Nc7l9NO", b"$2a$08$zVHmKQtGGQob.b/Nc7l9NOiLTUh/9MDpX86/DLyEzyiFjqjBFePgO"), + ] + + for (idx, (password, cost, salt64, result)) in enumerate(tvs): + x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) + self.assertEqual(x, result) + bcrypt_check(password, result) + + def test_random_password_and_salt_short_pw(self): + # password, cost, salt, bcrypt hash + tvs = [ + (b"<.S.2K(Zq'", 4, b"VYAclAMpaXY/oqAo9yUpku", b"$2a$04$VYAclAMpaXY/oqAo9yUpkuWmoYywaPzyhu56HxXpVltnBIfmO9tgu"), + (b"5.rApO%5jA", 5, b"kVNDrnYKvbNr5AIcxNzeIu", b"$2a$05$kVNDrnYKvbNr5AIcxNzeIuRcyIF5cZk6UrwHGxENbxP5dVv.WQM/G"), + (b"oW++kSrQW^", 6, b"QLKkRMH9Am6irtPeSKN5sO", b"$2a$06$QLKkRMH9Am6irtPeSKN5sObJGr3j47cO6Pdf5JZ0AsJXuze0IbsNm"), + (b"ggJ\\KbTnDG", 7, b"4H896R09bzjhapgCPS/LYu", b"$2a$07$4H896R09bzjhapgCPS/LYuMzAQluVgR5iu/ALF8L8Aln6lzzYXwbq"), + (b"49b0:;VkH/", 8, b"hfvO2retKrSrx5f2RXikWe", b"$2a$08$hfvO2retKrSrx5f2RXikWeFWdtSesPlbj08t/uXxCeZoHRWDz/xFe"), + (b">9N^5jc##'", 9, b"XZLvl7rMB3EvM0c1.JHivu", 
b"$2a$09$XZLvl7rMB3EvM0c1.JHivuIDPJWeNJPTVrpjZIEVRYYB/mF6cYgJK"), + (b"\\$ch)s4WXp", 10, b"aIjpMOLK5qiS9zjhcHR5TO", b"$2a$10$aIjpMOLK5qiS9zjhcHR5TOU7v2NFDmcsBmSFDt5EHOgp/jeTF3O/q"), + (b"RYoj\\_>2P7", 12, b"esIAHiQAJNNBrsr5V13l7.", b"$2a$12$esIAHiQAJNNBrsr5V13l7.RFWWJI2BZFtQlkFyiWXjou05GyuREZa"), + ] + + for (idx, (password, cost, salt64, result)) in enumerate(tvs): + x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) + self.assertEqual(x, result) + bcrypt_check(password, result) + + def test_random_password_and_salt_long_pw(self): + # password, cost, salt, bcrypt hash + tvs = [ + (b"^Q&\"]A`%/A(BVGt>QaX0M-#1ghq_+\":Y0CRmY", 5, b"YuQvhokOGVnevctykUYpKu", b"$2a$05$YuQvhokOGVnevctykUYpKutZD2pWeGGYn3auyLOasguMY3/0BbIyq"), + (b"F%uN/j>[GuB7-jB'_Yj!Tnb7Y!u^6)", 6, b"5L3vpQ0tG9O7k5gQ8nAHAe", b"$2a$06$5L3vpQ0tG9O7k5gQ8nAHAe9xxQiOcOLh8LGcI0PLWhIznsDt.S.C6"), + (b"Z>BobP32ub\"Cfe*Q<-q-=tRSjOBh8\\mLNW.", 9, b"nArqOfdCsD9kIbVnAixnwe", b"$2a$09$nArqOfdCsD9kIbVnAixnwe6s8QvyPYWtQBpEXKir2OJF9/oNBsEFe"), + (b"/MH51`!BP&0tj3%YCA;Xk%e3S`o\\EI", 10, b"ePiAc.s.yoBi3B6p1iQUCe", b"$2a$10$ePiAc.s.yoBi3B6p1iQUCezn3mraLwpVJ5XGelVyYFKyp5FZn/y.u"), + (b"ptAP\"mcg6oH.\";c0U2_oll.OKi5?Ui\"^ai#iQH7ZFtNMfs3AROnIncE9\"BNNoEgO[[*Yk8;RQ(#S,;I+aT", + 5, b"wgkOlGNXIVE2fWkT3gyRoO", b"$2a$05$wgkOlGNXIVE2fWkT3gyRoOqWi4gbi1Wv2Q2Jx3xVs3apl1w.Wtj8C"), + (b"M.E1=dt<.L0Q&p;94NfGm_Oo23+Kpl@M5?WIAL.[@/:'S)W96G8N^AWb7_smmC]>7#fGoB", + 6, b"W9zTCl35nEvUukhhFzkKMe", b"$2a$06$W9zTCl35nEvUukhhFzkKMekjT9/pj7M0lihRVEZrX3m8/SBNZRX7i"), + ] + + for (idx, (password, cost, salt64, result)) in enumerate(tvs): + x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) + self.assertEqual(x, result) + bcrypt_check(password, result) + + def test_increasing_password_length(self): + # password, cost, salt, bcrypt hash + tvs = [ + (b"a", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.l4WvgHIVg17ZawDIrDM2IjlE64GDNQS"), + (b"aa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.AyUxBk.ThHlsLvRTH7IqcG7yVHJ3SXq"), + (b"aaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.BxOVac5xPB6XFdRc/ZrzM9FgZkqmvbW"), + (b"aaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.Qbr209bpCtfl5hN7UQlG/L4xiD3AKau"), + (b"aaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.oWszihPjDZI0ypReKsaDOW1jBl7oOii"), + (b"aaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ./k.Xxn9YiqtV/sxh3EHbnOHd0Qsq27K"), + (b"aaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.PYJqRFQbgRbIjMd5VNKmdKS4sBVOyDe"), + (b"aaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ..VMYfzaw1wP/SGxowpLeGf13fxCCt.q"), + (b"aaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.5B0p054nO5WgAD1n04XslDY/bqY9RJi"), + (b"aaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.INBTgqm7sdlBJDg.J5mLMSRK25ri04y"), + (b"aaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.s3y7CdFD0OR5p6rsZw/eZ.Dla40KLfm"), + (b"aaaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.Jx742Djra6Q7PqJWnTAS.85c28g.Siq"), + (b"aaaaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.oKMXW3EZcPHcUV0ib5vDBnh9HojXnLu"), + (b"aaaaaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.w6nIjWpDPNSH5pZUvLjC1q25ONEQpeS"), + (b"aaaaaaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", b"$2a$04$5DCebwootqWMCp59ISrMJ.k1b2/r9A/hxdwKEKurg6OCn4MwMdiGq"), + (b"aaaaaaaaaaaaaaaa", 4, b"5DCebwootqWMCp59ISrMJ.", 
b"$2a$04$5DCebwootqWMCp59ISrMJ.3prCNHVX1Ws.7Hm2bJxFUnQOX9f7DFa"), + ] + + for (idx, (password, cost, salt64, result)) in enumerate(tvs): + x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) + self.assertEqual(x, result) + bcrypt_check(password, result) + + def test_non_ascii_characters(self): + # password, cost, salt, bcrypt hash + tvs = [ + ("àèìòùÀÈÌÒÙáéíóúýÁÉÍÓÚÝðÐ", 4, b"D3qS2aoTVyqM7z8v8crLm.", b"$2a$04$D3qS2aoTVyqM7z8v8crLm.3nKt4CzBZJbyFB.ZebmfCvRw7BGs.Xm"), + ("àèìòùÀÈÌÒÙáéíóúýÁÉÍÓÚÝðÐ", 5, b"VA1FujiOCMPkUHQ8kF7IaO", b"$2a$05$VA1FujiOCMPkUHQ8kF7IaOg7NGaNvpxwWzSluQutxEVmbZItRTsAa"), + ("àèìòùÀÈÌÒÙáéíóúýÁÉÍÓÚÝðÐ", 6, b"TXiaNrPeBSz5ugiQlehRt.", b"$2a$06$TXiaNrPeBSz5ugiQlehRt.gwpeDQnXWteQL4z2FulouBr6G7D9KUi"), + ("âêîôûÂÊÎÔÛãñõÃÑÕäëïöüÿ", 4, b"YTn1Qlvps8e1odqMn6G5x.", b"$2a$04$YTn1Qlvps8e1odqMn6G5x.85pqKql6w773EZJAExk7/BatYAI4tyO"), + ("âêîôûÂÊÎÔÛãñõÃÑÕäëïöüÿ", 5, b"C.8k5vJKD2NtfrRI9o17DO", b"$2a$05$C.8k5vJKD2NtfrRI9o17DOfIW0XnwItA529vJnh2jzYTb1QdoY0py"), + ("âêîôûÂÊÎÔÛãñõÃÑÕäëïöüÿ", 6, b"xqfRPj3RYAgwurrhcA6uRO", b"$2a$06$xqfRPj3RYAgwurrhcA6uROtGlXDp/U6/gkoDYHwlubtcVcNft5.vW"), + ("ÄËÏÖÜŸåÅæÆœŒßçÇøØ¢¿¡€", 4, b"y8vGgMmr9EdyxP9rmMKjH.", b"$2a$04$y8vGgMmr9EdyxP9rmMKjH.wv2y3r7yRD79gykQtmb3N3zrwjKsyay"), + ("ÄËÏÖÜŸåÅæÆœŒßçÇøØ¢¿¡€", 5, b"iYH4XIKAOOm/xPQs7xKP1u", b"$2a$05$iYH4XIKAOOm/xPQs7xKP1upD0cWyMn3Jf0ZWiizXbEkVpS41K1dcO"), + ("ÄËÏÖÜŸåÅæÆœŒßçÇøØ¢¿¡€", 6, b"wCOob.D0VV8twafNDB2ape", b"$2a$06$wCOob.D0VV8twafNDB2apegiGD5nqF6Y1e6K95q6Y.R8C4QGd265q"), + ("ΔημοσιεύθηκεστηνΕφημερίδατης", 4, b"E5SQtS6P4568MDXW7cyUp.", b"$2a$04$E5SQtS6P4568MDXW7cyUp.18wfDisKZBxifnPZjAI1d/KTYMfHPYO"), + ("АБбВвГгДдЕеЁёЖжЗзИиЙйКкЛлМмН", 4, b"03e26gQFHhQwRNf81/ww9.", b"$2a$04$03e26gQFHhQwRNf81/ww9.p1UbrNwxpzWjLuT.zpTLH4t/w5WhAhC"), + ("нОоПпРрСсТтУуФфХхЦцЧчШшЩщЪъЫыЬьЭэЮю", 4, b"PHNoJwpXCfe32nUtLv2Upu", b"$2a$04$PHNoJwpXCfe32nUtLv2UpuhJXOzd4k7IdFwnEpYwfJVCZ/f/.8Pje"), + ("電电電島岛島兔兔兎龜龟亀國国国區区区", 4, b"wU4/0i1TmNl2u.1jIwBX.u", b"$2a$04$wU4/0i1TmNl2u.1jIwBX.uZUaOL3Rc5ID7nlQRloQh6q5wwhV/zLW"), + ("诶比伊艾弗豆贝尔维吾艾尺开艾丝维贼德", 4, b"P4kreGLhCd26d4WIy7DJXu", b"$2a$04$P4kreGLhCd26d4WIy7DJXusPkhxLvBouzV6OXkL5EB0jux0osjsry"), + ] + + for (idx, (password, cost, salt64, result)) in enumerate(tvs): + x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) + self.assertEqual(x, result) + bcrypt_check(password, result) + + def test_special_case_salt(self): + # password, cost, salt, bcrypt hash + tvs = [ + ("-O_=*N!2JP", 4, b"......................", b"$2a$04$......................JjuKLOX9OOwo5PceZZXSkaLDvdmgb82"), + ("7B[$Q<4b>U", 5, b"......................", b"$2a$05$......................DRiedDQZRL3xq5A5FL8y7/6NM8a2Y5W"), + (">d5-I_8^.h", 6, b"......................", b"$2a$06$......................5Mq1Ng8jgDY.uHNU4h5p/x6BedzNH2W"), + (")V`/UM/]1t", 4, b".OC/.OC/.OC/.OC/.OC/.O", b"$2a$04$.OC/.OC/.OC/.OC/.OC/.OQIvKRDAam.Hm5/IaV/.hc7P8gwwIbmi"), + (":@t2.bWuH]", 5, b".OC/.OC/.OC/.OC/.OC/.O", b"$2a$05$.OC/.OC/.OC/.OC/.OC/.ONDbUvdOchUiKmQORX6BlkPofa/QxW9e"), + ("b(#KljF5s\"", 6, b".OC/.OC/.OC/.OC/.OC/.O", b"$2a$06$.OC/.OC/.OC/.OC/.OC/.OHfTd9e7svOu34vi1PCvOcAEq07ST7.K"), + ("@3YaJ^Xs]*", 4, b"eGA.eGA.eGA.eGA.eGA.e.", b"$2a$04$eGA.eGA.eGA.eGA.eGA.e.stcmvh.R70m.0jbfSFVxlONdj1iws0C"), + ("'\"5\\!k*C(p", 5, b"eGA.eGA.eGA.eGA.eGA.e.", b"$2a$05$eGA.eGA.eGA.eGA.eGA.e.vR37mVSbfdHwu.F0sNMvgn8oruQRghy"), + ("edEu7C?$'W", 6, b"eGA.eGA.eGA.eGA.eGA.e.", b"$2a$06$eGA.eGA.eGA.eGA.eGA.e.tSq0FN8MWHQXJXNFnHTPQKtA.n2a..G"), + ("N7dHmg\\PI^", 4, b"999999999999999999999u", b"$2a$04$999999999999999999999uCZfA/pLrlyngNDMq89r1uUk.bQ9icOu"), + ("\"eJuHh!)7*", 5, 
b"999999999999999999999u", b"$2a$05$999999999999999999999uj8Pfx.ufrJFAoWFLjapYBS5vVEQQ/hK"), + ("ZeDRJ:_tu:", 6, b"999999999999999999999u", b"$2a$06$999999999999999999999u6RB0P9UmbdbQgjoQFEJsrvrKe.BoU6q"), + ] + + for (idx, (password, cost, salt64, result)) in enumerate(tvs): + x = bcrypt(password, cost, salt=_bcrypt_decode(salt64)) + self.assertEqual(x, result) + bcrypt_check(password, result) + + +class TestVectorsHKDFWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._id = "None" + + def add_tests(self, filename): + + def filter_algo(root): + algo_name = root['algorithm'] + if algo_name == "HKDF-SHA-1": + return SHA1 + elif algo_name == "HKDF-SHA-256": + return SHA256 + elif algo_name == "HKDF-SHA-384": + return SHA384 + elif algo_name == "HKDF-SHA-512": + return SHA512 + else: + raise ValueError("Unknown algorithm " + algo_name) + + def filter_size(unit): + return int(unit['size']) + + result = load_test_vectors_wycheproof(("Protocol", "wycheproof"), + filename, + "Wycheproof HMAC (%s)" % filename, + root_tag={'hash_module': filter_algo}, + unit_tag={'size': filter_size}) + return result + + def setUp(self): + self.tv = [] + self.add_tests("hkdf_sha1_test.json") + self.add_tests("hkdf_sha256_test.json") + self.add_tests("hkdf_sha384_test.json") + self.add_tests("hkdf_sha512_test.json") + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_verify(self, tv): + self._id = "Wycheproof HKDF Test #%d (%s, %s)" % (tv.id, tv.comment, tv.filename) + + try: + key = HKDF(tv.ikm, tv.size, tv.salt, tv.hash_module, 1, tv.info) + except ValueError: + assert not tv.valid + else: + if key != tv.okm: + assert not tv.valid + else: + assert tv.valid + self.warn(tv) + + def runTest(self): + for tv in self.tv: + self.test_verify(tv) + + +def load_hash_by_name(hash_name): + return __import__("Crypto.Hash." 
+ hash_name, globals(), locals(), ["new"]) + + +class SP800_180_Counter_Tests(unittest.TestCase): + + def test_negative_zeroes(self): + def prf(s, x): + return HMAC.new(s, x, SHA256).digest() + + self.assertRaises(ValueError, SP800_108_Counter, b'0' * 16, 1, prf, + label=b'A\x00B') + self.assertRaises(ValueError, SP800_108_Counter, b'0' * 16, 1, prf, + context=b'A\x00B') + + def test_multiple_keys(self): + def prf(s, x): + return HMAC.new(s, x, SHA256).digest() + + key = b'0' * 16 + expected = SP800_108_Counter(key, 2*3*23, prf) + for r in (1, 2, 3, 23): + dks = SP800_108_Counter(key, r, prf, 138//r) + self.assertEqual(len(dks), 138//r) + self.assertEqual(len(dks[0]), r) + self.assertEqual(b''.join(dks), expected) + + +def add_tests_sp800_108_counter(cls): + + test_vectors_sp800_108_counter = load_test_vectors(("Protocol", ), + "KDF_SP800_108_COUNTER.txt", + "NIST SP 800 108 KDF Counter Mode", + {'count': lambda x: int(x)}, + ) or [] + + mac_type = None + for idx, tv in enumerate(test_vectors_sp800_108_counter): + + if isinstance(tv, str): + res = re.match(r"\[HMAC-(SHA-[0-9]+)\]", tv) + if res: + hash_name = res.group(1).replace("-", "") + hash_module = load_hash_by_name(hash_name) + mac_type = "hmac" + continue + res = re.match(r"\[CMAC-AES-128\]", tv) + if res: + mac_type = "cmac" + continue + assert res + + if mac_type == "hmac": + def prf(s, x, hash_module=hash_module): + return HMAC.new(s, x, hash_module).digest() + elif mac_type == "cmac": + def prf(s, x, hash_module=hash_module): + return CMAC.new(s, x, AES).digest() + continue + + def kdf_test(self, prf=prf, kin=tv.kin, label=tv.label, + context=tv.context, kout=tv.kout, count=tv.count): + result = SP800_108_Counter(kin, len(kout), prf, 1, label, context) + assert(len(result) == len(kout)) + self.assertEqual(result, kout) + + setattr(cls, "test_kdf_sp800_108_counter_%d" % idx, kdf_test) + + +add_tests_sp800_108_counter(SP800_180_Counter_Tests) + + +def get_tests(config={}): + wycheproof_warnings = config.get('wycheproof_warnings') + + if not config.get('slow_tests'): + PBKDF2_Tests._testData = PBKDF2_Tests._testData[:3] + scrypt_Tests.data = scrypt_Tests.data[:3] + + tests = [] + tests += list_test_cases(PBKDF1_Tests) + tests += list_test_cases(PBKDF2_Tests) + tests += list_test_cases(S2V_Tests) + tests += list_test_cases(HKDF_Tests) + tests += [TestVectorsHKDFWycheproof(wycheproof_warnings)] + tests += list_test_cases(scrypt_Tests) + tests += list_test_cases(bcrypt_Tests) + tests += list_test_cases(SP800_180_Counter_Tests) + + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Protocol/test_SecretSharing.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Protocol/test_SecretSharing.py new file mode 100644 index 000000000..0ea58a574 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Protocol/test_SecretSharing.py @@ -0,0 +1,267 @@ +# +# SelfTest/Protocol/test_secret_sharing.py: Self-test for secret sharing protocols +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. 
Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from unittest import main, TestCase, TestSuite +from binascii import unhexlify, hexlify + +from Crypto.Util.py3compat import * +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Protocol.SecretSharing import Shamir, _Element, \ + _mult_gf2, _div_gf2 + +class GF2_Tests(TestCase): + + def test_mult_gf2(self): + # Prove mult by zero + x = _mult_gf2(0,0) + self.assertEqual(x, 0) + + # Prove mult by unity + x = _mult_gf2(34, 1) + self.assertEqual(x, 34) + + z = 3 # (x+1) + y = _mult_gf2(z, z) + self.assertEqual(y, 5) # (x+1)^2 = x^2 + 1 + y = _mult_gf2(y, z) + self.assertEqual(y, 15) # (x+1)^3 = x^3 + x^2 + x + 1 + y = _mult_gf2(y, z) + self.assertEqual(y, 17) # (x+1)^4 = x^4 + 1 + + # Prove linearity works + comps = [1, 4, 128, 2**34] + sum_comps = 1+4+128+2**34 + y = 908 + z = _mult_gf2(sum_comps, y) + w = 0 + for x in comps: + w ^= _mult_gf2(x, y) + self.assertEqual(w, z) + + def test_div_gf2(self): + from Crypto.Util.number import size as deg + + x, y = _div_gf2(567, 7) + self.assertTrue(deg(y) < deg(7)) + + w = _mult_gf2(x, 7) ^ y + self.assertEqual(567, w) + + x, y = _div_gf2(7, 567) + self.assertEqual(x, 0) + self.assertEqual(y, 7) + +class Element_Tests(TestCase): + + def test1(self): + # Test encondings + e = _Element(256) + self.assertEqual(int(e), 256) + self.assertEqual(e.encode(), bchr(0)*14 + b("\x01\x00")) + + e = _Element(bchr(0)*14 + b("\x01\x10")) + self.assertEqual(int(e), 0x110) + self.assertEqual(e.encode(), bchr(0)*14 + b("\x01\x10")) + + # Only 16 byte string are a valid encoding + self.assertRaises(ValueError, _Element, bchr(0)) + + def test2(self): + # Test addition + e = _Element(0x10) + f = _Element(0x0A) + self.assertEqual(int(e+f), 0x1A) + + def test3(self): + # Test multiplication + zero = _Element(0) + one = _Element(1) + two = _Element(2) + + x = _Element(6) * zero + self.assertEqual(int(x), 0) + + x = _Element(6) * one + self.assertEqual(int(x), 6) + + x = _Element(2**127) * two + self.assertEqual(int(x), 1 + 2 + 4 + 128) + + def test4(self): + # Test inversion + one = _Element(1) + + x = one.inverse() + self.assertEqual(int(x), 1) + + x = _Element(82323923) + y = x.inverse() + self.assertEqual(int(x * y), 1) + +class Shamir_Tests(TestCase): + + def test1(self): + # Test splitting + shares = Shamir.split(2, 3, bchr(90)*16) + 
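# --- Editor's aside: illustrative sketch, not part of the vendored file. ---
# Shamir.split/combine in this test operate on 16-byte secrets in GF(2^128).
# The underlying idea is easiest to see over a prime field; a toy 2-of-n split
# (names P, toy_split, toy_combine are the editor's; pow(x, -1, P) needs
# Python 3.8+; in a real scheme the coefficient a1 is random per split):
P = 2**127 - 1                               # prime field for the toy example

def toy_split(secret, n=3, a1=123456789):    # f(x) = secret + a1*x  (degree 1)
    return [(x, (secret + a1 * x) % P) for x in range(1, n + 1)]

def toy_combine(share_a, share_b):           # Lagrange interpolation at x = 0
    (xa, ya), (xb, yb) = share_a, share_b
    la = (-xb) % P * pow((xa - xb) % P, -1, P) % P
    lb = (-xa) % P * pow((xb - xa) % P, -1, P) % P
    return (ya * la + yb * lb) % P

toy_shares = toy_split(0x5A5A5A5A)
assert toy_combine(toy_shares[0], toy_shares[2]) == 0x5A5A5A5A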
self.assertEqual(len(shares), 3) + for index in range(3): + self.assertEqual(shares[index][0], index+1) + self.assertEqual(len(shares[index][1]), 16) + + def test2(self): + # Test recombine + from itertools import permutations + + test_vectors = ( + (2, "d9fe73909bae28b3757854c0af7ad405", + "1-594ae8964294174d95c33756d2504170", + "2-d897459d29da574eb40e93ec552ffe6e", + "3-5823de9bf0e068b054b5f07a28056b1b", + "4-db2c1f8bff46d748f795da995bd080cb"), + (2, "bf4f902d9a7efafd1f3ffd9291fd5de9", + "1-557bd3b0748064b533469722d1cc7935", + "2-6b2717164783c66d47cd28f2119f14d0", + "3-8113548ba97d58256bb4424251ae300c", + "4-179e9e5a218483ddaeda57539139cf04"), + (3, "ec96aa5c14c9faa699354cf1da74e904", + "1-64579fbf1908d66f7239bf6e2b4e41e1", + "2-6cd9428df8017b52322561e8c672ae3e", + "3-e418776ef5c0579bd9299277374806dd", + "4-ab3f77a0107398d23b323e581bb43f5d", + "5-23fe42431db2b41bd03ecdc7ea8e97ac"), + (3, "44cf249b68b80fcdc27b47be60c2c145", + "1-d6515a3905cd755119b86e311c801e31", + "2-16693d9ac9f10c254036ced5f8917fa3", + "3-84f74338a48476b99bf5e75a84d3a0d1", + "4-3fe8878dc4a5d35811cf3cbcd33dbe52", + "5-ad76f92fa9d0a9c4ca0c1533af7f6132"), + (5, "5398717c982db935d968eebe53a47f5a", + "1-be7be2dd4c068e7ef576aaa1b1c11b01", + "2-f821f5848441cb98b3eb467e2733ee21", + "3-25ee52f53e203f6e29a0297b5ab486b5", + "4-fc9fb58ef74dab947fbf9acd9d5d83cd", + "5-b1949cce46d81552e65f248d3f74cc5c", + "6-d64797f59977c4d4a7956ad916da7699", + "7-ab608a6546a8b9af8820ff832b1135c7"), + (5, "4a78db90fbf35da5545d2fb728e87596", + "1-08daf9a25d8aa184cfbf02b30a0ed6a0", + "2-dda28261e36f0b14168c2cf153fb734e", + "3-e9fdec5505d674a57f9836c417c1ecaa", + "4-4dce5636ae06dee42d2c82e65f06c735", + "5-3963dc118afc2ba798fa1d452b28ef00", + "6-6dfe6ff5b09e94d2f84c382b12f42424", + "7-6faea9d4d4a4e201bf6c90b9000630c3"), + (10, "eccbf6d66d680b49b073c4f1ddf804aa", + "01-7d8ac32fe4ae209ead1f3220fda34466", + "02-f9144e76988aad647d2e61353a6e96d5", + "03-b14c3b80179203363922d60760271c98", + "04-770bb2a8c28f6cee89e00f4d5cc7f861", + "05-6e3d7073ea368334ef67467871c66799", + "06-248792bc74a98ce024477c13c8fb5f8d", + "07-fcea4640d2db820c0604851e293d2487", + "08-2776c36fb714bb1f8525a0be36fc7dba", + "09-6ee7ac8be773e473a4bf75ee5f065762", + "10-33657fc073354cf91d4a68c735aacfc8", + "11-7645c65094a5868bf225c516fdee2d0c", + "12-840485aacb8226631ecd9c70e3018086"), + (10, "377e63bdbb5f7d4dc58a483d035212bb", + "01-32c53260103be431c843b1a633afe3bd", + "02-0107eb16cb8695084d452d2cc50bc7d6", + "03-df1e5c66cd755287fb0446faccd72a06", + "04-361bbcd5d40797f49dfa1898652da197", + "05-160d3ad1512f7dec7fd9344aed318591", + "06-659af6d95df4f25beca4fb9bfee3b7e8", + "07-37f3b208977bad50b3724566b72bfa9d", + "08-6c1de2dfc69c2986142c26a8248eb316", + "09-5e19220837a396bd4bc8cd685ff314c3", + "10-86e7b864fb0f3d628e46d50c1ba92f1c", + "11-065d0082c80b1aea18f4abe0c49df72e", + "12-84a09430c1d20ea9f388f3123c3733a3"), + ) + + def get_share(p): + pos = p.find('-') + return int(p[:pos]), unhexlify(p[pos + 1:]) + + for tv in test_vectors: + k = tv[0] + secret = unhexlify(tv[1]) + max_perms = 10 + for perm, shares_idx in enumerate(permutations(range(2, len(tv)), k)): + if perm > max_perms: + break + shares = [ get_share(tv[x]) for x in shares_idx ] + result = Shamir.combine(shares, True) + self.assertEqual(secret, result) + + def test3(self): + # Loopback split/recombine + secret = unhexlify(b("000102030405060708090a0b0c0d0e0f")) + + shares = Shamir.split(2, 3, secret) + + secret2 = Shamir.combine(shares[:2]) + self.assertEqual(secret, secret2) + + secret3 = Shamir.combine([ shares[0], shares[2] ]) + 
self.assertEqual(secret, secret3) + + def test4(self): + # Loopback split/recombine (SSSS) + secret = unhexlify(b("000102030405060708090a0b0c0d0e0f")) + + shares = Shamir.split(2, 3, secret, ssss=True) + + secret2 = Shamir.combine(shares[:2], ssss=True) + self.assertEqual(secret, secret2) + + def test5(self): + # Detect duplicate shares + secret = unhexlify(b("000102030405060708090a0b0c0d0e0f")) + + shares = Shamir.split(2, 3, secret) + self.assertRaises(ValueError, Shamir.combine, (shares[0], shares[0])) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(GF2_Tests) + tests += list_test_cases(Element_Tests) + tests += list_test_cases(Shamir_Tests) + return tests + +if __name__ == '__main__': + suite = lambda: TestSuite(get_tests()) + main(defaultTest='suite') + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Protocol/test_ecdh.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Protocol/test_ecdh.py new file mode 100644 index 000000000..ec1193e6e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Protocol/test_ecdh.py @@ -0,0 +1,292 @@ +import re +import unittest +from binascii import hexlify + +from Crypto.Util.py3compat import bord + +from Crypto.Hash import SHA256 +from Crypto.PublicKey import ECC +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors, load_test_vectors_wycheproof + +from Crypto.Protocol.DH import key_agreement + + +class FIPS_ECDH_Tests_KAT(unittest.TestCase): + pass + + +test_vectors_verify = load_test_vectors(("Protocol", ), + "KAS_ECC_CDH_PrimitiveTest.txt", + "ECC CDH Primitive (SP800-56A Section 5.7.1.2)", + { + 'qcavsx': lambda x: int(x, 16), + 'qcavsy': lambda x: int(x, 16), + 'diut': lambda x: int(x, 16), + 'qiutx': lambda x: int(x, 16), + 'qiuty': lambda x: int(x, 16), + }) or [] + +for idx, tv in enumerate(test_vectors_verify): + + # Stand-alone header with curve name + if isinstance(tv, str): + res = re.match(r"\[([A-Za-z0-9-]+)\]", tv) + assert res + curve_name = res.group(1) + continue + + public_key = ECC.construct(curve=curve_name, + point_x=tv.qcavsx, + point_y=tv.qcavsy) + + private_key = ECC.construct(curve=curve_name, + d=tv.diut) + + exp_response = tv.ziut + + def ecdh_test(self, + public_key=public_key, + private_key=private_key, + exp_response=exp_response): + z = key_agreement( + static_pub=public_key, + static_priv=private_key, + kdf=lambda x: x) + self.assertEqual(z, exp_response) + + def ecdh_test_rev(self, + public_key=public_key, + private_key=private_key, + exp_response=exp_response): + z = key_agreement( + static_pub=public_key, + static_priv=private_key, + kdf=lambda x: x) + self.assertEqual(z, exp_response) + + setattr(FIPS_ECDH_Tests_KAT, "test_verify_positive_%d" % idx, ecdh_test) + if idx == 1: + setattr(FIPS_ECDH_Tests_KAT, "test_verify_positive_rev_%d" % idx, ecdh_test_rev) + + +class TestVectorsECDHWycheproof(unittest.TestCase): + + desc = "Wycheproof ECDH tests" + + def add_tests(self, filename): + + def curve(g): + return g['curve'] + + def private(u): + return int(u['private'], 16) + + result = load_test_vectors_wycheproof(("Protocol", "wycheproof"), + filename, + "Wycheproof ECDH (%s)" + % filename, + group_tag={'curve': curve}, + unit_tag={'private': private}, + ) + self.tv += result + + def setUp(self): + self.tv = [] + self.desc = None + + self.add_tests("ecdh_secp224r1_ecpoint_test.json") + self.add_tests("ecdh_secp256r1_ecpoint_test.json") + 
self.add_tests("ecdh_secp384r1_ecpoint_test.json") + self.add_tests("ecdh_secp521r1_ecpoint_test.json") + + self.add_tests("ecdh_secp224r1_test.json") + self.add_tests("ecdh_secp256r1_test.json") + self.add_tests("ecdh_secp384r1_test.json") + self.add_tests("ecdh_secp521r1_test.json") + + def shortDescription(self): + return self.desc + + def test_verify(self, tv): + + if len(tv.public) == 0: + return + + try: + if bord(tv.public[0]) == 4: # SEC1 + public_key = ECC.import_key(tv.public, curve_name=tv.curve) + else: + public_key = ECC.import_key(tv.public) + except ValueError: + assert tv.warning or not tv.valid + return + + private_key = ECC.construct(curve=tv.curve, d=tv.private) + + try: + z = key_agreement(static_pub=public_key, + static_priv=private_key, + kdf=lambda x: x) + except ValueError: + assert not tv.valid + except TypeError as e: + assert not tv.valid + assert "incompatible curve" in str(e) + else: + self.assertEqual(z, tv.shared) + assert tv.valid + + def runTest(self): + for tv in self.tv: + self.desc = "Wycheproof ECDH Verify Test #%d (%s, %s)" % (tv.id, tv.comment, tv.filename) + self.test_verify(tv) + + +class ECDH_Tests(unittest.TestCase): + + static_priv = ECC.import_key('-----BEGIN PRIVATE KEY-----\nMIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQg9VHFVKh2a1aVFifH\n+BiyNaRa2kttEg3165Ye/dJxJ7KhRANCAARImIEXro5ZOcyWU2mq/+d79FEZXtTA\nbKkz1aICQXihQdCMzRNbeNtC9LFLzhu1slRKJ2xsDAlw9r6w6vwtkRzr\n-----END PRIVATE KEY-----') + static_pub = ECC.import_key('-----BEGIN PRIVATE KEY-----\nMIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgHhmv8zmZ+Nw8fsZd\ns8tlZflyfw2NE1CRS9DWr3Y3O46hRANCAAS3hZVUCbk+uk3w4S/YOraEVGG+WYpk\nNO/vrwzufUUks2GV2OnBQESe0EBk4Jq8gn4ij8Lvs3rZX2yT+XfeATYd\n-----END PRIVATE KEY-----').public_key() + + eph_priv = ECC.import_key('-----BEGIN PRIVATE KEY-----\nMIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgGPdJmFFFKzLPspIr\nE1T2cEjeIf4ajS9CpneP0e2b3AyhRANCAAQBexAA5BYDcXHs2KOksTYUsst4HhPt\nkp0zkgI2virc3OGJFNGPaCCPfFCQJHwLRaEpiq3SoQlgoBwSc8ZPsl3y\n-----END PRIVATE KEY-----') + + eph_pub = ECC.import_key('-----BEGIN PRIVATE KEY-----\nMIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQghaVZXElSEGEojFKF\nOU0JCpxWUWHvWQUR81gwWrOp76ShRANCAATi1Ib2K+YR3AckD8wxypWef7pw5PRw\ntBaB3RDPyE7IjHZC6yu1DbcXoCdtaw+F5DM+4zpl59n5ZaIy/Yl1BdIy\n-----END PRIVATE KEY-----') + + def test_1(self): + # C(0, 2s) + kdf = lambda x: SHA256.new(x).digest() + z = key_agreement( + kdf=kdf, + static_pub=self.static_pub, + static_priv=self.static_priv) + self.assertEqual(hexlify(z), + b"3960a1101d1193cbaffef4cc7202ebff783c22c6d2e0d5d530ffc66dc197ea9c") + + def test_2(self): + # C(2e, 2s) + kdf = lambda x: SHA256.new(x).digest() + z = key_agreement( + kdf=kdf, + static_pub=self.static_pub, + static_priv=self.static_priv, + eph_pub=self.eph_pub, + eph_priv=self.eph_priv) + self.assertEqual(hexlify(z), + b"7447b733d40c8fab2c633b3dc61e4a8c742f3a6af7e16fb0cc486f5bdb5d6ba2") + + def test_3(self): + # C(1e, 2s) + kdf = lambda x: SHA256.new(x).digest() + z = key_agreement( + kdf=kdf, + static_pub=self.static_pub, + static_priv=self.static_priv, + eph_priv=self.eph_priv) + self.assertEqual(hexlify(z), + b"9e977ae45f33bf67f285d064d83e6632bcafe3a7d33fe571233bab4794ace759") + + def test_4(self): + # C(1e, 2s) + kdf = lambda x: SHA256.new(x).digest() + z = key_agreement( + kdf=kdf, + static_pub=self.static_pub, + static_priv=self.static_priv, + eph_pub=self.eph_pub) + self.assertEqual(hexlify(z), + b"c9532df6aa7e9dbe5fe85da31ee25ff19c179c88691ec4b8328cc2036dcdadf2") + + def test_5(self): + # C(2e, 1s) is not supported + kdf 
= lambda x: SHA256.new(x).digest() + self.assertRaises(ValueError, + key_agreement, + kdf=kdf, + static_priv=self.static_priv, + eph_pub=self.eph_pub, + eph_priv=self.eph_priv) + + def test_6(self): + # C(2e, 1s) is not supported + kdf = lambda x: SHA256.new(x).digest() + self.assertRaises(ValueError, + key_agreement, + kdf=kdf, + static_pub=self.static_pub, + eph_pub=self.eph_pub, + eph_priv=self.eph_priv) + + def test_7(self): + # C(2e, 0) + kdf = lambda x: SHA256.new(x).digest() + z = key_agreement( + kdf=kdf, + eph_pub=self.eph_pub, + eph_priv=self.eph_priv) + self.assertEqual(hexlify(z), + b"feb257ebe063078b1391aac07913283d7b642ad7df61b46dfc9cd6f420bb896a") + + def test_8(self): + # C(1e, 1s) + kdf = lambda x: SHA256.new(x).digest() + z = key_agreement( + kdf=kdf, + static_priv=self.static_priv, + eph_pub=self.eph_pub) + self.assertEqual(hexlify(z), + b"ee4dc995117476ed57fd17ff0ed44e9f0466d46b929443bc0db9380317583b04") + + def test_9(self): + # C(1e, 1s) + kdf = lambda x: SHA256.new(x).digest() + z = key_agreement( + kdf=kdf, + static_pub=self.static_pub, + eph_priv=self.eph_priv) + self.assertEqual(hexlify(z), + b"2351cc2014f7c40468fa072b5d30f706eeaeef7507311cd8e59bab3b43f03c51") + + def test_10(self): + # No private (local) keys + kdf = lambda x: SHA256.new(x).digest() + self.assertRaises(ValueError, + key_agreement, + kdf=kdf, + static_pub=self.static_pub, + eph_pub=self.eph_pub) + + def test_11(self): + # No public (peer) keys + kdf = lambda x: SHA256.new(x).digest() + self.assertRaises(ValueError, + key_agreement, + kdf=kdf, + static_priv=self.static_priv, + eph_priv=self.eph_priv) + + def test_12(self): + # failure if kdf is missing + self.assertRaises(ValueError, + key_agreement, + static_pub=self.static_pub, + static_priv=self.static_priv) + + +def get_tests(config={}): + + tests = [] + tests += list_test_cases(FIPS_ECDH_Tests_KAT) + tests += [TestVectorsECDHWycheproof()] + tests += list_test_cases(ECDH_Tests) + + slow_tests = config.get('slow_tests') + if slow_tests: + pass + + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py new file mode 100644 index 000000000..0878cc510 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py @@ -0,0 +1,62 @@ +# +# Test script for Crypto.Util.RFC1751. +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew Kuchling and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +import binascii +import unittest +from Crypto.Util import RFC1751 +from Crypto.Util.py3compat import * + +test_data = [('EB33F77EE73D4053', 'TIDE ITCH SLOW REIN RULE MOT'), + ('CCAC2AED591056BE4F90FD441C534766', + 'RASH BUSH MILK LOOK BAD BRIM AVID GAFF BAIT ROT POD LOVE'), + ('EFF81F9BFBC65350920CDD7416DE8009', + 'TROD MUTE TAIL WARM CHAR KONG HAAG CITY BORE O TEAL AWL') + ] + +class RFC1751Test_k2e (unittest.TestCase): + + def runTest (self): + "Check converting keys to English" + for key, words in test_data: + key=binascii.a2b_hex(b(key)) + self.assertEqual(RFC1751.key_to_english(key), words) + +class RFC1751Test_e2k (unittest.TestCase): + + def runTest (self): + "Check converting English strings to keys" + for key, words in test_data: + key=binascii.a2b_hex(b(key)) + self.assertEqual(RFC1751.english_to_key(words), key) + +# class RFC1751Test + +def get_tests(config={}): + return [RFC1751Test_k2e(), RFC1751Test_e2k()] + +if __name__ == "__main__": + unittest.main() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/__init__.py new file mode 100644 index 000000000..437d3e410 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/__init__.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/PublicKey/__init__.py: Self-test for public key crypto +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test for public-key crypto""" + +import unittest +from Crypto.SelfTest.PublicKey import (test_DSA, test_RSA, + test_ECC_NIST, test_ECC_25519, test_ECC_448, + test_import_DSA, test_import_RSA, + test_import_ECC, test_ElGamal) + + +def get_tests(config={}): + tests = [] + tests += test_DSA.get_tests(config=config) + tests += test_RSA.get_tests(config=config) + tests += test_ECC_NIST.get_tests(config=config) + tests += test_ECC_25519.get_tests(config=config) + tests += test_ECC_448.get_tests(config=config) + + tests += test_import_DSA.get_tests(config=config) + tests += test_import_RSA.get_tests(config=config) + tests += test_import_ECC.get_tests(config=config) + + tests += test_ElGamal.get_tests(config=config) + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_DSA.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_DSA.py new file mode 100644 index 000000000..125cf6c45 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_DSA.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/PublicKey/test_DSA.py: Self-test for the DSA primitive +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.PublicKey.DSA""" + +import os +from Crypto.Util.py3compat import * + +import unittest +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex + +def _sws(s): + """Remove whitespace from a text or byte string""" + if isinstance(s,str): + return "".join(s.split()) + else: + return b("").join(s.split()) + +class DSATest(unittest.TestCase): + # Test vector from "Appendix 5. Example of the DSA" of + # "Digital Signature Standard (DSS)", + # U.S. Department of Commerce/National Institute of Standards and Technology + # FIPS 186-2 (+Change Notice), 2000 January 27. 
+ # http://csrc.nist.gov/publications/fips/fips186-2/fips186-2-change1.pdf + + y = _sws("""19131871 d75b1612 a819f29d 78d1b0d7 346f7aa7 7bb62a85 + 9bfd6c56 75da9d21 2d3a36ef 1672ef66 0b8c7c25 5cc0ec74 + 858fba33 f44c0669 9630a76b 030ee333""") + + g = _sws("""626d0278 39ea0a13 413163a5 5b4cb500 299d5522 956cefcb + 3bff10f3 99ce2c2e 71cb9de5 fa24babf 58e5b795 21925c9c + c42e9f6f 464b088c c572af53 e6d78802""") + + p = _sws("""8df2a494 492276aa 3d25759b b06869cb eac0d83a fb8d0cf7 + cbb8324f 0d7882e5 d0762fc5 b7210eaf c2e9adac 32ab7aac + 49693dfb f83724c2 ec0736ee 31c80291""") + + q = _sws("""c773218c 737ec8ee 993b4f2d ed30f48e dace915f""") + + x = _sws("""2070b322 3dba372f de1c0ffc 7b2e3b49 8b260614""") + + k = _sws("""358dad57 1462710f 50e254cf 1a376b2b deaadfbf""") + k_inverse = _sws("""0d516729 8202e49b 4116ac10 4fc3f415 ae52f917""") + m = b2a_hex(b("abc")) + m_hash = _sws("""a9993e36 4706816a ba3e2571 7850c26c 9cd0d89d""") + r = _sws("""8bac1ab6 6410435c b7181f95 b16ab97c 92b341c0""") + s = _sws("""41e2345f 1f56df24 58f426d1 55b4ba2d b6dcd8c8""") + + def setUp(self): + global DSA, Random, bytes_to_long, size + from Crypto.PublicKey import DSA + from Crypto import Random + from Crypto.Util.number import bytes_to_long, inverse, size + + self.dsa = DSA + + def test_generate_1arg(self): + """DSA (default implementation) generated key (1 argument)""" + dsaObj = self.dsa.generate(1024) + self._check_private_key(dsaObj) + pub = dsaObj.public_key() + self._check_public_key(pub) + + def test_generate_2arg(self): + """DSA (default implementation) generated key (2 arguments)""" + dsaObj = self.dsa.generate(1024, Random.new().read) + self._check_private_key(dsaObj) + pub = dsaObj.public_key() + self._check_public_key(pub) + + def test_construct_4tuple(self): + """DSA (default implementation) constructed key (4-tuple)""" + (y, g, p, q) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q)] + dsaObj = self.dsa.construct((y, g, p, q)) + self._test_verification(dsaObj) + + def test_construct_5tuple(self): + """DSA (default implementation) constructed key (5-tuple)""" + (y, g, p, q, x) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q, self.x)] + dsaObj = self.dsa.construct((y, g, p, q, x)) + self._test_signing(dsaObj) + self._test_verification(dsaObj) + + def test_construct_bad_key4(self): + (y, g, p, q) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q)] + tup = (y, g, p+1, q) + self.assertRaises(ValueError, self.dsa.construct, tup) + + tup = (y, g, p, q+1) + self.assertRaises(ValueError, self.dsa.construct, tup) + + tup = (y, 1, p, q) + self.assertRaises(ValueError, self.dsa.construct, tup) + + def test_construct_bad_key5(self): + (y, g, p, q, x) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q, self.x)] + tup = (y, g, p, q, x+1) + self.assertRaises(ValueError, self.dsa.construct, tup) + + tup = (y, g, p, q, q+10) + self.assertRaises(ValueError, self.dsa.construct, tup) + + def _check_private_key(self, dsaObj): + # Check capabilities + self.assertEqual(1, dsaObj.has_private()) + self.assertEqual(1, dsaObj.can_sign()) + self.assertEqual(0, dsaObj.can_encrypt()) + + # Sanity check key data + self.assertEqual(1, dsaObj.p > dsaObj.q) # p > q + self.assertEqual(160, size(dsaObj.q)) # size(q) == 160 bits + self.assertEqual(0, (dsaObj.p - 1) % dsaObj.q) # q is a divisor of p-1 + self.assertEqual(dsaObj.y, pow(dsaObj.g, dsaObj.x, dsaObj.p)) # y == g**x mod p + self.assertEqual(1, 0 < dsaObj.x < 
dsaObj.q) # 0 < x < q + + def _check_public_key(self, dsaObj): + k = bytes_to_long(a2b_hex(self.k)) + m_hash = bytes_to_long(a2b_hex(self.m_hash)) + + # Check capabilities + self.assertEqual(0, dsaObj.has_private()) + self.assertEqual(1, dsaObj.can_sign()) + self.assertEqual(0, dsaObj.can_encrypt()) + + # Check that private parameters are all missing + self.assertEqual(0, hasattr(dsaObj, 'x')) + + # Sanity check key data + self.assertEqual(1, dsaObj.p > dsaObj.q) # p > q + self.assertEqual(160, size(dsaObj.q)) # size(q) == 160 bits + self.assertEqual(0, (dsaObj.p - 1) % dsaObj.q) # q is a divisor of p-1 + + # Public-only key objects should raise an error when .sign() is called + self.assertRaises(TypeError, dsaObj._sign, m_hash, k) + + # Check __eq__ and __ne__ + self.assertEqual(dsaObj.public_key() == dsaObj.public_key(),True) # assert_ + self.assertEqual(dsaObj.public_key() != dsaObj.public_key(),False) # assertFalse + + self.assertEqual(dsaObj.public_key(), dsaObj.publickey()) + + def _test_signing(self, dsaObj): + k = bytes_to_long(a2b_hex(self.k)) + m_hash = bytes_to_long(a2b_hex(self.m_hash)) + r = bytes_to_long(a2b_hex(self.r)) + s = bytes_to_long(a2b_hex(self.s)) + (r_out, s_out) = dsaObj._sign(m_hash, k) + self.assertEqual((r, s), (r_out, s_out)) + + def _test_verification(self, dsaObj): + m_hash = bytes_to_long(a2b_hex(self.m_hash)) + r = bytes_to_long(a2b_hex(self.r)) + s = bytes_to_long(a2b_hex(self.s)) + self.assertTrue(dsaObj._verify(m_hash, (r, s))) + self.assertFalse(dsaObj._verify(m_hash + 1, (r, s))) + + def test_repr(self): + (y, g, p, q) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q)] + dsaObj = self.dsa.construct((y, g, p, q)) + repr(dsaObj) + + +class DSADomainTest(unittest.TestCase): + + def test_domain1(self): + """Verify we can generate new keys in a given domain""" + dsa_key_1 = DSA.generate(1024) + domain_params = dsa_key_1.domain() + + dsa_key_2 = DSA.generate(1024, domain=domain_params) + self.assertEqual(dsa_key_1.p, dsa_key_2.p) + self.assertEqual(dsa_key_1.q, dsa_key_2.q) + self.assertEqual(dsa_key_1.g, dsa_key_2.g) + + self.assertEqual(dsa_key_1.domain(), dsa_key_2.domain()) + + def _get_weak_domain(self): + + from Crypto.Math.Numbers import Integer + from Crypto.Math import Primality + + p = Integer(4) + while p.size_in_bits() != 1024 or Primality.test_probable_prime(p) != Primality.PROBABLY_PRIME: + q1 = Integer.random(exact_bits=80) + q2 = Integer.random(exact_bits=80) + q = q1 * q2 + z = Integer.random(exact_bits=1024-160) + p = z * q + 1 + + h = Integer(2) + g = 1 + while g == 1: + g = pow(h, z, p) + h += 1 + + return (p, q, g) + + + def test_generate_error_weak_domain(self): + """Verify that domain parameters with composite q are rejected""" + + domain_params = self._get_weak_domain() + self.assertRaises(ValueError, DSA.generate, 1024, domain=domain_params) + + + def test_construct_error_weak_domain(self): + """Verify that domain parameters with composite q are rejected""" + + from Crypto.Math.Numbers import Integer + + p, q, g = self._get_weak_domain() + y = pow(g, 89, p) + self.assertRaises(ValueError, DSA.construct, (y, g, p, q)) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(DSATest) + tests += list_test_cases(DSADomainTest) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git 
a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_ECC_25519.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_ECC_25519.py new file mode 100644 index 000000000..9f14131ff --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_ECC_25519.py @@ -0,0 +1,333 @@ +# =================================================================== +# +# Copyright (c) 2022, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors + +from Crypto.PublicKey import ECC +from Crypto.PublicKey.ECC import EccPoint, _curves, EccKey + +from Crypto.Math.Numbers import Integer + +from Crypto.Hash import SHAKE128 + + +class TestEccPoint_Ed25519(unittest.TestCase): + + Gxy = {"x": 15112221349535400772501151409588531511454012693041857206046113283949847762202, + "y": 46316835694926478169428394003475163141307993866256225615783033603165251855960} + + G2xy = {"x": 24727413235106541002554574571675588834622768167397638456726423682521233608206, + "y": 15549675580280190176352668710449542251549572066445060580507079593062643049417} + + G3xy = {"x": 46896733464454938657123544595386787789046198280132665686241321779790909858396, + "y": 8324843778533443976490377120369201138301417226297555316741202210403726505172} + + pointG = EccPoint(Gxy['x'], Gxy['y'], curve="Ed25519") + pointG2 = EccPoint(G2xy['x'], G2xy['y'], curve="Ed25519") + pointG3 = EccPoint(G3xy['x'], G3xy['y'], curve="Ed25519") + + def test_init_xy(self): + EccPoint(self.Gxy['x'], self.Gxy['y'], curve="Ed25519") + + # Neutral point + pai = EccPoint(0, 1, curve="Ed25519") + self.assertEqual(pai.x, 0) + self.assertEqual(pai.y, 1) + self.assertEqual(pai.xy, (0, 1)) + + # G + bp = self.pointG.copy() + self.assertEqual(bp.x, 15112221349535400772501151409588531511454012693041857206046113283949847762202) + self.assertEqual(bp.y, 46316835694926478169428394003475163141307993866256225615783033603165251855960) + self.assertEqual(bp.xy, (bp.x, bp.y)) + + # 2G + bp2 = self.pointG2.copy() + self.assertEqual(bp2.x, 24727413235106541002554574571675588834622768167397638456726423682521233608206) + self.assertEqual(bp2.y, 15549675580280190176352668710449542251549572066445060580507079593062643049417) + self.assertEqual(bp2.xy, (bp2.x, bp2.y)) + + # 5G + EccPoint(x=33467004535436536005251147249499675200073690106659565782908757308821616914995, + y=43097193783671926753355113395909008640284023746042808659097434958891230611693, + curve="Ed25519") + + # Catch if point is not on the curve + self.assertRaises(ValueError, EccPoint, 34, 35, curve="Ed25519") + + def test_set(self): + pointW = EccPoint(0, 1, curve="Ed25519") + pointW.set(self.pointG) + self.assertEqual(pointW.x, self.pointG.x) + self.assertEqual(pointW.y, self.pointG.y) + + def test_copy(self): + pointW = self.pointG.copy() + self.assertEqual(pointW.x, self.pointG.x) + self.assertEqual(pointW.y, self.pointG.y) + + def test_equal(self): + pointH = self.pointG.copy() + pointI = self.pointG2.copy() + self.assertEqual(self.pointG, pointH) + self.assertNotEqual(self.pointG, pointI) + + def test_pai(self): + pai = EccPoint(0, 1, curve="Ed25519") + self.assertTrue(pai.is_point_at_infinity()) + self.assertEqual(pai, pai.point_at_infinity()) + + def test_negate(self): + negG = -self.pointG + sum = self.pointG + negG + self.assertTrue(sum.is_point_at_infinity()) + + def test_addition(self): + self.assertEqual(self.pointG + self.pointG2, self.pointG3) + self.assertEqual(self.pointG2 + self.pointG, self.pointG3) + self.assertEqual(self.pointG2 + self.pointG.point_at_infinity(), self.pointG2) + self.assertEqual(self.pointG.point_at_infinity() + self.pointG2, self.pointG2) + + G5 = self.pointG2 + self.pointG3 + self.assertEqual(G5.x, 33467004535436536005251147249499675200073690106659565782908757308821616914995) + 
self.assertEqual(G5.y, 43097193783671926753355113395909008640284023746042808659097434958891230611693) + + def test_inplace_addition(self): + pointH = self.pointG.copy() + pointH += self.pointG + self.assertEqual(pointH, self.pointG2) + pointH += self.pointG + self.assertEqual(pointH, self.pointG3) + pointH += self.pointG.point_at_infinity() + self.assertEqual(pointH, self.pointG3) + + def test_doubling(self): + pointH = self.pointG.copy() + pointH.double() + self.assertEqual(pointH.x, self.pointG2.x) + self.assertEqual(pointH.y, self.pointG2.y) + + # 2*0 + pai = self.pointG.point_at_infinity() + pointR = pai.copy() + pointR.double() + self.assertEqual(pointR, pai) + + def test_scalar_multiply(self): + d = 0 + pointH = d * self.pointG + self.assertEqual(pointH.x, 0) + self.assertEqual(pointH.y, 1) + + d = 1 + pointH = d * self.pointG + self.assertEqual(pointH.x, self.pointG.x) + self.assertEqual(pointH.y, self.pointG.y) + + d = 2 + pointH = d * self.pointG + self.assertEqual(pointH.x, self.pointG2.x) + self.assertEqual(pointH.y, self.pointG2.y) + + d = 3 + pointH = d * self.pointG + self.assertEqual(pointH.x, self.pointG3.x) + self.assertEqual(pointH.y, self.pointG3.y) + + d = 4 + pointH = d * self.pointG + self.assertEqual(pointH.x, 14582954232372986451776170844943001818709880559417862259286374126315108956272) + self.assertEqual(pointH.y, 32483318716863467900234833297694612235682047836132991208333042722294373421359) + + d = 5 + pointH = d * self.pointG + self.assertEqual(pointH.x, 33467004535436536005251147249499675200073690106659565782908757308821616914995) + self.assertEqual(pointH.y, 43097193783671926753355113395909008640284023746042808659097434958891230611693) + + d = 10 + pointH = d * self.pointG + self.assertEqual(pointH.x, 43500613248243327786121022071801015118933854441360174117148262713429272820047) + self.assertEqual(pointH.y, 45005105423099817237495816771148012388779685712352441364231470781391834741548) + + d = 20 + pointH = d * self.pointG + self.assertEqual(pointH.x, 46694936775300686710656303283485882876784402425210400817529601134760286812591) + self.assertEqual(pointH.y, 8786390172762935853260670851718824721296437982862763585171334833968259029560) + + d = 255 + pointH = d * self.pointG + self.assertEqual(pointH.x, 36843863416400016952258312492144504209624961884991522125275155377549541182230) + self.assertEqual(pointH.y, 22327030283879720808995671630924669697661065034121040761798775626517750047180) + + d = 256 + pointH = d * self.pointG + self.assertEqual(pointH.x, 42740085206947573681423002599456489563927820004573071834350074001818321593686) + self.assertEqual(pointH.y, 6935684722522267618220753829624209639984359598320562595061366101608187623111) + + def test_sizes(self): + self.assertEqual(self.pointG.size_in_bits(), 255) + self.assertEqual(self.pointG.size_in_bytes(), 32) + + +class TestEccKey_Ed25519(unittest.TestCase): + + def test_private_key(self): + seed = unhexlify("9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60") + Px = 38815646466658113194383306759739515082307681141926459231621296960732224964046 + Py = 11903303657706407974989296177215005343713679411332034699907763981919547054807 + + key = EccKey(curve="Ed25519", seed=seed) + self.assertEqual(key.seed, seed) + self.assertEqual(key.d, 36144925721603087658594284515452164870581325872720374094707712194495455132720) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ.x, Px) + self.assertEqual(key.pointQ.y, Py) + + point = EccPoint(Px, Py, "ed25519") + key = EccKey(curve="Ed25519", 
seed=seed, point=point) + self.assertEqual(key.d, 36144925721603087658594284515452164870581325872720374094707712194495455132720) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, point) + + # Other names + key = EccKey(curve="ed25519", seed=seed) + + # Must not accept d parameter + self.assertRaises(ValueError, EccKey, curve="ed25519", d=1) + + def test_public_key(self): + point = EccPoint(_curves['ed25519'].Gx, _curves['ed25519'].Gy, curve='ed25519') + key = EccKey(curve="ed25519", point=point) + self.assertFalse(key.has_private()) + self.assertEqual(key.pointQ, point) + + def test_public_key_derived(self): + priv_key = EccKey(curve="ed25519", seed=b'H'*32) + pub_key = priv_key.public_key() + self.assertFalse(pub_key.has_private()) + self.assertEqual(priv_key.pointQ, pub_key.pointQ) + + def test_invalid_seed(self): + self.assertRaises(ValueError, lambda: EccKey(curve="ed25519", seed=b'H' * 31)) + + def test_equality(self): + private_key = ECC.construct(seed=b'H'*32, curve="Ed25519") + private_key2 = ECC.construct(seed=b'H'*32, curve="ed25519") + private_key3 = ECC.construct(seed=b'C'*32, curve="Ed25519") + + public_key = private_key.public_key() + public_key2 = private_key2.public_key() + public_key3 = private_key3.public_key() + + self.assertEqual(private_key, private_key2) + self.assertNotEqual(private_key, private_key3) + + self.assertEqual(public_key, public_key2) + self.assertNotEqual(public_key, public_key3) + + self.assertNotEqual(public_key, private_key) + + def test_name_consistency(self): + key = ECC.generate(curve='ed25519') + self.assertIn("curve='Ed25519'", repr(key)) + self.assertEqual(key.curve, 'Ed25519') + self.assertEqual(key.public_key().curve, 'Ed25519') + + +class TestEccModule_Ed25519(unittest.TestCase): + + def test_generate(self): + key = ECC.generate(curve="Ed25519") + self.assertTrue(key.has_private()) + point = EccPoint(_curves['Ed25519'].Gx, _curves['Ed25519'].Gy, curve="Ed25519") * key.d + self.assertEqual(key.pointQ, point) + + # Always random + key2 = ECC.generate(curve="Ed25519") + self.assertNotEqual(key, key2) + + # Other names + ECC.generate(curve="Ed25519") + + # Random source + key1 = ECC.generate(curve="Ed25519", randfunc=SHAKE128.new().read) + key2 = ECC.generate(curve="Ed25519", randfunc=SHAKE128.new().read) + self.assertEqual(key1, key2) + + def test_construct(self): + seed = unhexlify("9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60") + Px = 38815646466658113194383306759739515082307681141926459231621296960732224964046 + Py = 11903303657706407974989296177215005343713679411332034699907763981919547054807 + d = 36144925721603087658594284515452164870581325872720374094707712194495455132720 + point = EccPoint(Px, Py, curve="Ed25519") + + # Private key only + key = ECC.construct(curve="Ed25519", seed=seed) + self.assertEqual(key.pointQ, point) + self.assertTrue(key.has_private()) + + # Public key only + key = ECC.construct(curve="Ed25519", point_x=Px, point_y=Py) + self.assertEqual(key.pointQ, point) + self.assertFalse(key.has_private()) + + # Private and public key + key = ECC.construct(curve="Ed25519", seed=seed, point_x=Px, point_y=Py) + self.assertEqual(key.pointQ, point) + self.assertTrue(key.has_private()) + + # Other names + key = ECC.construct(curve="ed25519", seed=seed) + + def test_negative_construct(self): + coord = dict(point_x=10, point_y=4) + coordG = dict(point_x=_curves['ed25519'].Gx, point_y=_curves['ed25519'].Gy) + + self.assertRaises(ValueError, ECC.construct, curve="Ed25519", **coord) + 
self.assertRaises(ValueError, ECC.construct, curve="Ed25519", d=2, **coordG) + self.assertRaises(ValueError, ECC.construct, curve="Ed25519", seed=b'H'*31) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(TestEccPoint_Ed25519) + tests += list_test_cases(TestEccKey_Ed25519) + tests += list_test_cases(TestEccModule_Ed25519) + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_ECC_448.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_ECC_448.py new file mode 100644 index 000000000..178c3a90c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_ECC_448.py @@ -0,0 +1,333 @@ +# =================================================================== +# +# Copyright (c) 2022, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors + +from Crypto.PublicKey import ECC +from Crypto.PublicKey.ECC import EccPoint, _curves, EccKey + +from Crypto.Math.Numbers import Integer + +from Crypto.Hash import SHAKE128 + + +class TestEccPoint_Ed448(unittest.TestCase): + + Gxy = {"x": 0x4f1970c66bed0ded221d15a622bf36da9e146570470f1767ea6de324a3d3a46412ae1af72ab66511433b80e18b00938e2626a82bc70cc05e, + "y": 0x693f46716eb6bc248876203756c9c7624bea73736ca3984087789c1e05a0c2d73ad3ff1ce67c39c4fdbd132c4ed7c8ad9808795bf230fa14} + + G2xy = {"x": 0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa955555555555555555555555555555555555555555555555555555555, + "y": 0xae05e9634ad7048db359d6205086c2b0036ed7a035884dd7b7e36d728ad8c4b80d6565833a2a3098bbbcb2bed1cda06bdaeafbcdea9386ed} + + G3xy = {"x": 0x865886b9108af6455bd64316cb6943332241b8b8cda82c7e2ba077a4a3fcfe8daa9cbf7f6271fd6e862b769465da8575728173286ff2f8f, + "y": 0xe005a8dbd5125cf706cbda7ad43aa6449a4a8d952356c3b9fce43c82ec4e1d58bb3a331bdb6767f0bffa9a68fed02dafb822ac13588ed6fc} + + pointG = EccPoint(Gxy['x'], Gxy['y'], curve="Ed448") + pointG2 = EccPoint(G2xy['x'], G2xy['y'], curve="Ed448") + pointG3 = EccPoint(G3xy['x'], G3xy['y'], curve="Ed448") + + def test_init_xy(self): + EccPoint(self.Gxy['x'], self.Gxy['y'], curve="Ed448") + + # Neutral point + pai = EccPoint(0, 1, curve="Ed448") + self.assertEqual(pai.x, 0) + self.assertEqual(pai.y, 1) + self.assertEqual(pai.xy, (0, 1)) + + # G + bp = self.pointG.copy() + self.assertEqual(bp.x, 0x4f1970c66bed0ded221d15a622bf36da9e146570470f1767ea6de324a3d3a46412ae1af72ab66511433b80e18b00938e2626a82bc70cc05e) + self.assertEqual(bp.y, 0x693f46716eb6bc248876203756c9c7624bea73736ca3984087789c1e05a0c2d73ad3ff1ce67c39c4fdbd132c4ed7c8ad9808795bf230fa14) + self.assertEqual(bp.xy, (bp.x, bp.y)) + + # 2G + bp2 = self.pointG2.copy() + self.assertEqual(bp2.x, 0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa955555555555555555555555555555555555555555555555555555555) + self.assertEqual(bp2.y, 0xae05e9634ad7048db359d6205086c2b0036ed7a035884dd7b7e36d728ad8c4b80d6565833a2a3098bbbcb2bed1cda06bdaeafbcdea9386ed) + self.assertEqual(bp2.xy, (bp2.x, bp2.y)) + + # 5G + EccPoint(x=0x7a9f9335a48dcb0e2ba7601eedb50def80cbcf728562ada756d761e8958812808bc0d57a920c3c96f07b2d8cefc6f950d0a99d1092030034, + y=0xadfd751a2517edd3b9109ce4fd580ade260ca1823ab18fced86551f7b698017127d7a4ee59d2b33c58405512881f225443b4731472f435eb, + curve="Ed448") + + # Catch if point is not on the curve + self.assertRaises(ValueError, EccPoint, 34, 35, curve="Ed448") + + def test_set(self): + pointW = EccPoint(0, 1, curve="Ed448") + pointW.set(self.pointG) + self.assertEqual(pointW.x, self.pointG.x) + self.assertEqual(pointW.y, self.pointG.y) + + def test_copy(self): + pointW = self.pointG.copy() + self.assertEqual(pointW.x, self.pointG.x) + self.assertEqual(pointW.y, self.pointG.y) + + def test_equal(self): + pointH = self.pointG.copy() + pointI = self.pointG2.copy() + self.assertEqual(self.pointG, pointH) + self.assertNotEqual(self.pointG, pointI) + + def test_pai(self): + pai = EccPoint(0, 1, curve="Ed448") + self.assertTrue(pai.is_point_at_infinity()) + self.assertEqual(pai, pai.point_at_infinity()) + + def test_negate(self): + negG = -self.pointG + sum = self.pointG + negG + self.assertTrue(sum.is_point_at_infinity()) + + def test_addition(self): + 
self.assertEqual(self.pointG + self.pointG2, self.pointG3) + self.assertEqual(self.pointG2 + self.pointG, self.pointG3) + self.assertEqual(self.pointG2 + self.pointG.point_at_infinity(), self.pointG2) + self.assertEqual(self.pointG.point_at_infinity() + self.pointG2, self.pointG2) + + G5 = self.pointG2 + self.pointG3 + self.assertEqual(G5.x, 0x7a9f9335a48dcb0e2ba7601eedb50def80cbcf728562ada756d761e8958812808bc0d57a920c3c96f07b2d8cefc6f950d0a99d1092030034) + self.assertEqual(G5.y, 0xadfd751a2517edd3b9109ce4fd580ade260ca1823ab18fced86551f7b698017127d7a4ee59d2b33c58405512881f225443b4731472f435eb) + + def test_inplace_addition(self): + pointH = self.pointG.copy() + pointH += self.pointG + self.assertEqual(pointH, self.pointG2) + pointH += self.pointG + self.assertEqual(pointH, self.pointG3) + pointH += self.pointG.point_at_infinity() + self.assertEqual(pointH, self.pointG3) + + def test_doubling(self): + pointH = self.pointG.copy() + pointH.double() + self.assertEqual(pointH.x, self.pointG2.x) + self.assertEqual(pointH.y, self.pointG2.y) + + # 2*0 + pai = self.pointG.point_at_infinity() + pointR = pai.copy() + pointR.double() + self.assertEqual(pointR, pai) + + def test_scalar_multiply(self): + d = 0 + pointH = d * self.pointG + self.assertEqual(pointH.x, 0) + self.assertEqual(pointH.y, 1) + + d = 1 + pointH = d * self.pointG + self.assertEqual(pointH.x, self.pointG.x) + self.assertEqual(pointH.y, self.pointG.y) + + d = 2 + pointH = d * self.pointG + self.assertEqual(pointH.x, self.pointG2.x) + self.assertEqual(pointH.y, self.pointG2.y) + + d = 3 + pointH = d * self.pointG + self.assertEqual(pointH.x, self.pointG3.x) + self.assertEqual(pointH.y, self.pointG3.y) + + d = 4 + pointH = d * self.pointG + self.assertEqual(pointH.x, 0x49dcbc5c6c0cce2c1419a17226f929ea255a09cf4e0891c693fda4be70c74cc301b7bdf1515dd8ba21aee1798949e120e2ce42ac48ba7f30) + self.assertEqual(pointH.y, 0xd49077e4accde527164b33a5de021b979cb7c02f0457d845c90dc3227b8a5bc1c0d8f97ea1ca9472b5d444285d0d4f5b32e236f86de51839) + + d = 5 + pointH = d * self.pointG + self.assertEqual(pointH.x, 0x7a9f9335a48dcb0e2ba7601eedb50def80cbcf728562ada756d761e8958812808bc0d57a920c3c96f07b2d8cefc6f950d0a99d1092030034) + self.assertEqual(pointH.y, 0xadfd751a2517edd3b9109ce4fd580ade260ca1823ab18fced86551f7b698017127d7a4ee59d2b33c58405512881f225443b4731472f435eb) + + d = 10 + pointH = d * self.pointG + self.assertEqual(pointH.x, 0x77486f9d19f6411cdd35d30d1c3235f71936452c787e5c034134d3e8172278aca61622bc805761ce3dab65118a0122d73b403165d0ed303d) + self.assertEqual(pointH.y, 0x4d2fea0b026be11024f1f0fe7e94e618e8ac17381ada1d1bf7ee293a68ff5d0bf93c1997dc1aabdc0c7e6381428d85b6b1954a89e4cddf67) + + d = 20 + pointH = d * self.pointG + self.assertEqual(pointH.x, 0x3c236422354600fe6763defcc1503737e4ed89e262d0de3ec1e552020f2a56fe3b9e1e012d021072598c3c2821e18268bb8fb8339c0d1216) + self.assertEqual(pointH.y, 0xb555b9721f630ccb05fc466de4c74d3d2781e69eca88e1b040844f04cab39fd946f91c688fa42402bb38fb9c3e61231017020b219b4396e1) + + d = 255 + pointH = d * self.pointG + self.assertEqual(pointH.x, 0xbeb7f8388b05cd9c1aa2e3c0dcf31e2b563659361826225390e7748654f627d5c36cbe627e9019936b56d15d4dad7c337c09bac64ff4197f) + self.assertEqual(pointH.y, 0x1e37312b2dd4e9440c43c6e7725fc4fa3d11e582d4863f1d018e28f50c0efdb1f53f9b01ada7c87fa162b1f0d72401015d57613d25f1ad53) + + d = 256 + pointH = d * self.pointG + self.assertEqual(pointH.x, 0xf19c34feb56730e3e2be761ac0a2a2b24853b281dda019fc35a5ab58e3696beb39609ae756b0d20fb7ccf0d79aaf5f3bca2e4fdb25bfac1c) + self.assertEqual(pointH.y, 
0x3beb69cc9111bffcaddc61d363ce6fe5dd44da4aadce78f52e92e985d5442344ced72c4611ed0daac9f4f5661eab73d7a12d25ce8a30241e) + + def test_sizes(self): + self.assertEqual(self.pointG.size_in_bits(), 448) + self.assertEqual(self.pointG.size_in_bytes(), 56) + + +class TestEccKey_Ed448(unittest.TestCase): + + def test_private_key(self): + seed = unhexlify("4adf5d37ac6785e83e99a924f92676d366a78690af59c92b6bdf14f9cdbcf26fdad478109607583d633b60078d61d51d81b7509c5433b0d4c9") + Px = 0x72a01eea003a35f9ac44231dc4aae2a382f351d80bf32508175b0855edcf389aa2bbf308dd961ce361a6e7c2091bc78957f6ebcf3002a617 + Py = 0x9e0d08d84586e9aeefecacb41d049b831f1a3ee0c3eada63e34557b30702b50ab59fb372feff7c30b8cbb7dd51afbe88444ec56238722ec1 + + key = EccKey(curve="Ed448", seed=seed) + self.assertEqual(key.seed, seed) + self.assertEqual(key.d, 0xb07cf179604f83433186e5178760c759c15125ee54ff6f8dcde46e872b709ac82ed0bd0a4e036d774034dcb18a9fb11894657a1485895f80) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ.x, Px) + self.assertEqual(key.pointQ.y, Py) + + point = EccPoint(Px, Py, "ed448") + key = EccKey(curve="Ed448", seed=seed, point=point) + self.assertEqual(key.d, 0xb07cf179604f83433186e5178760c759c15125ee54ff6f8dcde46e872b709ac82ed0bd0a4e036d774034dcb18a9fb11894657a1485895f80) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, point) + + # Other names + key = EccKey(curve="ed448", seed=seed) + + # Must not accept d parameter + self.assertRaises(ValueError, EccKey, curve="ed448", d=1) + + def test_public_key(self): + point = EccPoint(_curves['ed448'].Gx, _curves['ed448'].Gy, curve='ed448') + key = EccKey(curve="ed448", point=point) + self.assertFalse(key.has_private()) + self.assertEqual(key.pointQ, point) + + def test_public_key_derived(self): + priv_key = EccKey(curve="ed448", seed=b'H'*57) + pub_key = priv_key.public_key() + self.assertFalse(pub_key.has_private()) + self.assertEqual(priv_key.pointQ, pub_key.pointQ) + + def test_invalid_seed(self): + self.assertRaises(ValueError, lambda: EccKey(curve="ed448", seed=b'H' * 56)) + + def test_equality(self): + private_key = ECC.construct(seed=b'H'*57, curve="Ed448") + private_key2 = ECC.construct(seed=b'H'*57, curve="ed448") + private_key3 = ECC.construct(seed=b'C'*57, curve="Ed448") + + public_key = private_key.public_key() + public_key2 = private_key2.public_key() + public_key3 = private_key3.public_key() + + self.assertEqual(private_key, private_key2) + self.assertNotEqual(private_key, private_key3) + + self.assertEqual(public_key, public_key2) + self.assertNotEqual(public_key, public_key3) + + self.assertNotEqual(public_key, private_key) + + def test_name_consistency(self): + key = ECC.generate(curve='ed448') + self.assertIn("curve='Ed448'", repr(key)) + self.assertEqual(key.curve, 'Ed448') + self.assertEqual(key.public_key().curve, 'Ed448') + + +class TestEccModule_Ed448(unittest.TestCase): + + def test_generate(self): + key = ECC.generate(curve="Ed448") + self.assertTrue(key.has_private()) + point = EccPoint(_curves['Ed448'].Gx, _curves['Ed448'].Gy, curve="Ed448") * key.d + self.assertEqual(key.pointQ, point) + + # Always random + key2 = ECC.generate(curve="Ed448") + self.assertNotEqual(key, key2) + + # Other names + ECC.generate(curve="Ed448") + + # Random source + key1 = ECC.generate(curve="Ed448", randfunc=SHAKE128.new().read) + key2 = ECC.generate(curve="Ed448", randfunc=SHAKE128.new().read) + self.assertEqual(key1, key2) + + def test_construct(self): + seed = 
unhexlify("4adf5d37ac6785e83e99a924f92676d366a78690af59c92b6bdf14f9cdbcf26fdad478109607583d633b60078d61d51d81b7509c5433b0d4c9") + Px = 0x72a01eea003a35f9ac44231dc4aae2a382f351d80bf32508175b0855edcf389aa2bbf308dd961ce361a6e7c2091bc78957f6ebcf3002a617 + Py = 0x9e0d08d84586e9aeefecacb41d049b831f1a3ee0c3eada63e34557b30702b50ab59fb372feff7c30b8cbb7dd51afbe88444ec56238722ec1 + d = 0xb07cf179604f83433186e5178760c759c15125ee54ff6f8dcde46e872b709ac82ed0bd0a4e036d774034dcb18a9fb11894657a1485895f80 + point = EccPoint(Px, Py, curve="Ed448") + + # Private key only + key = ECC.construct(curve="Ed448", seed=seed) + self.assertEqual(key.pointQ, point) + self.assertTrue(key.has_private()) + + # Public key only + key = ECC.construct(curve="Ed448", point_x=Px, point_y=Py) + self.assertEqual(key.pointQ, point) + self.assertFalse(key.has_private()) + + # Private and public key + key = ECC.construct(curve="Ed448", seed=seed, point_x=Px, point_y=Py) + self.assertEqual(key.pointQ, point) + self.assertTrue(key.has_private()) + + # Other names + key = ECC.construct(curve="ed448", seed=seed) + + def test_negative_construct(self): + coord = dict(point_x=10, point_y=4) + coordG = dict(point_x=_curves['ed448'].Gx, point_y=_curves['ed448'].Gy) + + self.assertRaises(ValueError, ECC.construct, curve="Ed448", **coord) + self.assertRaises(ValueError, ECC.construct, curve="Ed448", d=2, **coordG) + self.assertRaises(ValueError, ECC.construct, curve="Ed448", seed=b'H'*58) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(TestEccPoint_Ed448) + tests += list_test_cases(TestEccKey_Ed448) + tests += list_test_cases(TestEccModule_Ed448) + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_ECC_NIST.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_ECC_NIST.py new file mode 100644 index 000000000..dfb696aa5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_ECC_NIST.py @@ -0,0 +1,1425 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors + +from Crypto.PublicKey import ECC +from Crypto.PublicKey.ECC import EccPoint, _curves, EccKey + +from Crypto.Math.Numbers import Integer + + +class TestEccPoint(unittest.TestCase): + + def test_mix(self): + + p1 = ECC.generate(curve='P-256').pointQ + p2 = ECC.generate(curve='P-384').pointQ + + try: + p1 + p2 + assert(False) + except ValueError as e: + assert "not on the same curve" in str(e) + + try: + p1 += p2 + assert(False) + except ValueError as e: + assert "not on the same curve" in str(e) + + class OtherKeyType: + pass + + self.assertFalse(p1 == OtherKeyType()) + self.assertTrue(p1 != OtherKeyType()) + + def test_repr(self): + p1 = ECC.construct(curve='P-256', + d=75467964919405407085864614198393977741148485328036093939970922195112333446269, + point_x=20573031766139722500939782666697015100983491952082159880539639074939225934381, + point_y=108863130203210779921520632367477406025152638284581252625277850513266505911389) + self.assertEqual(repr(p1), "EccKey(curve='NIST P-256', point_x=20573031766139722500939782666697015100983491952082159880539639074939225934381, point_y=108863130203210779921520632367477406025152638284581252625277850513266505911389, d=75467964919405407085864614198393977741148485328036093939970922195112333446269)") + + +class TestEccPoint_NIST_P192(unittest.TestCase): + """Tests defined in section 4.1 of https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.204.9073&rep=rep1&type=pdf""" + + pointS = EccPoint( + 0xd458e7d127ae671b0c330266d246769353a012073e97acf8, + 0x325930500d851f336bddc050cf7fb11b5673a1645086df3b, + curve='p192') + + pointT = EccPoint( + 0xf22c4395213e9ebe67ddecdd87fdbd01be16fb059b9753a4, + 0x264424096af2b3597796db48f8dfb41fa9cecc97691a9c79, + curve='p192') + + def test_set(self): + pointW = EccPoint(0, 0) + pointW.set(self.pointS) + self.assertEqual(pointW, self.pointS) + + def test_copy(self): + pointW = self.pointS.copy() + self.assertEqual(pointW, self.pointS) + pointW.set(self.pointT) + self.assertEqual(pointW, self.pointT) + self.assertNotEqual(self.pointS, self.pointT) + + def test_negate(self): + negS = -self.pointS + sum = self.pointS + negS + self.assertEqual(sum, self.pointS.point_at_infinity()) + + def test_addition(self): + pointRx = 0x48e1e4096b9b8e5ca9d0f1f077b8abf58e843894de4d0290 + pointRy = 0x408fa77c797cd7dbfb16aa48a3648d3d63c94117d7b6aa4b + + pointR = self.pointS + self.pointT + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pai = pointR.point_at_infinity() + + # S + 0 + pointR = self.pointS + pai + self.assertEqual(pointR, self.pointS) + + # 0 + S + pointR = pai + self.pointS + self.assertEqual(pointR, self.pointS) + + # 0 + 0 + pointR = pai + pai + self.assertEqual(pointR, pai) + + def test_inplace_addition(self): + pointRx = 
0x48e1e4096b9b8e5ca9d0f1f077b8abf58e843894de4d0290 + pointRy = 0x408fa77c797cd7dbfb16aa48a3648d3d63c94117d7b6aa4b + + pointR = self.pointS.copy() + pointR += self.pointT + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pai = pointR.point_at_infinity() + + # S + 0 + pointR = self.pointS.copy() + pointR += pai + self.assertEqual(pointR, self.pointS) + + # 0 + S + pointR = pai.copy() + pointR += self.pointS + self.assertEqual(pointR, self.pointS) + + # 0 + 0 + pointR = pai.copy() + pointR += pai + self.assertEqual(pointR, pai) + + def test_doubling(self): + pointRx = 0x30c5bc6b8c7da25354b373dc14dd8a0eba42d25a3f6e6962 + pointRy = 0x0dde14bc4249a721c407aedbf011e2ddbbcb2968c9d889cf + + pointR = self.pointS.copy() + pointR.double() + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + # 2*0 + pai = self.pointS.point_at_infinity() + pointR = pai.copy() + pointR.double() + self.assertEqual(pointR, pai) + + # S + S + pointR = self.pointS.copy() + pointR += pointR + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_scalar_multiply(self): + d = 0xa78a236d60baec0c5dd41b33a542463a8255391af64c74ee + pointRx = 0x1faee4205a4f669d2d0a8f25e3bcec9a62a6952965bf6d31 + pointRy = 0x5ff2cdfa508a2581892367087c696f179e7a4d7e8260fb06 + + pointR = self.pointS * d + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + # 0*S + pai = self.pointS.point_at_infinity() + pointR = self.pointS * 0 + self.assertEqual(pointR, pai) + + # -1*S + self.assertRaises(ValueError, lambda: self.pointS * -1) + + # Reverse order + pointR = d * self.pointS + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pointR = Integer(d) * self.pointS + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_joint_scalar_multiply(self): + d = 0xa78a236d60baec0c5dd41b33a542463a8255391af64c74ee + e = 0xc4be3d53ec3089e71e4de8ceab7cce889bc393cd85b972bc + pointRx = 0x019f64eed8fa9b72b7dfea82c17c9bfa60ecb9e1778b5bde + pointRy = 0x16590c5fcd8655fa4ced33fb800e2a7e3c61f35d83503644 + + pointR = self.pointS * d + self.pointT * e + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_sizes(self): + self.assertEqual(self.pointS.size_in_bits(), 192) + self.assertEqual(self.pointS.size_in_bytes(), 24) + + +class TestEccPoint_NIST_P224(unittest.TestCase): + """Tests defined in section 4.2 of https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.204.9073&rep=rep1&type=pdf""" + + pointS = EccPoint( + 0x6eca814ba59a930843dc814edd6c97da95518df3c6fdf16e9a10bb5b, + 0xef4b497f0963bc8b6aec0ca0f259b89cd80994147e05dc6b64d7bf22, + curve='p224') + + pointT = EccPoint( + 0xb72b25aea5cb03fb88d7e842002969648e6ef23c5d39ac903826bd6d, + 0xc42a8a4d34984f0b71b5b4091af7dceb33ea729c1a2dc8b434f10c34, + curve='p224') + + def test_set(self): + pointW = EccPoint(0, 0) + pointW.set(self.pointS) + self.assertEqual(pointW, self.pointS) + + def test_copy(self): + pointW = self.pointS.copy() + self.assertEqual(pointW, self.pointS) + pointW.set(self.pointT) + self.assertEqual(pointW, self.pointT) + self.assertNotEqual(self.pointS, self.pointT) + + def test_negate(self): + negS = -self.pointS + sum = self.pointS + negS + self.assertEqual(sum, self.pointS.point_at_infinity()) + + def test_addition(self): + pointRx = 0x236f26d9e84c2f7d776b107bd478ee0a6d2bcfcaa2162afae8d2fd15 + pointRy = 0xe53cc0a7904ce6c3746f6a97471297a0b7d5cdf8d536ae25bb0fda70 + + pointR = self.pointS + self.pointT + 
self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pai = pointR.point_at_infinity() + + # S + 0 + pointR = self.pointS + pai + self.assertEqual(pointR, self.pointS) + + # 0 + S + pointR = pai + self.pointS + self.assertEqual(pointR, self.pointS) + + # 0 + 0 + pointR = pai + pai + self.assertEqual(pointR, pai) + + def test_inplace_addition(self): + pointRx = 0x236f26d9e84c2f7d776b107bd478ee0a6d2bcfcaa2162afae8d2fd15 + pointRy = 0xe53cc0a7904ce6c3746f6a97471297a0b7d5cdf8d536ae25bb0fda70 + + pointR = self.pointS.copy() + pointR += self.pointT + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pai = pointR.point_at_infinity() + + # S + 0 + pointR = self.pointS.copy() + pointR += pai + self.assertEqual(pointR, self.pointS) + + # 0 + S + pointR = pai.copy() + pointR += self.pointS + self.assertEqual(pointR, self.pointS) + + # 0 + 0 + pointR = pai.copy() + pointR += pai + self.assertEqual(pointR, pai) + + def test_doubling(self): + pointRx = 0xa9c96f2117dee0f27ca56850ebb46efad8ee26852f165e29cb5cdfc7 + pointRy = 0xadf18c84cf77ced4d76d4930417d9579207840bf49bfbf5837dfdd7d + + pointR = self.pointS.copy() + pointR.double() + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + # 2*0 + pai = self.pointS.point_at_infinity() + pointR = pai.copy() + pointR.double() + self.assertEqual(pointR, pai) + + # S + S + pointR = self.pointS.copy() + pointR += pointR + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_scalar_multiply(self): + d = 0xa78ccc30eaca0fcc8e36b2dd6fbb03df06d37f52711e6363aaf1d73b + pointRx = 0x96a7625e92a8d72bff1113abdb95777e736a14c6fdaacc392702bca4 + pointRy = 0x0f8e5702942a3c5e13cd2fd5801915258b43dfadc70d15dbada3ed10 + + pointR = self.pointS * d + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + # 0*S + pai = self.pointS.point_at_infinity() + pointR = self.pointS * 0 + self.assertEqual(pointR, pai) + + # -1*S + self.assertRaises(ValueError, lambda: self.pointS * -1) + + # Reverse order + pointR = d * self.pointS + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pointR = Integer(d) * self.pointS + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_joing_scalar_multiply(self): + d = 0xa78ccc30eaca0fcc8e36b2dd6fbb03df06d37f52711e6363aaf1d73b + e = 0x54d549ffc08c96592519d73e71e8e0703fc8177fa88aa77a6ed35736 + pointRx = 0xdbfe2958c7b2cda1302a67ea3ffd94c918c5b350ab838d52e288c83e + pointRy = 0x2f521b83ac3b0549ff4895abcc7f0c5a861aacb87acbc5b8147bb18b + + pointR = self.pointS * d + self.pointT * e + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_sizes(self): + self.assertEqual(self.pointS.size_in_bits(), 224) + self.assertEqual(self.pointS.size_in_bytes(), 28) + + +class TestEccPoint_NIST_P256(unittest.TestCase): + """Tests defined in section 4.3 of https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.204.9073&rep=rep1&type=pdf""" + + pointS = EccPoint( + 0xde2444bebc8d36e682edd27e0f271508617519b3221a8fa0b77cab3989da97c9, + 0xc093ae7ff36e5380fc01a5aad1e66659702de80f53cec576b6350b243042a256) + + pointT = EccPoint( + 0x55a8b00f8da1d44e62f6b3b25316212e39540dc861c89575bb8cf92e35e0986b, + 0x5421c3209c2d6c704835d82ac4c3dd90f61a8a52598b9e7ab656e9d8c8b24316) + + def test_set(self): + pointW = EccPoint(0, 0) + pointW.set(self.pointS) + self.assertEqual(pointW, self.pointS) + + def test_copy(self): + pointW = self.pointS.copy() + self.assertEqual(pointW, 
self.pointS) + pointW.set(self.pointT) + self.assertEqual(pointW, self.pointT) + self.assertNotEqual(self.pointS, self.pointT) + + def test_negate(self): + negS = -self.pointS + sum = self.pointS + negS + self.assertEqual(sum, self.pointS.point_at_infinity()) + + def test_addition(self): + pointRx = 0x72b13dd4354b6b81745195e98cc5ba6970349191ac476bd4553cf35a545a067e + pointRy = 0x8d585cbb2e1327d75241a8a122d7620dc33b13315aa5c9d46d013011744ac264 + + pointR = self.pointS + self.pointT + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pai = pointR.point_at_infinity() + + # S + 0 + pointR = self.pointS + pai + self.assertEqual(pointR, self.pointS) + + # 0 + S + pointR = pai + self.pointS + self.assertEqual(pointR, self.pointS) + + # 0 + 0 + pointR = pai + pai + self.assertEqual(pointR, pai) + + def test_inplace_addition(self): + pointRx = 0x72b13dd4354b6b81745195e98cc5ba6970349191ac476bd4553cf35a545a067e + pointRy = 0x8d585cbb2e1327d75241a8a122d7620dc33b13315aa5c9d46d013011744ac264 + + pointR = self.pointS.copy() + pointR += self.pointT + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pai = pointR.point_at_infinity() + + # S + 0 + pointR = self.pointS.copy() + pointR += pai + self.assertEqual(pointR, self.pointS) + + # 0 + S + pointR = pai.copy() + pointR += self.pointS + self.assertEqual(pointR, self.pointS) + + # 0 + 0 + pointR = pai.copy() + pointR += pai + self.assertEqual(pointR, pai) + + def test_doubling(self): + pointRx = 0x7669e6901606ee3ba1a8eef1e0024c33df6c22f3b17481b82a860ffcdb6127b0 + pointRy = 0xfa878162187a54f6c39f6ee0072f33de389ef3eecd03023de10ca2c1db61d0c7 + + pointR = self.pointS.copy() + pointR.double() + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + # 2*0 + pai = self.pointS.point_at_infinity() + pointR = pai.copy() + pointR.double() + self.assertEqual(pointR, pai) + + # S + S + pointR = self.pointS.copy() + pointR += pointR + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_scalar_multiply(self): + d = 0xc51e4753afdec1e6b6c6a5b992f43f8dd0c7a8933072708b6522468b2ffb06fd + pointRx = 0x51d08d5f2d4278882946d88d83c97d11e62becc3cfc18bedacc89ba34eeca03f + pointRy = 0x75ee68eb8bf626aa5b673ab51f6e744e06f8fcf8a6c0cf3035beca956a7b41d5 + + pointR = self.pointS * d + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + # 0*S + pai = self.pointS.point_at_infinity() + pointR = self.pointS * 0 + self.assertEqual(pointR, pai) + + # -1*S + self.assertRaises(ValueError, lambda: self.pointS * -1) + + # Reverse order + pointR = d * self.pointS + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pointR = Integer(d) * self.pointS + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_joing_scalar_multiply(self): + d = 0xc51e4753afdec1e6b6c6a5b992f43f8dd0c7a8933072708b6522468b2ffb06fd + e = 0xd37f628ece72a462f0145cbefe3f0b355ee8332d37acdd83a358016aea029db7 + pointRx = 0xd867b4679221009234939221b8046245efcf58413daacbeff857b8588341f6b8 + pointRy = 0xf2504055c03cede12d22720dad69c745106b6607ec7e50dd35d54bd80f615275 + + pointR = self.pointS * d + self.pointT * e + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_sizes(self): + self.assertEqual(self.pointS.size_in_bits(), 256) + self.assertEqual(self.pointS.size_in_bytes(), 32) + + +class TestEccPoint_NIST_P384(unittest.TestCase): + """Tests defined in section 4.4 of 
https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.204.9073&rep=rep1&type=pdf""" + + pointS = EccPoint( + 0xfba203b81bbd23f2b3be971cc23997e1ae4d89e69cb6f92385dda82768ada415ebab4167459da98e62b1332d1e73cb0e, + 0x5ffedbaefdeba603e7923e06cdb5d0c65b22301429293376d5c6944e3fa6259f162b4788de6987fd59aed5e4b5285e45, + "p384") + + pointT = EccPoint( + 0xaacc05202e7fda6fc73d82f0a66220527da8117ee8f8330ead7d20ee6f255f582d8bd38c5a7f2b40bcdb68ba13d81051, + 0x84009a263fefba7c2c57cffa5db3634d286131afc0fca8d25afa22a7b5dce0d9470da89233cee178592f49b6fecb5092, + "p384") + + def test_set(self): + pointW = EccPoint(0, 0, "p384") + pointW.set(self.pointS) + self.assertEqual(pointW, self.pointS) + + def test_copy(self): + pointW = self.pointS.copy() + self.assertEqual(pointW, self.pointS) + pointW.set(self.pointT) + self.assertEqual(pointW, self.pointT) + self.assertNotEqual(self.pointS, self.pointT) + + def test_negate(self): + negS = -self.pointS + sum = self.pointS + negS + self.assertEqual(sum, self.pointS.point_at_infinity()) + + def test_addition(self): + pointRx = 0x12dc5ce7acdfc5844d939f40b4df012e68f865b89c3213ba97090a247a2fc009075cf471cd2e85c489979b65ee0b5eed + pointRy = 0x167312e58fe0c0afa248f2854e3cddcb557f983b3189b67f21eee01341e7e9fe67f6ee81b36988efa406945c8804a4b0 + + pointR = self.pointS + self.pointT + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pai = pointR.point_at_infinity() + + # S + 0 + pointR = self.pointS + pai + self.assertEqual(pointR, self.pointS) + + # 0 + S + pointR = pai + self.pointS + self.assertEqual(pointR, self.pointS) + + # 0 + 0 + pointR = pai + pai + self.assertEqual(pointR, pai) + + def _test_inplace_addition(self): + pointRx = 0x72b13dd4354b6b81745195e98cc5ba6970349191ac476bd4553cf35a545a067e + pointRy = 0x8d585cbb2e1327d75241a8a122d7620dc33b13315aa5c9d46d013011744ac264 + + pointR = self.pointS.copy() + pointR += self.pointT + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pai = pointR.point_at_infinity() + + # S + 0 + pointR = self.pointS.copy() + pointR += pai + self.assertEqual(pointR, self.pointS) + + # 0 + S + pointR = pai.copy() + pointR += self.pointS + self.assertEqual(pointR, self.pointS) + + # 0 + 0 + pointR = pai.copy() + pointR += pai + self.assertEqual(pointR, pai) + + def test_doubling(self): + pointRx = 0x2a2111b1e0aa8b2fc5a1975516bc4d58017ff96b25e1bdff3c229d5fac3bacc319dcbec29f9478f42dee597b4641504c + pointRy = 0xfa2e3d9dc84db8954ce8085ef28d7184fddfd1344b4d4797343af9b5f9d837520b450f726443e4114bd4e5bdb2f65ddd + + pointR = self.pointS.copy() + pointR.double() + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + # 2*0 + pai = self.pointS.point_at_infinity() + pointR = pai.copy() + pointR.double() + self.assertEqual(pointR, pai) + + # S + S + pointR = self.pointS.copy() + pointR += pointR + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_scalar_multiply(self): + d = 0xa4ebcae5a665983493ab3e626085a24c104311a761b5a8fdac052ed1f111a5c44f76f45659d2d111a61b5fdd97583480 + pointRx = 0xe4f77e7ffeb7f0958910e3a680d677a477191df166160ff7ef6bb5261f791aa7b45e3e653d151b95dad3d93ca0290ef2 + pointRy = 0xac7dee41d8c5f4a7d5836960a773cfc1376289d3373f8cf7417b0c6207ac32e913856612fc9ff2e357eb2ee05cf9667f + + pointR = self.pointS * d + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + # 0*S + pai = self.pointS.point_at_infinity() + pointR = self.pointS * 0 + self.assertEqual(pointR, pai) + + # -1*S + 
self.assertRaises(ValueError, lambda: self.pointS * -1) + + def test_joing_scalar_multiply(self): + d = 0xa4ebcae5a665983493ab3e626085a24c104311a761b5a8fdac052ed1f111a5c44f76f45659d2d111a61b5fdd97583480 + e = 0xafcf88119a3a76c87acbd6008e1349b29f4ba9aa0e12ce89bcfcae2180b38d81ab8cf15095301a182afbc6893e75385d + pointRx = 0x917ea28bcd641741ae5d18c2f1bd917ba68d34f0f0577387dc81260462aea60e2417b8bdc5d954fc729d211db23a02dc + pointRy = 0x1a29f7ce6d074654d77b40888c73e92546c8f16a5ff6bcbd307f758d4aee684beff26f6742f597e2585c86da908f7186 + + pointR = self.pointS * d + self.pointT * e + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_sizes(self): + self.assertEqual(self.pointS.size_in_bits(), 384) + self.assertEqual(self.pointS.size_in_bytes(), 48) + + +class TestEccPoint_NIST_P521(unittest.TestCase): + """Tests defined in section 4.5 of https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.204.9073&rep=rep1&type=pdf""" + + pointS = EccPoint( + 0x000001d5c693f66c08ed03ad0f031f937443458f601fd098d3d0227b4bf62873af50740b0bb84aa157fc847bcf8dc16a8b2b8bfd8e2d0a7d39af04b089930ef6dad5c1b4, + 0x00000144b7770963c63a39248865ff36b074151eac33549b224af5c8664c54012b818ed037b2b7c1a63ac89ebaa11e07db89fcee5b556e49764ee3fa66ea7ae61ac01823, + "p521") + + pointT = EccPoint( + 0x000000f411f2ac2eb971a267b80297ba67c322dba4bb21cec8b70073bf88fc1ca5fde3ba09e5df6d39acb2c0762c03d7bc224a3e197feaf760d6324006fe3be9a548c7d5, + 0x000001fdf842769c707c93c630df6d02eff399a06f1b36fb9684f0b373ed064889629abb92b1ae328fdb45534268384943f0e9222afe03259b32274d35d1b9584c65e305, + "p521") + + def test_set(self): + pointW = EccPoint(0, 0) + pointW.set(self.pointS) + self.assertEqual(pointW, self.pointS) + + def test_copy(self): + pointW = self.pointS.copy() + self.assertEqual(pointW, self.pointS) + pointW.set(self.pointT) + self.assertEqual(pointW, self.pointT) + self.assertNotEqual(self.pointS, self.pointT) + + def test_negate(self): + negS = -self.pointS + sum = self.pointS + negS + self.assertEqual(sum, self.pointS.point_at_infinity()) + + def test_addition(self): + pointRx = 0x000001264ae115ba9cbc2ee56e6f0059e24b52c8046321602c59a339cfb757c89a59c358a9a8e1f86d384b3f3b255ea3f73670c6dc9f45d46b6a196dc37bbe0f6b2dd9e9 + pointRy = 0x00000062a9c72b8f9f88a271690bfa017a6466c31b9cadc2fc544744aeb817072349cfddc5ad0e81b03f1897bd9c8c6efbdf68237dc3bb00445979fb373b20c9a967ac55 + + pointR = self.pointS + self.pointT + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pai = pointR.point_at_infinity() + + # S + 0 + pointR = self.pointS + pai + self.assertEqual(pointR, self.pointS) + + # 0 + S + pointR = pai + self.pointS + self.assertEqual(pointR, self.pointS) + + # 0 + 0 + pointR = pai + pai + self.assertEqual(pointR, pai) + + def test_inplace_addition(self): + pointRx = 0x000001264ae115ba9cbc2ee56e6f0059e24b52c8046321602c59a339cfb757c89a59c358a9a8e1f86d384b3f3b255ea3f73670c6dc9f45d46b6a196dc37bbe0f6b2dd9e9 + pointRy = 0x00000062a9c72b8f9f88a271690bfa017a6466c31b9cadc2fc544744aeb817072349cfddc5ad0e81b03f1897bd9c8c6efbdf68237dc3bb00445979fb373b20c9a967ac55 + + pointR = self.pointS.copy() + pointR += self.pointT + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + pai = pointR.point_at_infinity() + + # S + 0 + pointR = self.pointS.copy() + pointR += pai + self.assertEqual(pointR, self.pointS) + + # 0 + S + pointR = pai.copy() + pointR += self.pointS + self.assertEqual(pointR, self.pointS) + + # 0 + 0 + pointR = pai.copy() + pointR += pai + self.assertEqual(pointR, pai) + + 
def test_doubling(self): + pointRx = 0x0000012879442f2450c119e7119a5f738be1f1eba9e9d7c6cf41b325d9ce6d643106e9d61124a91a96bcf201305a9dee55fa79136dc700831e54c3ca4ff2646bd3c36bc6 + pointRy = 0x0000019864a8b8855c2479cbefe375ae553e2393271ed36fadfc4494fc0583f6bd03598896f39854abeae5f9a6515a021e2c0eef139e71de610143f53382f4104dccb543 + + pointR = self.pointS.copy() + pointR.double() + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + # 2*0 + pai = self.pointS.point_at_infinity() + pointR = pai.copy() + pointR.double() + self.assertEqual(pointR, pai) + + # S + S + pointR = self.pointS.copy() + pointR += pointR + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_scalar_multiply(self): + d = 0x000001eb7f81785c9629f136a7e8f8c674957109735554111a2a866fa5a166699419bfa9936c78b62653964df0d6da940a695c7294d41b2d6600de6dfcf0edcfc89fdcb1 + pointRx = 0x00000091b15d09d0ca0353f8f96b93cdb13497b0a4bb582ae9ebefa35eee61bf7b7d041b8ec34c6c00c0c0671c4ae063318fb75be87af4fe859608c95f0ab4774f8c95bb + pointRy = 0x00000130f8f8b5e1abb4dd94f6baaf654a2d5810411e77b7423965e0c7fd79ec1ae563c207bd255ee9828eb7a03fed565240d2cc80ddd2cecbb2eb50f0951f75ad87977f + + pointR = self.pointS * d + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + # 0*S + pai = self.pointS.point_at_infinity() + pointR = self.pointS * 0 + self.assertEqual(pointR, pai) + + # -1*S + self.assertRaises(ValueError, lambda: self.pointS * -1) + + def test_joing_scalar_multiply(self): + d = 0x000001eb7f81785c9629f136a7e8f8c674957109735554111a2a866fa5a166699419bfa9936c78b62653964df0d6da940a695c7294d41b2d6600de6dfcf0edcfc89fdcb1 + e = 0x00000137e6b73d38f153c3a7575615812608f2bab3229c92e21c0d1c83cfad9261dbb17bb77a63682000031b9122c2f0cdab2af72314be95254de4291a8f85f7c70412e3 + pointRx = 0x0000009d3802642b3bea152beb9e05fba247790f7fc168072d363340133402f2585588dc1385d40ebcb8552f8db02b23d687cae46185b27528adb1bf9729716e4eba653d + pointRy = 0x0000000fe44344e79da6f49d87c1063744e5957d9ac0a505bafa8281c9ce9ff25ad53f8da084a2deb0923e46501de5797850c61b229023dd9cf7fc7f04cd35ebb026d89d + + pointR = self.pointS * d + pointR += self.pointT * e + self.assertEqual(pointR.x, pointRx) + self.assertEqual(pointR.y, pointRy) + + def test_sizes(self): + self.assertEqual(self.pointS.size_in_bits(), 521) + self.assertEqual(self.pointS.size_in_bytes(), 66) + + +class TestEccPoint_PAI_P192(unittest.TestCase): + """Test vectors from http://point-at-infinity.org/ecc/nisttv""" + + curve = _curves['p192'] + pointG = EccPoint(curve.Gx, curve.Gy, "p192") + + +tv_pai = load_test_vectors(("PublicKey", "ECC"), + "point-at-infinity.org-P192.txt", + "P-192 tests from point-at-infinity.org", + {"k": lambda k: int(k), + "x": lambda x: int(x, 16), + "y": lambda y: int(y, 16)}) or [] +for tv in tv_pai: + def new_test(self, scalar=tv.k, x=tv.x, y=tv.y): + result = self.pointG * scalar + self.assertEqual(result.x, x) + self.assertEqual(result.y, y) + setattr(TestEccPoint_PAI_P192, "test_%d" % tv.count, new_test) + + +class TestEccPoint_PAI_P224(unittest.TestCase): + """Test vectors from http://point-at-infinity.org/ecc/nisttv""" + + curve = _curves['p224'] + pointG = EccPoint(curve.Gx, curve.Gy, "p224") + + +tv_pai = load_test_vectors(("PublicKey", "ECC"), + "point-at-infinity.org-P224.txt", + "P-224 tests from point-at-infinity.org", + {"k": lambda k: int(k), + "x": lambda x: int(x, 16), + "y": lambda y: int(y, 16)}) or [] +for tv in tv_pai: + def new_test(self, scalar=tv.k, x=tv.x, y=tv.y): + result = self.pointG * scalar + 
self.assertEqual(result.x, x) + self.assertEqual(result.y, y) + setattr(TestEccPoint_PAI_P224, "test_%d" % tv.count, new_test) + + +class TestEccPoint_PAI_P256(unittest.TestCase): + """Test vectors from http://point-at-infinity.org/ecc/nisttv""" + + curve = _curves['p256'] + pointG = EccPoint(curve.Gx, curve.Gy, "p256") + + +tv_pai = load_test_vectors(("PublicKey", "ECC"), + "point-at-infinity.org-P256.txt", + "P-256 tests from point-at-infinity.org", + {"k": lambda k: int(k), + "x": lambda x: int(x, 16), + "y": lambda y: int(y, 16)}) or [] +for tv in tv_pai: + def new_test(self, scalar=tv.k, x=tv.x, y=tv.y): + result = self.pointG * scalar + self.assertEqual(result.x, x) + self.assertEqual(result.y, y) + setattr(TestEccPoint_PAI_P256, "test_%d" % tv.count, new_test) + + +class TestEccPoint_PAI_P384(unittest.TestCase): + """Test vectors from http://point-at-infinity.org/ecc/nisttv""" + + curve = _curves['p384'] + pointG = EccPoint(curve.Gx, curve.Gy, "p384") + + +tv_pai = load_test_vectors(("PublicKey", "ECC"), + "point-at-infinity.org-P384.txt", + "P-384 tests from point-at-infinity.org", + {"k": lambda k: int(k), + "x": lambda x: int(x, 16), + "y": lambda y: int(y, 16)}) or [] +for tv in tv_pai: + def new_test(self, scalar=tv.k, x=tv.x, y=tv.y): + result = self.pointG * scalar + self.assertEqual(result.x, x) + self.assertEqual(result.y, y) + setattr(TestEccPoint_PAI_P384, "test_%d" % tv.count, new_test) + + +class TestEccPoint_PAI_P521(unittest.TestCase): + """Test vectors from http://point-at-infinity.org/ecc/nisttv""" + + curve = _curves['p521'] + pointG = EccPoint(curve.Gx, curve.Gy, "p521") + + +tv_pai = load_test_vectors(("PublicKey", "ECC"), + "point-at-infinity.org-P521.txt", + "P-521 tests from point-at-infinity.org", + {"k": lambda k: int(k), + "x": lambda x: int(x, 16), + "y": lambda y: int(y, 16)}) or [] +for tv in tv_pai: + def new_test(self, scalar=tv.k, x=tv.x, y=tv.y): + result = self.pointG * scalar + self.assertEqual(result.x, x) + self.assertEqual(result.y, y) + setattr(TestEccPoint_PAI_P521, "test_%d" % tv.count, new_test) + + +class TestEccKey_P192(unittest.TestCase): + + def test_private_key(self): + + key = EccKey(curve="P-192", d=1) + self.assertEqual(key.d, 1) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ.x, _curves['p192'].Gx) + self.assertEqual(key.pointQ.y, _curves['p192'].Gy) + + point = EccPoint(_curves['p192'].Gx, _curves['p192'].Gy, curve='P-192') + key = EccKey(curve="P-192", d=1, point=point) + self.assertEqual(key.d, 1) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, point) + + # Other names + key = EccKey(curve="secp192r1", d=1) + key = EccKey(curve="prime192v1", d=1) + + def test_public_key(self): + + point = EccPoint(_curves['p192'].Gx, _curves['p192'].Gy, curve='P-192') + key = EccKey(curve="P-192", point=point) + self.assertFalse(key.has_private()) + self.assertEqual(key.pointQ, point) + + def test_public_key_derived(self): + + priv_key = EccKey(curve="P-192", d=3) + pub_key = priv_key.public_key() + self.assertFalse(pub_key.has_private()) + self.assertEqual(priv_key.pointQ, pub_key.pointQ) + + def test_invalid_curve(self): + self.assertRaises(ValueError, lambda: EccKey(curve="P-193", d=1)) + + def test_invalid_d(self): + self.assertRaises(ValueError, lambda: EccKey(curve="P-192", d=0)) + self.assertRaises(ValueError, lambda: EccKey(curve="P-192", + d=_curves['p192'].order)) + + def test_equality(self): + + private_key = ECC.construct(d=3, curve="P-192") + private_key2 = ECC.construct(d=3, curve="P-192") + 
private_key3 = ECC.construct(d=4, curve="P-192") + + public_key = private_key.public_key() + public_key2 = private_key2.public_key() + public_key3 = private_key3.public_key() + + self.assertEqual(private_key, private_key2) + self.assertNotEqual(private_key, private_key3) + + self.assertEqual(public_key, public_key2) + self.assertNotEqual(public_key, public_key3) + + self.assertNotEqual(public_key, private_key) + + def test_name_consistency(self): + key = ECC.generate(curve='p192') + self.assertIn("curve='NIST P-192'", repr(key)) + self.assertEqual(key.curve, 'NIST P-192') + self.assertEqual(key.public_key().curve, 'NIST P-192') + + +class TestEccKey_P224(unittest.TestCase): + + def test_private_key(self): + + key = EccKey(curve="P-224", d=1) + self.assertEqual(key.d, 1) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ.x, _curves['p224'].Gx) + self.assertEqual(key.pointQ.y, _curves['p224'].Gy) + + point = EccPoint(_curves['p224'].Gx, _curves['p224'].Gy, curve='P-224') + key = EccKey(curve="P-224", d=1, point=point) + self.assertEqual(key.d, 1) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, point) + + # Other names + key = EccKey(curve="secp224r1", d=1) + key = EccKey(curve="prime224v1", d=1) + + def test_public_key(self): + + point = EccPoint(_curves['p224'].Gx, _curves['p224'].Gy, curve='P-224') + key = EccKey(curve="P-224", point=point) + self.assertFalse(key.has_private()) + self.assertEqual(key.pointQ, point) + + def test_public_key_derived(self): + + priv_key = EccKey(curve="P-224", d=3) + pub_key = priv_key.public_key() + self.assertFalse(pub_key.has_private()) + self.assertEqual(priv_key.pointQ, pub_key.pointQ) + + def test_invalid_curve(self): + self.assertRaises(ValueError, lambda: EccKey(curve="P-225", d=1)) + + def test_invalid_d(self): + self.assertRaises(ValueError, lambda: EccKey(curve="P-224", d=0)) + self.assertRaises(ValueError, lambda: EccKey(curve="P-224", + d=_curves['p224'].order)) + + def test_equality(self): + + private_key = ECC.construct(d=3, curve="P-224") + private_key2 = ECC.construct(d=3, curve="P-224") + private_key3 = ECC.construct(d=4, curve="P-224") + + public_key = private_key.public_key() + public_key2 = private_key2.public_key() + public_key3 = private_key3.public_key() + + self.assertEqual(private_key, private_key2) + self.assertNotEqual(private_key, private_key3) + + self.assertEqual(public_key, public_key2) + self.assertNotEqual(public_key, public_key3) + + self.assertNotEqual(public_key, private_key) + + def test_name_consistency(self): + key = ECC.generate(curve='p224') + self.assertIn("curve='NIST P-224'", repr(key)) + self.assertEqual(key.curve, 'NIST P-224') + self.assertEqual(key.public_key().curve, 'NIST P-224') + + +class TestEccKey_P256(unittest.TestCase): + + def test_private_key(self): + + key = EccKey(curve="P-256", d=1) + self.assertEqual(key.d, 1) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ.x, _curves['p256'].Gx) + self.assertEqual(key.pointQ.y, _curves['p256'].Gy) + + point = EccPoint(_curves['p256'].Gx, _curves['p256'].Gy) + key = EccKey(curve="P-256", d=1, point=point) + self.assertEqual(key.d, 1) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, point) + + # Other names + key = EccKey(curve="secp256r1", d=1) + key = EccKey(curve="prime256v1", d=1) + + # Must not accept d parameter + self.assertRaises(ValueError, EccKey, curve="p256", seed=b'H'*32) + + def test_public_key(self): + + point = EccPoint(_curves['p256'].Gx, _curves['p256'].Gy) + key = 
EccKey(curve="P-256", point=point) + self.assertFalse(key.has_private()) + self.assertEqual(key.pointQ, point) + + def test_public_key_derived(self): + + priv_key = EccKey(curve="P-256", d=3) + pub_key = priv_key.public_key() + self.assertFalse(pub_key.has_private()) + self.assertEqual(priv_key.pointQ, pub_key.pointQ) + + def test_invalid_curve(self): + self.assertRaises(ValueError, lambda: EccKey(curve="P-257", d=1)) + + def test_invalid_d(self): + self.assertRaises(ValueError, lambda: EccKey(curve="P-256", d=0)) + self.assertRaises(ValueError, lambda: EccKey(curve="P-256", d=_curves['p256'].order)) + + def test_equality(self): + + private_key = ECC.construct(d=3, curve="P-256") + private_key2 = ECC.construct(d=3, curve="P-256") + private_key3 = ECC.construct(d=4, curve="P-256") + + public_key = private_key.public_key() + public_key2 = private_key2.public_key() + public_key3 = private_key3.public_key() + + self.assertEqual(private_key, private_key2) + self.assertNotEqual(private_key, private_key3) + + self.assertEqual(public_key, public_key2) + self.assertNotEqual(public_key, public_key3) + + self.assertNotEqual(public_key, private_key) + + def test_name_consistency(self): + key = ECC.generate(curve='p256') + self.assertIn("curve='NIST P-256'", repr(key)) + self.assertEqual(key.curve, 'NIST P-256') + self.assertEqual(key.public_key().curve, 'NIST P-256') + + +class TestEccKey_P384(unittest.TestCase): + + def test_private_key(self): + + p384 = _curves['p384'] + + key = EccKey(curve="P-384", d=1) + self.assertEqual(key.d, 1) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ.x, p384.Gx) + self.assertEqual(key.pointQ.y, p384.Gy) + + point = EccPoint(p384.Gx, p384.Gy, "p384") + key = EccKey(curve="P-384", d=1, point=point) + self.assertEqual(key.d, 1) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, point) + + # Other names + key = EccKey(curve="p384", d=1) + key = EccKey(curve="secp384r1", d=1) + key = EccKey(curve="prime384v1", d=1) + + def test_public_key(self): + + p384 = _curves['p384'] + point = EccPoint(p384.Gx, p384.Gy, 'p384') + key = EccKey(curve="P-384", point=point) + self.assertFalse(key.has_private()) + self.assertEqual(key.pointQ, point) + + def test_public_key_derived(self): + + priv_key = EccKey(curve="P-384", d=3) + pub_key = priv_key.public_key() + self.assertFalse(pub_key.has_private()) + self.assertEqual(priv_key.pointQ, pub_key.pointQ) + + def test_invalid_curve(self): + self.assertRaises(ValueError, lambda: EccKey(curve="P-385", d=1)) + + def test_invalid_d(self): + self.assertRaises(ValueError, lambda: EccKey(curve="P-384", d=0)) + self.assertRaises(ValueError, lambda: EccKey(curve="P-384", + d=_curves['p384'].order)) + + def test_equality(self): + + private_key = ECC.construct(d=3, curve="P-384") + private_key2 = ECC.construct(d=3, curve="P-384") + private_key3 = ECC.construct(d=4, curve="P-384") + + public_key = private_key.public_key() + public_key2 = private_key2.public_key() + public_key3 = private_key3.public_key() + + self.assertEqual(private_key, private_key2) + self.assertNotEqual(private_key, private_key3) + + self.assertEqual(public_key, public_key2) + self.assertNotEqual(public_key, public_key3) + + self.assertNotEqual(public_key, private_key) + + def test_name_consistency(self): + key = ECC.generate(curve='p384') + self.assertIn("curve='NIST P-384'", repr(key)) + self.assertEqual(key.curve, 'NIST P-384') + self.assertEqual(key.public_key().curve, 'NIST P-384') + + +class TestEccKey_P521(unittest.TestCase): + + def 
test_private_key(self): + + p521 = _curves['p521'] + + key = EccKey(curve="P-521", d=1) + self.assertEqual(key.d, 1) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ.x, p521.Gx) + self.assertEqual(key.pointQ.y, p521.Gy) + + point = EccPoint(p521.Gx, p521.Gy, "p521") + key = EccKey(curve="P-521", d=1, point=point) + self.assertEqual(key.d, 1) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, point) + + # Other names + key = EccKey(curve="p521", d=1) + key = EccKey(curve="secp521r1", d=1) + key = EccKey(curve="prime521v1", d=1) + + def test_public_key(self): + + p521 = _curves['p521'] + point = EccPoint(p521.Gx, p521.Gy, 'p521') + key = EccKey(curve="P-384", point=point) + self.assertFalse(key.has_private()) + self.assertEqual(key.pointQ, point) + + def test_public_key_derived(self): + + priv_key = EccKey(curve="P-521", d=3) + pub_key = priv_key.public_key() + self.assertFalse(pub_key.has_private()) + self.assertEqual(priv_key.pointQ, pub_key.pointQ) + + def test_invalid_curve(self): + self.assertRaises(ValueError, lambda: EccKey(curve="P-522", d=1)) + + def test_invalid_d(self): + self.assertRaises(ValueError, lambda: EccKey(curve="P-521", d=0)) + self.assertRaises(ValueError, lambda: EccKey(curve="P-521", + d=_curves['p521'].order)) + + def test_equality(self): + + private_key = ECC.construct(d=3, curve="P-521") + private_key2 = ECC.construct(d=3, curve="P-521") + private_key3 = ECC.construct(d=4, curve="P-521") + + public_key = private_key.public_key() + public_key2 = private_key2.public_key() + public_key3 = private_key3.public_key() + + self.assertEqual(private_key, private_key2) + self.assertNotEqual(private_key, private_key3) + + self.assertEqual(public_key, public_key2) + self.assertNotEqual(public_key, public_key3) + + self.assertNotEqual(public_key, private_key) + + def test_name_consistency(self): + key = ECC.generate(curve='p521') + self.assertIn("curve='NIST P-521'", repr(key)) + self.assertEqual(key.curve, 'NIST P-521') + self.assertEqual(key.public_key().curve, 'NIST P-521') + + +class TestEccModule_P192(unittest.TestCase): + + def test_generate(self): + + key = ECC.generate(curve="P-192") + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, EccPoint(_curves['p192'].Gx, + _curves['p192'].Gy, + "P-192") * key.d, + "p192") + + # Other names + ECC.generate(curve="secp192r1") + ECC.generate(curve="prime192v1") + + def test_construct(self): + + key = ECC.construct(curve="P-192", d=1) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, _curves['p192'].G) + + key = ECC.construct(curve="P-192", point_x=_curves['p192'].Gx, + point_y=_curves['p192'].Gy) + self.assertFalse(key.has_private()) + self.assertEqual(key.pointQ, _curves['p192'].G) + + # Other names + ECC.construct(curve="p192", d=1) + ECC.construct(curve="secp192r1", d=1) + ECC.construct(curve="prime192v1", d=1) + + def test_negative_construct(self): + coord = dict(point_x=10, point_y=4) + coordG = dict(point_x=_curves['p192'].Gx, point_y=_curves['p192'].Gy) + + self.assertRaises(ValueError, ECC.construct, curve="P-192", **coord) + self.assertRaises(ValueError, ECC.construct, curve="P-192", d=2, **coordG) + + +class TestEccModule_P224(unittest.TestCase): + + def test_generate(self): + + key = ECC.generate(curve="P-224") + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, EccPoint(_curves['p224'].Gx, + _curves['p224'].Gy, + "P-224") * key.d, + "p224") + + # Other names + ECC.generate(curve="secp224r1") + ECC.generate(curve="prime224v1") + + def 
test_construct(self): + + key = ECC.construct(curve="P-224", d=1) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, _curves['p224'].G) + + key = ECC.construct(curve="P-224", point_x=_curves['p224'].Gx, + point_y=_curves['p224'].Gy) + self.assertFalse(key.has_private()) + self.assertEqual(key.pointQ, _curves['p224'].G) + + # Other names + ECC.construct(curve="p224", d=1) + ECC.construct(curve="secp224r1", d=1) + ECC.construct(curve="prime224v1", d=1) + + def test_negative_construct(self): + coord = dict(point_x=10, point_y=4) + coordG = dict(point_x=_curves['p224'].Gx, point_y=_curves['p224'].Gy) + + self.assertRaises(ValueError, ECC.construct, curve="P-224", **coord) + self.assertRaises(ValueError, ECC.construct, curve="P-224", d=2, **coordG) + + +class TestEccModule_P256(unittest.TestCase): + + def test_generate(self): + + key = ECC.generate(curve="P-256") + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, EccPoint(_curves['p256'].Gx, + _curves['p256'].Gy) * key.d, + "p256") + + # Other names + ECC.generate(curve="secp256r1") + ECC.generate(curve="prime256v1") + + def test_construct(self): + + key = ECC.construct(curve="P-256", d=1) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, _curves['p256'].G) + + key = ECC.construct(curve="P-256", point_x=_curves['p256'].Gx, + point_y=_curves['p256'].Gy) + self.assertFalse(key.has_private()) + self.assertEqual(key.pointQ, _curves['p256'].G) + + # Other names + ECC.construct(curve="p256", d=1) + ECC.construct(curve="secp256r1", d=1) + ECC.construct(curve="prime256v1", d=1) + + def test_negative_construct(self): + coord = dict(point_x=10, point_y=4) + coordG = dict(point_x=_curves['p256'].Gx, point_y=_curves['p256'].Gy) + + self.assertRaises(ValueError, ECC.construct, curve="P-256", **coord) + self.assertRaises(ValueError, ECC.construct, curve="P-256", d=2, **coordG) + + +class TestEccModule_P384(unittest.TestCase): + + def test_generate(self): + + curve = _curves['p384'] + key = ECC.generate(curve="P-384") + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, EccPoint(curve.Gx, curve.Gy, "p384") * key.d) + + # Other names + ECC.generate(curve="secp384r1") + ECC.generate(curve="prime384v1") + + def test_construct(self): + + curve = _curves['p384'] + key = ECC.construct(curve="P-384", d=1) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, _curves['p384'].G) + + key = ECC.construct(curve="P-384", point_x=curve.Gx, point_y=curve.Gy) + self.assertFalse(key.has_private()) + self.assertEqual(key.pointQ, curve.G) + + # Other names + ECC.construct(curve="p384", d=1) + ECC.construct(curve="secp384r1", d=1) + ECC.construct(curve="prime384v1", d=1) + + def test_negative_construct(self): + coord = dict(point_x=10, point_y=4) + coordG = dict(point_x=_curves['p384'].Gx, point_y=_curves['p384'].Gy) + + self.assertRaises(ValueError, ECC.construct, curve="P-384", **coord) + self.assertRaises(ValueError, ECC.construct, curve="P-384", d=2, **coordG) + + +class TestEccModule_P521(unittest.TestCase): + + def test_generate(self): + + curve = _curves['p521'] + key = ECC.generate(curve="P-521") + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, EccPoint(curve.Gx, curve.Gy, "p521") * key.d) + + # Other names + ECC.generate(curve="secp521r1") + ECC.generate(curve="prime521v1") + + def test_construct(self): + + curve = _curves['p521'] + key = ECC.construct(curve="P-521", d=1) + self.assertTrue(key.has_private()) + self.assertEqual(key.pointQ, _curves['p521'].G) + + key = 
ECC.construct(curve="P-521", point_x=curve.Gx, point_y=curve.Gy) + self.assertFalse(key.has_private()) + self.assertEqual(key.pointQ, curve.G) + + # Other names + ECC.construct(curve="p521", d=1) + ECC.construct(curve="secp521r1", d=1) + ECC.construct(curve="prime521v1", d=1) + + def test_negative_construct(self): + coord = dict(point_x=10, point_y=4) + coordG = dict(point_x=_curves['p521'].Gx, point_y=_curves['p521'].Gy) + + self.assertRaises(ValueError, ECC.construct, curve="P-521", **coord) + self.assertRaises(ValueError, ECC.construct, curve="P-521", d=2, **coordG) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(TestEccPoint) + tests += list_test_cases(TestEccPoint_NIST_P192) + tests += list_test_cases(TestEccPoint_NIST_P224) + tests += list_test_cases(TestEccPoint_NIST_P256) + tests += list_test_cases(TestEccPoint_NIST_P384) + tests += list_test_cases(TestEccPoint_NIST_P521) + tests += list_test_cases(TestEccPoint_PAI_P192) + tests += list_test_cases(TestEccPoint_PAI_P224) + tests += list_test_cases(TestEccPoint_PAI_P256) + tests += list_test_cases(TestEccPoint_PAI_P384) + tests += list_test_cases(TestEccPoint_PAI_P521) + tests += list_test_cases(TestEccKey_P192) + tests += list_test_cases(TestEccKey_P224) + tests += list_test_cases(TestEccKey_P256) + tests += list_test_cases(TestEccKey_P384) + tests += list_test_cases(TestEccKey_P521) + tests += list_test_cases(TestEccModule_P192) + tests += list_test_cases(TestEccModule_P224) + tests += list_test_cases(TestEccModule_P256) + tests += list_test_cases(TestEccModule_P384) + tests += list_test_cases(TestEccModule_P521) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py new file mode 100644 index 000000000..0b394aef0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py @@ -0,0 +1,217 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/PublicKey/test_ElGamal.py: Self-test for the ElGamal primitive +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.PublicKey.ElGamal""" + +__revision__ = "$Id$" + +import unittest +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex +from Crypto import Random +from Crypto.PublicKey import ElGamal +from Crypto.Util.number import bytes_to_long +from Crypto.Util.py3compat import * + +class ElGamalTest(unittest.TestCase): + + # + # Test vectors + # + # There seem to be no real ElGamal test vectors available in the + # public domain. The following test vectors have been generated + # with libgcrypt 1.5.0. + # + # Encryption + tve=[ + { + # 256 bits + 'p' :'BA4CAEAAED8CBE952AFD2126C63EB3B345D65C2A0A73D2A3AD4138B6D09BD933', + 'g' :'05', + 'y' :'60D063600ECED7C7C55146020E7A31C4476E9793BEAED420FEC9E77604CAE4EF', + 'x' :'1D391BA2EE3C37FE1BA175A69B2C73A11238AD77675932', + 'k' :'F5893C5BAB4131264066F57AB3D8AD89E391A0B68A68A1', + 'pt' :'48656C6C6F207468657265', + 'ct1':'32BFD5F487966CEA9E9356715788C491EC515E4ED48B58F0F00971E93AAA5EC7', + 'ct2':'7BE8FBFF317C93E82FCEF9BD515284BA506603FEA25D01C0CB874A31F315EE68' + }, + + { + # 512 bits + 'p' :'F1B18AE9F7B4E08FDA9A04832F4E919D89462FD31BF12F92791A93519F75076D6CE3942689CDFF2F344CAFF0F82D01864F69F3AECF566C774CBACF728B81A227', + 'g' :'07', + 'y' :'688628C676E4F05D630E1BE39D0066178CA7AA83836B645DE5ADD359B4825A12B02EF4252E4E6FA9BEC1DB0BE90F6D7C8629CABB6E531F472B2664868156E20C', + 'x' :'14E60B1BDFD33436C0DA8A22FDC14A2CCDBBED0627CE68', + 'k' :'38DBF14E1F319BDA9BAB33EEEADCAF6B2EA5250577ACE7', + 'pt' :'48656C6C6F207468657265', + 'ct1':'290F8530C2CC312EC46178724F196F308AD4C523CEABB001FACB0506BFED676083FE0F27AC688B5C749AB3CB8A80CD6F7094DBA421FB19442F5A413E06A9772B', + 'ct2':'1D69AAAD1DC50493FB1B8E8721D621D683F3BF1321BE21BC4A43E11B40C9D4D9C80DE3AAC2AB60D31782B16B61112E68220889D53C4C3136EE6F6CE61F8A23A0' + } + ] + + # Signature + tvs=[ + { + # 256 bits + 'p' :'D2F3C41EA66530838A704A48FFAC9334F4701ECE3A97CEE4C69DD01AE7129DD7', + 'g' :'05', + 'y' :'C3F9417DC0DAFEA6A05C1D2333B7A95E63B3F4F28CC962254B3256984D1012E7', + 'x' :'165E4A39BE44D5A2D8B1332D416BC559616F536BC735BB', + 'k' :'C7F0C794A7EAD726E25A47FF8928013680E73C51DD3D7D99BFDA8F492585928F', + 'h' :'48656C6C6F207468657265', + 'sig1':'35CA98133779E2073EF31165AFCDEB764DD54E96ADE851715495F9C635E1E7C2', + 'sig2':'0135B88B1151279FE5D8078D4FC685EE81177EE9802AB123A73925FC1CB059A7', + }, + { + # 512 bits + 'p' :'E24CF3A4B8A6AF749DCA6D714282FE4AABEEE44A53BB6ED15FBE32B5D3C3EF9CC4124A2ECA331F3C1C1B667ACA3766825217E7B5F9856648D95F05330C6A19CF', + 'g' :'0B', + 'y' :'2AD3A1049CA5D4ED207B2431C79A8719BB4073D4A94E450EA6CEE8A760EB07ADB67C0D52C275EE85D7B52789061EE45F2F37D9B2AE522A51C28329766BFE68AC', + 'x' :'16CBB4F46D9ECCF24FF9F7E63CAA3BD8936341555062AB', + 'k' :'8A3D89A4E429FD2476D7D717251FB79BF900FFE77444E6BB8299DC3F84D0DD57ABAB50732AE158EA52F5B9E7D8813E81FD9F79470AE22F8F1CF9AEC820A78C69', + 'h' :'48656C6C6F207468657265', + 'sig1':'BE001AABAFFF976EC9016198FBFEA14CBEF96B000CCC0063D3324016F9E91FE80D8F9325812ED24DDB2B4D4CF4430B169880B3CE88313B53255BD4EC0378586F', + 'sig2':'5E266F3F837BA204E3BBB6DBECC0611429D96F8C7CE8F4EFDF9D4CB681C2A954468A357BF4242CEC7418B51DFC081BCD21299EF5B5A0DDEF3A139A1817503DDE', + } + ] + + def test_generate_180(self): + self._test_random_key(180) + + def test_encryption(self): + for tv in self.tve: + d = self.convert_tv(tv, True) + key = ElGamal.construct(d['key']) + ct = key._encrypt(d['pt'], d['k']) + self.assertEqual(ct[0], d['ct1']) + self.assertEqual(ct[1], d['ct2']) + + def test_decryption(self): + 
for tv in self.tve: + d = self.convert_tv(tv, True) + key = ElGamal.construct(d['key']) + pt = key._decrypt((d['ct1'], d['ct2'])) + self.assertEqual(pt, d['pt']) + + def test_signing(self): + for tv in self.tvs: + d = self.convert_tv(tv, True) + key = ElGamal.construct(d['key']) + sig1, sig2 = key._sign(d['h'], d['k']) + self.assertEqual(sig1, d['sig1']) + self.assertEqual(sig2, d['sig2']) + + def test_verification(self): + for tv in self.tvs: + d = self.convert_tv(tv, True) + key = ElGamal.construct(d['key']) + # Positive test + res = key._verify( d['h'], (d['sig1'],d['sig2']) ) + self.assertTrue(res) + # Negative test + res = key._verify( d['h'], (d['sig1']+1,d['sig2']) ) + self.assertFalse(res) + + def test_bad_key3(self): + tup = tup0 = list(self.convert_tv(self.tvs[0], 1)['key'])[:3] + tup[0] += 1 # p += 1 (not prime) + self.assertRaises(ValueError, ElGamal.construct, tup) + + tup = tup0 + tup[1] = 1 # g = 1 + self.assertRaises(ValueError, ElGamal.construct, tup) + + tup = tup0 + tup[2] = tup[0]*2 # y = 2*p + self.assertRaises(ValueError, ElGamal.construct, tup) + + def test_bad_key4(self): + tup = tup0 = list(self.convert_tv(self.tvs[0], 1)['key']) + tup[3] += 1 # x += 1 + self.assertRaises(ValueError, ElGamal.construct, tup) + + def convert_tv(self, tv, as_longs=0): + """Convert a test vector from textual form (hexadecimal ascii + to either integers or byte strings.""" + key_comps = 'p','g','y','x' + tv2 = {} + for c in tv.keys(): + tv2[c] = a2b_hex(tv[c]) + if as_longs or c in key_comps or c in ('sig1','sig2'): + tv2[c] = bytes_to_long(tv2[c]) + tv2['key']=[] + for c in key_comps: + tv2['key'] += [tv2[c]] + del tv2[c] + return tv2 + + def _test_random_key(self, bits): + elgObj = ElGamal.generate(bits, Random.new().read) + self._check_private_key(elgObj) + self._exercise_primitive(elgObj) + pub = elgObj.publickey() + self._check_public_key(pub) + self._exercise_public_primitive(elgObj) + + def _check_private_key(self, elgObj): + + # Check capabilities + self.assertTrue(elgObj.has_private()) + + # Sanity check key data + self.assertTrue(1 +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.PublicKey.RSA""" + +__revision__ = "$Id$" + +import os +import pickle +from pickle import PicklingError +from Crypto.Util.py3compat import * + +import unittest +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex + +class RSATest(unittest.TestCase): + # Test vectors from "RSA-OAEP and RSA-PSS test vectors (.zip file)" + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # See RSADSI's PKCS#1 page at + # http://www.rsa.com/rsalabs/node.asp?id=2125 + + # from oaep-int.txt + + # TODO: PyCrypto treats the message as starting *after* the leading "00" + # TODO: That behaviour should probably be changed in the future. + plaintext = """ + eb 7a 19 ac e9 e3 00 63 50 e3 29 50 4b 45 e2 + ca 82 31 0b 26 dc d8 7d 5c 68 f1 ee a8 f5 52 67 + c3 1b 2e 8b b4 25 1f 84 d7 e0 b2 c0 46 26 f5 af + f9 3e dc fb 25 c9 c2 b3 ff 8a e1 0e 83 9a 2d db + 4c dc fe 4f f4 77 28 b4 a1 b7 c1 36 2b aa d2 9a + b4 8d 28 69 d5 02 41 21 43 58 11 59 1b e3 92 f9 + 82 fb 3e 87 d0 95 ae b4 04 48 db 97 2f 3a c1 4f + 7b c2 75 19 52 81 ce 32 d2 f1 b7 6d 4d 35 3e 2d + """ + + ciphertext = """ + 12 53 e0 4d c0 a5 39 7b b4 4a 7a b8 7e 9b f2 a0 + 39 a3 3d 1e 99 6f c8 2a 94 cc d3 00 74 c9 5d f7 + 63 72 20 17 06 9e 52 68 da 5d 1c 0b 4f 87 2c f6 + 53 c1 1d f8 23 14 a6 79 68 df ea e2 8d ef 04 bb + 6d 84 b1 c3 1d 65 4a 19 70 e5 78 3b d6 eb 96 a0 + 24 c2 ca 2f 4a 90 fe 9f 2e f5 c9 c1 40 e5 bb 48 + da 95 36 ad 87 00 c8 4f c9 13 0a de a7 4e 55 8d + 51 a7 4d df 85 d8 b5 0d e9 68 38 d6 06 3e 09 55 + """ + + modulus = """ + bb f8 2f 09 06 82 ce 9c 23 38 ac 2b 9d a8 71 f7 + 36 8d 07 ee d4 10 43 a4 40 d6 b6 f0 74 54 f5 1f + b8 df ba af 03 5c 02 ab 61 ea 48 ce eb 6f cd 48 + 76 ed 52 0d 60 e1 ec 46 19 71 9d 8a 5b 8b 80 7f + af b8 e0 a3 df c7 37 72 3e e6 b4 b7 d9 3a 25 84 + ee 6a 64 9d 06 09 53 74 88 34 b2 45 45 98 39 4e + e0 aa b1 2d 7b 61 a5 1f 52 7a 9a 41 f6 c1 68 7f + e2 53 72 98 ca 2a 8f 59 46 f8 e5 fd 09 1d bd cb + """ + + e = 0x11 # public exponent + + prime_factor = """ + c9 7f b1 f0 27 f4 53 f6 34 12 33 ea aa d1 d9 35 + 3f 6c 42 d0 88 66 b1 d0 5a 0f 20 35 02 8b 9d 86 + 98 40 b4 16 66 b4 2e 92 ea 0d a3 b4 32 04 b5 cf + ce 33 52 52 4d 04 16 a5 a4 41 e7 00 af 46 15 03 + """ + + def setUp(self): + global RSA, Random, bytes_to_long + from Crypto.PublicKey import RSA + from Crypto import Random + from Crypto.Util.number import bytes_to_long, inverse + self.n = bytes_to_long(a2b_hex(self.modulus)) + self.p = bytes_to_long(a2b_hex(self.prime_factor)) + + # Compute q, d, and u from n, e, and p + self.q = self.n // self.p + self.d = inverse(self.e, (self.p-1)*(self.q-1)) + self.u = inverse(self.p, self.q) # u = e**-1 (mod q) + + self.rsa = RSA + + def test_generate_1arg(self): + """RSA (default implementation) generated key (1 argument)""" + rsaObj = self.rsa.generate(1024) + self._check_private_key(rsaObj) + self._exercise_primitive(rsaObj) + pub = rsaObj.public_key() + self._check_public_key(pub) + self._exercise_public_primitive(rsaObj) + + def test_generate_2arg(self): + """RSA (default implementation) generated key (2 arguments)""" + rsaObj = self.rsa.generate(1024, Random.new().read) + self._check_private_key(rsaObj) + self._exercise_primitive(rsaObj) + pub = rsaObj.public_key() + self._check_public_key(pub) + self._exercise_public_primitive(rsaObj) + + def test_generate_3args(self): + rsaObj = self.rsa.generate(1024, Random.new().read,e=65537) + self._check_private_key(rsaObj) + self._exercise_primitive(rsaObj) + 
pub = rsaObj.public_key() + self._check_public_key(pub) + self._exercise_public_primitive(rsaObj) + self.assertEqual(65537,rsaObj.e) + + def test_construct_2tuple(self): + """RSA (default implementation) constructed key (2-tuple)""" + pub = self.rsa.construct((self.n, self.e)) + self._check_public_key(pub) + self._check_encryption(pub) + + def test_construct_3tuple(self): + """RSA (default implementation) constructed key (3-tuple)""" + rsaObj = self.rsa.construct((self.n, self.e, self.d)) + self._check_encryption(rsaObj) + self._check_decryption(rsaObj) + + def test_construct_4tuple(self): + """RSA (default implementation) constructed key (4-tuple)""" + rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p)) + self._check_encryption(rsaObj) + self._check_decryption(rsaObj) + + def test_construct_5tuple(self): + """RSA (default implementation) constructed key (5-tuple)""" + rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p, self.q)) + self._check_private_key(rsaObj) + self._check_encryption(rsaObj) + self._check_decryption(rsaObj) + + def test_construct_6tuple(self): + """RSA (default implementation) constructed key (6-tuple)""" + rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p, self.q, self.u)) + self._check_private_key(rsaObj) + self._check_encryption(rsaObj) + self._check_decryption(rsaObj) + + def test_construct_bad_key2(self): + tup = (self.n, 1) + self.assertRaises(ValueError, self.rsa.construct, tup) + + # An even modulus is wrong + tup = (self.n+1, self.e) + self.assertRaises(ValueError, self.rsa.construct, tup) + + def test_construct_bad_key3(self): + tup = (self.n, self.e, self.d+1) + self.assertRaises(ValueError, self.rsa.construct, tup) + + def test_construct_bad_key5(self): + tup = (self.n, self.e, self.d, self.p, self.p) + self.assertRaises(ValueError, self.rsa.construct, tup) + + tup = (self.p*self.p, self.e, self.p, self.p) + self.assertRaises(ValueError, self.rsa.construct, tup) + + tup = (self.p*self.p, 3, self.p, self.q) + self.assertRaises(ValueError, self.rsa.construct, tup) + + def test_construct_bad_key6(self): + tup = (self.n, self.e, self.d, self.p, self.q, 10) + self.assertRaises(ValueError, self.rsa.construct, tup) + + from Crypto.Util.number import inverse + tup = (self.n, self.e, self.d, self.p, self.q, inverse(self.q, self.p)) + self.assertRaises(ValueError, self.rsa.construct, tup) + + def test_factoring(self): + rsaObj = self.rsa.construct([self.n, self.e, self.d]) + self.assertTrue(rsaObj.p==self.p or rsaObj.p==self.q) + self.assertTrue(rsaObj.q==self.p or rsaObj.q==self.q) + self.assertTrue(rsaObj.q*rsaObj.p == self.n) + + self.assertRaises(ValueError, self.rsa.construct, [self.n, self.e, self.n-1]) + + def test_repr(self): + rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p, self.q)) + repr(rsaObj) + + def test_serialization(self): + """RSA keys are unpickable""" + + rsa_key = self.rsa.generate(1024) + self.assertRaises(PicklingError, pickle.dumps, rsa_key) + + def test_raw_rsa_boundary(self): + # The argument of every RSA raw operation (encrypt/decrypt) must be + # non-negative and no larger than the modulus + rsa_obj = self.rsa.generate(1024) + + self.assertRaises(ValueError, rsa_obj._decrypt, rsa_obj.n) + self.assertRaises(ValueError, rsa_obj._decrypt_to_bytes, rsa_obj.n) + self.assertRaises(ValueError, rsa_obj._encrypt, rsa_obj.n) + + self.assertRaises(ValueError, rsa_obj._decrypt, -1) + self.assertRaises(ValueError, rsa_obj._decrypt_to_bytes, -1) + self.assertRaises(ValueError, rsa_obj._encrypt, -1) + + def 
test_size(self): + pub = self.rsa.construct((self.n, self.e)) + self.assertEqual(pub.size_in_bits(), 1024) + self.assertEqual(pub.size_in_bytes(), 128) + + def _check_private_key(self, rsaObj): + from Crypto.Math.Numbers import Integer + + # Check capabilities + self.assertEqual(1, rsaObj.has_private()) + + # Sanity check key data + self.assertEqual(rsaObj.n, rsaObj.p * rsaObj.q) # n = pq + lcm = int(Integer(rsaObj.p-1).lcm(rsaObj.q-1)) + self.assertEqual(1, rsaObj.d * rsaObj.e % lcm) # ed = 1 (mod LCM(p-1, q-1)) + self.assertEqual(1, rsaObj.p * rsaObj.u % rsaObj.q) # pu = 1 (mod q) + self.assertEqual(1, rsaObj.p > 1) # p > 1 + self.assertEqual(1, rsaObj.q > 1) # q > 1 + self.assertEqual(1, rsaObj.e > 1) # e > 1 + self.assertEqual(1, rsaObj.d > 1) # d > 1 + + self.assertEqual(rsaObj.u, rsaObj.invp) + self.assertEqual(1, rsaObj.q * rsaObj.invq % rsaObj.p) + + def _check_public_key(self, rsaObj): + ciphertext = a2b_hex(self.ciphertext) + + # Check capabilities + self.assertEqual(0, rsaObj.has_private()) + + # Check rsaObj.[ne] -> rsaObj.[ne] mapping + self.assertEqual(rsaObj.n, rsaObj.n) + self.assertEqual(rsaObj.e, rsaObj.e) + + # Check that private parameters are all missing + self.assertEqual(0, hasattr(rsaObj, 'd')) + self.assertEqual(0, hasattr(rsaObj, 'p')) + self.assertEqual(0, hasattr(rsaObj, 'q')) + self.assertEqual(0, hasattr(rsaObj, 'u')) + + # Sanity check key data + self.assertEqual(1, rsaObj.e > 1) # e > 1 + + # Public keys should not be able to sign or decrypt + self.assertRaises(TypeError, rsaObj._decrypt, + bytes_to_long(ciphertext)) + self.assertRaises(TypeError, rsaObj._decrypt_to_bytes, + bytes_to_long(ciphertext)) + + # Check __eq__ and __ne__ + self.assertEqual(rsaObj.public_key() == rsaObj.public_key(),True) # assert_ + self.assertEqual(rsaObj.public_key() != rsaObj.public_key(),False) # assertFalse + + self.assertEqual(rsaObj.publickey(), rsaObj.public_key()) + + def _exercise_primitive(self, rsaObj): + # Since we're using a randomly-generated key, we can't check the test + # vector, but we can make sure encryption and decryption are inverse + # operations. 
+ ciphertext = bytes_to_long(a2b_hex(self.ciphertext)) + + # Test decryption + plaintext = rsaObj._decrypt(ciphertext) + + # Test encryption (2 arguments) + new_ciphertext2 = rsaObj._encrypt(plaintext) + self.assertEqual(ciphertext, new_ciphertext2) + + def _exercise_public_primitive(self, rsaObj): + plaintext = a2b_hex(self.plaintext) + + # Test encryption (2 arguments) + new_ciphertext2 = rsaObj._encrypt(bytes_to_long(plaintext)) + + def _check_encryption(self, rsaObj): + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + + # Test encryption + new_ciphertext2 = rsaObj._encrypt(bytes_to_long(plaintext)) + self.assertEqual(bytes_to_long(ciphertext), new_ciphertext2) + + def _check_decryption(self, rsaObj): + plaintext = bytes_to_long(a2b_hex(self.plaintext)) + ciphertext = bytes_to_long(a2b_hex(self.ciphertext)) + + # Test plain decryption + new_plaintext = rsaObj._decrypt(ciphertext) + self.assertEqual(plaintext, new_plaintext) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(RSATest) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_import_DSA.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_import_DSA.py new file mode 100644 index 000000000..266b46f01 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_import_DSA.py @@ -0,0 +1,554 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/PublicKey/test_import_DSA.py: Self-test for importing DSA keys +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
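# --- Editor's aside (illustrative sketch, not part of the vendored PyCryptodome
# --- sources): the import/export round-trips exercised by the DSA tests below can
# --- be reproduced with a freshly generated key; the passphrase and variable names
# --- are invented for the example and PyCryptodome's default PKCS#8 protection is
# --- assumed.
from Crypto.PublicKey import DSA

demo_key = DSA.generate(2048)
demo_pem = demo_key.export_key(format='PEM', pkcs8=True, passphrase='secret')
assert b'ENCRYPTED PRIVATE KEY' in demo_pem            # output is password-protected
restored = DSA.import_key(demo_pem, passphrase='secret')
assert (restored.p, restored.q, restored.g, restored.y, restored.x) == \
       (demo_key.p, demo_key.q, demo_key.g, demo_key.y, demo_key.x)
# --- end of editor's aside ---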
+# =================================================================== + +import unittest +import re + +from Crypto.PublicKey import DSA +from Crypto.SelfTest.st_common import * +from Crypto.Util.py3compat import * + +from binascii import unhexlify + +class ImportKeyTests(unittest.TestCase): + + y = 92137165128186062214622779787483327510946462589285775188003362705875131352591574106484271700740858696583623951844732128165434284507709057439633739849986759064015013893156866539696757799934634945787496920169462601722830899660681779448742875054459716726855443681559131362852474817534616736104831095601710736729 + p = 162452170958135306109773853318304545923250830605675936228618290525164105310663722368377131295055868997377338797580997938253236213714988311430600065853662861806894003694743806769284131194035848116051021923956699231855223389086646903420682639786976554552864568460372266462812137447840653688476258666833303658691 + q = 988791743931120302950649732173330531512663554851 + g = 85583152299197514738065570254868711517748965097380456700369348466136657764813442044039878840094809620913085570225318356734366886985903212775602770761953571967834823306046501307810937486758039063386311593890777319935391363872375452381836756832784184928202587843258855704771836753434368484556809100537243908232 + x = 540873410045082450874416847965843801027716145253 + + def setUp(self): + + # It is easier to write test vectors in text form, + # and convert them to byte strigs dynamically here + for mname, mvalue in ImportKeyTests.__dict__.items(): + if mname[:4] in ('der_', 'pem_', 'ssh_'): + if mname[:4] == 'der_': + mvalue = unhexlify(tobytes(mvalue)) + mvalue = tobytes(mvalue) + setattr(self, mname, mvalue) + + # 1. SubjectPublicKeyInfo + der_public=\ + '308201b73082012b06072a8648ce3804013082011e02818100e756ee1717f4b6'+\ + '794c7c214724a19763742c45572b4b3f8ff3b44f3be9f44ce039a2757695ec91'+\ + '5697da74ef914fcd1b05660e2419c761d639f45d2d79b802dbd23e7ab8b81b47'+\ + '9a380e1f30932584ba2a0b955032342ebc83cb5ca906e7b0d7cd6fe656cecb4c'+\ + '8b5a77123a8c6750a481e3b06057aff6aa6eba620b832d60c3021500ad32f48c'+\ + 'd3ae0c45a198a61fa4b5e20320763b2302818079dfdc3d614fe635fceb7eaeae'+\ + '3718dc2efefb45282993ac6749dc83c223d8c1887296316b3b0b54466cf444f3'+\ + '4b82e3554d0b90a778faaf1306f025dae6a3e36c7f93dd5bac4052b92370040a'+\ + 'ca70b8d5820599711900efbc961812c355dd9beffe0981da85c5548074b41c56'+\ + 'ae43fd300d89262e4efd89943f99a651b03888038185000281810083352a69a1'+\ + '32f34843d2a0eb995bff4e2f083a73f0049d2c91ea2f0ce43d144abda48199e4'+\ + 'b003c570a8af83303d45105f606c5c48d925a40ed9c2630c2fa4cdbf838539de'+\ + 'b9a29f919085f2046369f627ca84b2cb1e2c7940564b670f963ab1164d4e2ca2'+\ + 'bf6ffd39f12f548928bf4d2d1b5e6980b4f1be4c92a91986fba559' + + def testImportKey1(self): + key_obj = DSA.importKey(self.der_public) + self.assertFalse(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + + def testExportKey1(self): + tup = (self.y, self.g, self.p, self.q) + key = DSA.construct(tup) + encoded = key.export_key('DER') + self.assertEqual(self.der_public, encoded) + + # 2. 
+ pem_public="""\ +-----BEGIN PUBLIC KEY----- +MIIBtzCCASsGByqGSM44BAEwggEeAoGBAOdW7hcX9LZ5THwhRyShl2N0LEVXK0s/ +j/O0Tzvp9EzgOaJ1dpXskVaX2nTvkU/NGwVmDiQZx2HWOfRdLXm4AtvSPnq4uBtH +mjgOHzCTJYS6KguVUDI0LryDy1ypBuew181v5lbOy0yLWncSOoxnUKSB47BgV6/2 +qm66YguDLWDDAhUArTL0jNOuDEWhmKYfpLXiAyB2OyMCgYB539w9YU/mNfzrfq6u +NxjcLv77RSgpk6xnSdyDwiPYwYhyljFrOwtURmz0RPNLguNVTQuQp3j6rxMG8CXa +5qPjbH+T3VusQFK5I3AECspwuNWCBZlxGQDvvJYYEsNV3Zvv/gmB2oXFVIB0tBxW +rkP9MA2JJi5O/YmUP5mmUbA4iAOBhQACgYEAgzUqaaEy80hD0qDrmVv/Ti8IOnPw +BJ0skeovDOQ9FEq9pIGZ5LADxXCor4MwPUUQX2BsXEjZJaQO2cJjDC+kzb+DhTne +uaKfkZCF8gRjafYnyoSyyx4seUBWS2cPljqxFk1OLKK/b/058S9UiSi/TS0bXmmA +tPG+TJKpGYb7pVk= +-----END PUBLIC KEY-----""" + + def testImportKey2(self): + for pem in (self.pem_public, tostr(self.pem_public)): + key_obj = DSA.importKey(pem) + self.assertFalse(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + + def testExportKey2(self): + tup = (self.y, self.g, self.p, self.q) + key = DSA.construct(tup) + encoded = key.export_key('PEM') + self.assertEqual(self.pem_public, encoded) + + # 3. OpenSSL/OpenSSH format + der_private=\ + '308201bb02010002818100e756ee1717f4b6794c7c214724a19763742c45572b'+\ + '4b3f8ff3b44f3be9f44ce039a2757695ec915697da74ef914fcd1b05660e2419'+\ + 'c761d639f45d2d79b802dbd23e7ab8b81b479a380e1f30932584ba2a0b955032'+\ + '342ebc83cb5ca906e7b0d7cd6fe656cecb4c8b5a77123a8c6750a481e3b06057'+\ + 'aff6aa6eba620b832d60c3021500ad32f48cd3ae0c45a198a61fa4b5e2032076'+\ + '3b2302818079dfdc3d614fe635fceb7eaeae3718dc2efefb45282993ac6749dc'+\ + '83c223d8c1887296316b3b0b54466cf444f34b82e3554d0b90a778faaf1306f0'+\ + '25dae6a3e36c7f93dd5bac4052b92370040aca70b8d5820599711900efbc9618'+\ + '12c355dd9beffe0981da85c5548074b41c56ae43fd300d89262e4efd89943f99'+\ + 'a651b038880281810083352a69a132f34843d2a0eb995bff4e2f083a73f0049d'+\ + '2c91ea2f0ce43d144abda48199e4b003c570a8af83303d45105f606c5c48d925'+\ + 'a40ed9c2630c2fa4cdbf838539deb9a29f919085f2046369f627ca84b2cb1e2c'+\ + '7940564b670f963ab1164d4e2ca2bf6ffd39f12f548928bf4d2d1b5e6980b4f1'+\ + 'be4c92a91986fba55902145ebd9a3f0b82069d98420986b314215025756065' + + def testImportKey3(self): + key_obj = DSA.importKey(self.der_private) + self.assertTrue(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + self.assertEqual(self.x, key_obj.x) + + def testExportKey3(self): + tup = (self.y, self.g, self.p, self.q, self.x) + key = DSA.construct(tup) + encoded = key.export_key('DER', pkcs8=False) + self.assertEqual(self.der_private, encoded) + + # 4. 
+ pem_private="""\ +-----BEGIN DSA PRIVATE KEY----- +MIIBuwIBAAKBgQDnVu4XF/S2eUx8IUckoZdjdCxFVytLP4/ztE876fRM4DmidXaV +7JFWl9p075FPzRsFZg4kGcdh1jn0XS15uALb0j56uLgbR5o4Dh8wkyWEuioLlVAy +NC68g8tcqQbnsNfNb+ZWzstMi1p3EjqMZ1CkgeOwYFev9qpuumILgy1gwwIVAK0y +9IzTrgxFoZimH6S14gMgdjsjAoGAed/cPWFP5jX8636urjcY3C7++0UoKZOsZ0nc +g8Ij2MGIcpYxazsLVEZs9ETzS4LjVU0LkKd4+q8TBvAl2uaj42x/k91brEBSuSNw +BArKcLjVggWZcRkA77yWGBLDVd2b7/4JgdqFxVSAdLQcVq5D/TANiSYuTv2JlD+Z +plGwOIgCgYEAgzUqaaEy80hD0qDrmVv/Ti8IOnPwBJ0skeovDOQ9FEq9pIGZ5LAD +xXCor4MwPUUQX2BsXEjZJaQO2cJjDC+kzb+DhTneuaKfkZCF8gRjafYnyoSyyx4s +eUBWS2cPljqxFk1OLKK/b/058S9UiSi/TS0bXmmAtPG+TJKpGYb7pVkCFF69mj8L +ggadmEIJhrMUIVAldWBl +-----END DSA PRIVATE KEY-----""" + + def testImportKey4(self): + for pem in (self.pem_private, tostr(self.pem_private)): + key_obj = DSA.importKey(pem) + self.assertTrue(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + self.assertEqual(self.x, key_obj.x) + + def testExportKey4(self): + tup = (self.y, self.g, self.p, self.q, self.x) + key = DSA.construct(tup) + encoded = key.export_key('PEM', pkcs8=False) + self.assertEqual(self.pem_private, encoded) + + # 5. PKCS8 (unencrypted) + der_pkcs8=\ + '3082014a0201003082012b06072a8648ce3804013082011e02818100e756ee17'+\ + '17f4b6794c7c214724a19763742c45572b4b3f8ff3b44f3be9f44ce039a27576'+\ + '95ec915697da74ef914fcd1b05660e2419c761d639f45d2d79b802dbd23e7ab8'+\ + 'b81b479a380e1f30932584ba2a0b955032342ebc83cb5ca906e7b0d7cd6fe656'+\ + 'cecb4c8b5a77123a8c6750a481e3b06057aff6aa6eba620b832d60c3021500ad'+\ + '32f48cd3ae0c45a198a61fa4b5e20320763b2302818079dfdc3d614fe635fceb'+\ + '7eaeae3718dc2efefb45282993ac6749dc83c223d8c1887296316b3b0b54466c'+\ + 'f444f34b82e3554d0b90a778faaf1306f025dae6a3e36c7f93dd5bac4052b923'+\ + '70040aca70b8d5820599711900efbc961812c355dd9beffe0981da85c5548074'+\ + 'b41c56ae43fd300d89262e4efd89943f99a651b03888041602145ebd9a3f0b82'+\ + '069d98420986b314215025756065' + + def testImportKey5(self): + key_obj = DSA.importKey(self.der_pkcs8) + self.assertTrue(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + self.assertEqual(self.x, key_obj.x) + + def testExportKey5(self): + tup = (self.y, self.g, self.p, self.q, self.x) + key = DSA.construct(tup) + encoded = key.export_key('DER') + self.assertEqual(self.der_pkcs8, encoded) + encoded = key.export_key('DER', pkcs8=True) + self.assertEqual(self.der_pkcs8, encoded) + + # 6. 
+ pem_pkcs8="""\ +-----BEGIN PRIVATE KEY----- +MIIBSgIBADCCASsGByqGSM44BAEwggEeAoGBAOdW7hcX9LZ5THwhRyShl2N0LEVX +K0s/j/O0Tzvp9EzgOaJ1dpXskVaX2nTvkU/NGwVmDiQZx2HWOfRdLXm4AtvSPnq4 +uBtHmjgOHzCTJYS6KguVUDI0LryDy1ypBuew181v5lbOy0yLWncSOoxnUKSB47Bg +V6/2qm66YguDLWDDAhUArTL0jNOuDEWhmKYfpLXiAyB2OyMCgYB539w9YU/mNfzr +fq6uNxjcLv77RSgpk6xnSdyDwiPYwYhyljFrOwtURmz0RPNLguNVTQuQp3j6rxMG +8CXa5qPjbH+T3VusQFK5I3AECspwuNWCBZlxGQDvvJYYEsNV3Zvv/gmB2oXFVIB0 +tBxWrkP9MA2JJi5O/YmUP5mmUbA4iAQWAhRevZo/C4IGnZhCCYazFCFQJXVgZQ== +-----END PRIVATE KEY-----""" + + def testImportKey6(self): + for pem in (self.pem_pkcs8, tostr(self.pem_pkcs8)): + key_obj = DSA.importKey(pem) + self.assertTrue(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + self.assertEqual(self.x, key_obj.x) + + def testExportKey6(self): + tup = (self.y, self.g, self.p, self.q, self.x) + key = DSA.construct(tup) + encoded = key.export_key('PEM') + self.assertEqual(self.pem_pkcs8, encoded) + encoded = key.export_key('PEM', pkcs8=True) + self.assertEqual(self.pem_pkcs8, encoded) + + # 7. OpenSSH/RFC4253 + ssh_pub="""ssh-dss AAAAB3NzaC1kc3MAAACBAOdW7hcX9LZ5THwhRyShl2N0LEVXK0s/j/O0Tzvp9EzgOaJ1dpXskVaX2nTvkU/NGwVmDiQZx2HWOfRdLXm4AtvSPnq4uBtHmjgOHzCTJYS6KguVUDI0LryDy1ypBuew181v5lbOy0yLWncSOoxnUKSB47BgV6/2qm66YguDLWDDAAAAFQCtMvSM064MRaGYph+kteIDIHY7IwAAAIB539w9YU/mNfzrfq6uNxjcLv77RSgpk6xnSdyDwiPYwYhyljFrOwtURmz0RPNLguNVTQuQp3j6rxMG8CXa5qPjbH+T3VusQFK5I3AECspwuNWCBZlxGQDvvJYYEsNV3Zvv/gmB2oXFVIB0tBxWrkP9MA2JJi5O/YmUP5mmUbA4iAAAAIEAgzUqaaEy80hD0qDrmVv/Ti8IOnPwBJ0skeovDOQ9FEq9pIGZ5LADxXCor4MwPUUQX2BsXEjZJaQO2cJjDC+kzb+DhTneuaKfkZCF8gRjafYnyoSyyx4seUBWS2cPljqxFk1OLKK/b/058S9UiSi/TS0bXmmAtPG+TJKpGYb7pVk=""" + + def testImportKey7(self): + for ssh in (self.ssh_pub, tostr(self.ssh_pub)): + key_obj = DSA.importKey(ssh) + self.assertFalse(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + + def testExportKey7(self): + tup = (self.y, self.g, self.p, self.q) + key = DSA.construct(tup) + encoded = key.export_key('OpenSSH') + self.assertEqual(self.ssh_pub, encoded) + + # 8. 
Encrypted OpenSSL/OpenSSH + pem_private_encrypted="""\ +-----BEGIN DSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-128-CBC,70B6908939D65E9F2EB999E8729788CE + +4V6GHRDpCrdZ8MBjbyp5AlGUrjvr2Pn2e2zVxy5RBt4FBj9/pa0ae0nnyUPMLSUU +kKyOR0topRYTVRLElm4qVrb5uNZ3hRwfbklr+pSrB7O9eHz9V5sfOQxyODS07JxK +k1OdOs70/ouMXLF9EWfAZOmWUccZKHNblUwg1p1UrZIz5jXw4dUE/zqhvXh6d+iC +ADsICaBCjCrRQJKDp50h3+ndQjkYBKVH+pj8TiQ79U7lAvdp3+iMghQN6YXs9mdI +gFpWw/f97oWM4GHZFqHJ+VSMNFjBiFhAvYV587d7Lk4dhD8sCfbxj42PnfRgUItc +nnPqHxmhMQozBWzYM4mQuo3XbF2WlsNFbOzFVyGhw1Bx1s91qvXBVWJh2ozrW0s6 +HYDV7ZkcTml/4kjA/d+mve6LZ8kuuR1qCiZx6rkffhh1gDN/1Xz3HVvIy/dQ+h9s +5zp7PwUoWbhqp3WCOr156P6gR8qo7OlT6wMh33FSXK/mxikHK136fV2shwTKQVII +rJBvXpj8nACUmi7scKuTWGeUoXa+dwTZVVe+b+L2U1ZM7+h/neTJiXn7u99PFUwu +xVJtxaV37m3aXxtCsPnbBg== +-----END DSA PRIVATE KEY-----""" + + def testImportKey8(self): + for pem in (self.pem_private_encrypted, tostr(self.pem_private_encrypted)): + key_obj = DSA.importKey(pem, "PWDTEST") + self.assertTrue(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + self.assertEqual(self.x, key_obj.x) + + def testExportKey8(self): + tup = (self.y, self.g, self.p, self.q, self.x) + key = DSA.construct(tup) + encoded = key.export_key('PEM', pkcs8=False, passphrase="PWDTEST") + key = DSA.importKey(encoded, "PWDTEST") + self.assertEqual(self.y, key.y) + self.assertEqual(self.p, key.p) + self.assertEqual(self.q, key.q) + self.assertEqual(self.g, key.g) + self.assertEqual(self.x, key.x) + + # 9. Encrypted PKCS8 + # pbeWithMD5AndDES-CBC + pem_pkcs8_encrypted="""\ +-----BEGIN ENCRYPTED PRIVATE KEY----- +MIIBcTAbBgkqhkiG9w0BBQMwDgQI0GC3BJ/jSw8CAggABIIBUHc1cXZpExIE9tC7 +7ryiW+5ihtF2Ekurq3e408GYSAu5smJjN2bvQXmzRFBz8W38K8eMf1sbWroZ4+zn +kZSbb9nSm5kAa8lR2+oF2k+WRswMR/PTC3f/D9STO2X0QxdrzKgIHEcSGSHp5jTx +aVvbkCDHo9vhBTl6S3ogZ48As/MEro76+9igUwJ1jNhIQZPJ7e20QH5qDpQFFJN4 +CKl2ENSEuwGiqBszItFy4dqH0g63ZGZV/xt9wSO9Rd7SK/EbA/dklOxBa5Y/VItM +gnIhs9XDMoGYyn6F023EicNJm6g/bVQk81BTTma4tm+12TKGdYm+QkeZvCOMZylr +Wv67cKwO3cAXt5C3QXMDgYR64XvuaT5h7C0igMp2afSXJlnbHEbFxQVJlv83T4FM +eZ4k+NQDbEL8GiHmFxzDWQAuPPZKJWEEEV2p/To+WOh+kSDHQw== +-----END ENCRYPTED PRIVATE KEY-----""" + + def testImportKey9(self): + for pem in (self.pem_pkcs8_encrypted, tostr(self.pem_pkcs8_encrypted)): + key_obj = DSA.importKey(pem, "PWDTEST") + self.assertTrue(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + self.assertEqual(self.x, key_obj.x) + + # 10. 
Encrypted PKCS8 + # pkcs5PBES2 / + # pkcs5PBKDF2 (rounds=1000, salt=D725BF1B6B8239F4) / + # des-EDE3-CBC (iv=27A1C66C42AFEECE) + # + der_pkcs8_encrypted=\ + '30820196304006092a864886f70d01050d3033301b06092a864886f70d01050c'+\ + '300e0408d725bf1b6b8239f4020203e8301406082a864886f70d0307040827a1'+\ + 'c66c42afeece048201505cacfde7bf8edabb3e0d387950dc872662ea7e9b1ed4'+\ + '400d2e7e6186284b64668d8d0328c33a9d9397e6f03df7cb68268b0a06b4e22f'+\ + '7d132821449ecf998a8b696dbc6dd2b19e66d7eb2edfeb4153c1771d49702395'+\ + '4f36072868b5fcccf93413a5ac4b2eb47d4b3f681c6bd67ae363ed776f45ae47'+\ + '174a00098a7c930a50f820b227ddf50f9742d8e950d02586ff2dac0e3c372248'+\ + 'e5f9b6a7a02f4004f20c87913e0f7b52bccc209b95d478256a890b31d4c9adec'+\ + '21a4d157a179a93a3dad06f94f3ce486b46dfa7fc15fd852dd7680bbb2f17478'+\ + '7e71bd8dbaf81eca7518d76c1d26256e95424864ba45ca5d47d7c5a421be02fa'+\ + 'b94ab01e18593f66cf9094eb5c94b9ecf3aa08b854a195cf87612fbe5e96c426'+\ + '2b0d573e52dc71ba3f5e468c601e816c49b7d32c698b22175e89aaef0c443770'+\ + '5ef2f88a116d99d8e2869a4fd09a771b84b49e4ccb79aadcb1c9' + + def testImportKey10(self): + key_obj = DSA.importKey(self.der_pkcs8_encrypted, "PWDTEST") + self.assertTrue(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + self.assertEqual(self.x, key_obj.x) + + def testExportKey10(self): + tup = (self.y, self.g, self.p, self.q, self.x) + key = DSA.construct(tup) + randfunc = BytesIO(unhexlify(b("27A1C66C42AFEECE") + b("D725BF1B6B8239F4"))).read + encoded = key.export_key('DER', pkcs8=True, passphrase="PWDTEST", randfunc=randfunc) + self.assertEqual(self.der_pkcs8_encrypted, encoded) + + # ---- + + def testImportError1(self): + self.assertRaises(ValueError, DSA.importKey, self.der_pkcs8_encrypted, "wrongpwd") + + def testExportError2(self): + tup = (self.y, self.g, self.p, self.q, self.x) + key = DSA.construct(tup) + self.assertRaises(ValueError, key.export_key, 'DER', pkcs8=False, passphrase="PWDTEST") + + def test_import_key(self): + """Verify importKey is an alias to import_key""" + + key_obj = DSA.import_key(self.der_public) + self.assertFalse(key_obj.has_private()) + self.assertEqual(self.y, key_obj.y) + self.assertEqual(self.p, key_obj.p) + self.assertEqual(self.q, key_obj.q) + self.assertEqual(self.g, key_obj.g) + + def test_exportKey(self): + tup = (self.y, self.g, self.p, self.q, self.x) + key = DSA.construct(tup) + self.assertEqual(key.exportKey(), key.export_key()) + + + def test_import_empty(self): + self.assertRaises(ValueError, DSA.import_key, b'') + + +class ImportKeyFromX509Cert(unittest.TestCase): + + def test_x509v1(self): + + # Sample V1 certificate with a 1024 bit DSA key + x509_v1_cert = """ +-----BEGIN CERTIFICATE----- +MIIDUjCCArsCAQIwDQYJKoZIhvcNAQEFBQAwfjENMAsGA1UEChMEQWNtZTELMAkG +A1UECxMCUkQxHDAaBgkqhkiG9w0BCQEWDXNwYW1AYWNtZS5vcmcxEzARBgNVBAcT +Ck1ldHJvcG9saXMxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYDVQQGEwJVUzENMAsG +A1UEAxMEdGVzdDAeFw0xNDA3MTEyMDM4NDNaFw0xNzA0MDYyMDM4NDNaME0xCzAJ +BgNVBAYTAlVTMREwDwYDVQQIEwhOZXcgWW9yazENMAsGA1UEChMEQWNtZTELMAkG +A1UECxMCUkQxDzANBgNVBAMTBnBvbGFuZDCCAbYwggErBgcqhkjOOAQBMIIBHgKB +gQDOrN4Ox4+t3T6wKeHfhzArhcrNEFMQ4Ss+4PIKyimDy9Bn64WPkL1B/9dvYIga +23GLu6tVJmXo6EdJnVOHEMhr99EeOwuDWWeP7Awq7RSlKEejokr4BEzMTW/tExSD +cO6/GI7xzh0eTH+VTTPDfyrJMYCkh0rJAfCP+5xrmPNetwIVALtXYOV1yoRrzJ2Q +M5uEjidH6GiZAoGAfUqA1SAm5g5U68SILMVX9l5rq0OpB0waBMpJQ31/R/yXNDqo +c3gGWZTOJFU4IzwNpGhrGNADUByz/lc1SAOAdEJIr0JVrhbGewQjB4pWqoLGbBKz 
+RoavTNDc/zD7SYa12evWDHADwvlXoeQg+lWop1zS8OqaDC7aLGKpWN3/m8kDgYQA +AoGAKoirPAfcp1rbbl4y2FFAIktfW8f4+T7d2iKSg73aiVfujhNOt1Zz1lfC0NI2 +eonLWO3tAM4XGKf1TLjb5UXngGn40okPsaA81YE6ZIKm20ywjlOY3QkAEdMaLVY3 +9PJvM8RGB9m7pLKxyHfGMfF40MVN4222zKeGp7xhM0CNiCUwDQYJKoZIhvcNAQEF +BQADgYEAfbNZfpYa2KlALEM1FZnwvQDvJHntHz8LdeJ4WM7CXDlKi67wY2HKM30w +s2xej75imkVOFd1kF2d0A8sjfriXLVIt1Hwq9ANZomhu4Edx0xpH8tqdh/bDtnM2 +TmduZNY9OWkb07h0CtWD6Zt8fhRllVsSSrlWd/2or7FXNC5weFQ= +-----END CERTIFICATE----- + """.strip() + + # DSA public key as dumped by openssl + y_str = """ +2a:88:ab:3c:07:dc:a7:5a:db:6e:5e:32:d8:51:40: +22:4b:5f:5b:c7:f8:f9:3e:dd:da:22:92:83:bd:da: +89:57:ee:8e:13:4e:b7:56:73:d6:57:c2:d0:d2:36: +7a:89:cb:58:ed:ed:00:ce:17:18:a7:f5:4c:b8:db: +e5:45:e7:80:69:f8:d2:89:0f:b1:a0:3c:d5:81:3a: +64:82:a6:db:4c:b0:8e:53:98:dd:09:00:11:d3:1a: +2d:56:37:f4:f2:6f:33:c4:46:07:d9:bb:a4:b2:b1: +c8:77:c6:31:f1:78:d0:c5:4d:e3:6d:b6:cc:a7:86: +a7:bc:61:33:40:8d:88:25 + """ + p_str = """ +00:ce:ac:de:0e:c7:8f:ad:dd:3e:b0:29:e1:df:87: +30:2b:85:ca:cd:10:53:10:e1:2b:3e:e0:f2:0a:ca: +29:83:cb:d0:67:eb:85:8f:90:bd:41:ff:d7:6f:60: +88:1a:db:71:8b:bb:ab:55:26:65:e8:e8:47:49:9d: +53:87:10:c8:6b:f7:d1:1e:3b:0b:83:59:67:8f:ec: +0c:2a:ed:14:a5:28:47:a3:a2:4a:f8:04:4c:cc:4d: +6f:ed:13:14:83:70:ee:bf:18:8e:f1:ce:1d:1e:4c: +7f:95:4d:33:c3:7f:2a:c9:31:80:a4:87:4a:c9:01: +f0:8f:fb:9c:6b:98:f3:5e:b7 + """ + q_str = """ +00:bb:57:60:e5:75:ca:84:6b:cc:9d:90:33:9b:84: +8e:27:47:e8:68:99 + """ + g_str = """ +7d:4a:80:d5:20:26:e6:0e:54:eb:c4:88:2c:c5:57: +f6:5e:6b:ab:43:a9:07:4c:1a:04:ca:49:43:7d:7f: +47:fc:97:34:3a:a8:73:78:06:59:94:ce:24:55:38: +23:3c:0d:a4:68:6b:18:d0:03:50:1c:b3:fe:57:35: +48:03:80:74:42:48:af:42:55:ae:16:c6:7b:04:23: +07:8a:56:aa:82:c6:6c:12:b3:46:86:af:4c:d0:dc: +ff:30:fb:49:86:b5:d9:eb:d6:0c:70:03:c2:f9:57: +a1:e4:20:fa:55:a8:a7:5c:d2:f0:ea:9a:0c:2e:da: +2c:62:a9:58:dd:ff:9b:c9 + """ + + key = DSA.importKey(x509_v1_cert) + for comp_name in ('y', 'p', 'q', 'g'): + comp_str = locals()[comp_name + "_str"] + comp = int(re.sub("[^0-9a-f]", "", comp_str), 16) + self.assertEqual(getattr(key, comp_name), comp) + self.assertFalse(key.has_private()) + + def test_x509v3(self): + + # Sample V3 certificate with a 1024 bit DSA key + x509_v3_cert = """ +-----BEGIN CERTIFICATE----- +MIIFhjCCA26gAwIBAgIBAzANBgkqhkiG9w0BAQsFADBhMQswCQYDVQQGEwJVUzEL +MAkGA1UECAwCTUQxEjAQBgNVBAcMCUJhbHRpbW9yZTEQMA4GA1UEAwwHVGVzdCBD +QTEfMB0GCSqGSIb3DQEJARYQdGVzdEBleGFtcGxlLmNvbTAeFw0xNDA3MTMyMDUz +MjBaFw0xNzA0MDgyMDUzMjBaMEAxCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJNRDES +MBAGA1UEBwwJQmFsdGltb3JlMRAwDgYDVQQDDAdhdXN0cmlhMIIBtjCCASsGByqG +SM44BAEwggEeAoGBALfd8gyEpVPA0ZI69Kp3nyJcu5N0ZZ3K1K9hleQLNqKEcZOh +7a/C2J1TPdmHTLJ0rAwBZ1nWxnARSgRphziGDFspKCYQwYcSMz8KoFgvXbXpuchy +oFACiQ2LqZnc5MakuLQtLcQciSYGYj3zmZdYMoa904F1aDWr+DxQI6DVC3/bAhUA +hqXMCJ6fQK3G2O9S3/CC/yVZXCsCgYBRXROl3R2khX7l10LQjDEgo3B1IzjXU/jP +McMBl6XO+nBJXxr/scbq8Ajiv7LTnGpSjgryHtvfj887kfvo8QbSS3kp3vq5uSqI +ui7E7r3jguWaLj616AG1HWOctXJUjqsiabZwsp2h09gHTzmHEXBOmiARu8xFxKAH +xsuo7onAbwOBhAACgYBylWjWSnKHE8mHx1A5m/0GQx6xnhWIe3+MJAnEhRGxA2J4 +SCsfWU0OwglIQToh1z5uUU9oDi9cYgNPBevOFRnDhc2yaJY6VAYnI+D+6J5IU6Yd +0iaG/iSc4sV4bFr0axcPpse3SN0XaQxiKeSFBfFnoMqL+dd9Gb3QPZSllBcVD6OB +1TCB0jAdBgNVHQ4EFgQUx5wN0Puotv388M9Tp/fsPbZpzAUwHwYDVR0jBBgwFoAU +a0hkif3RMaraiWtsOOZZlLu9wJwwCQYDVR0TBAIwADALBgNVHQ8EBAMCBeAwSgYD +VR0RBEMwQYILZXhhbXBsZS5jb22CD3d3dy5leGFtcGxlLmNvbYIQbWFpbC5leGFt +cGxlLmNvbYIPZnRwLmV4YW1wbGUuY29tMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NM +IEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTANBgkqhkiG9w0BAQsFAAOCAgEAyWf1TiJI 
+aNEIA9o/PG8/JiGASTS2/HBVTJbkq03k6NkJVk/GxC1DPziTUJ+CdWlHWcAi1EOW +Ach3QxNDRrVfCOfCMDgElIO1094/reJgdFYG00LRi8QkRJuxANV7YS4tLudhyHJC +kR2lhdMNmEuzWK+s2y+5cLrdm7qdvdENQCcV67uvGPx4sc+EaE7x13SczKjWBtbo +QCs6JTOW+EkPRl4Zo27K4OIZ43/J+GxvwU9QUVH3wPVdbbLNw+QeTFBYMTEcxyc4 +kv50HPBFaithziXBFyvdIs19FjkFzu0Uz/e0zb1+vMzQlJMD94HVOrMnIj5Sb2cL +KKdYXS4uhxFJmdV091Xur5JkYYwEzuaGav7J3zOzYutrIGTgDluLCvA+VQkRcTsy +jZ065SkY/v+38QHp+cmm8WRluupJTs8wYzVp6Fu0iFaaK7ztFmaZmHpiPIfDFjva +aCIgzzT5NweJd/b71A2SyzHXJ14zBXsr1PMylMp2TpHIidhuuNuQL6I0HaollB4M +Z3FsVBMhVDw4Z76qnFPr8mZE2tar33hSlJI/3pS/bBiukuBk8U7VB0X8OqaUnP3C +7b2Z4G8GtqDVcKGMzkvMjT4n9rKd/Le+qHSsQOGO9W/0LB7UDAZSwUsfAPnoBgdS +5t9tIomLCOstByXi+gGZue1TcdCa3Ph4kO0= +-----END CERTIFICATE----- + """.strip() + + # DSA public key as dumped by openssl + y_str = """ +72:95:68:d6:4a:72:87:13:c9:87:c7:50:39:9b:fd: +06:43:1e:b1:9e:15:88:7b:7f:8c:24:09:c4:85:11: +b1:03:62:78:48:2b:1f:59:4d:0e:c2:09:48:41:3a: +21:d7:3e:6e:51:4f:68:0e:2f:5c:62:03:4f:05:eb: +ce:15:19:c3:85:cd:b2:68:96:3a:54:06:27:23:e0: +fe:e8:9e:48:53:a6:1d:d2:26:86:fe:24:9c:e2:c5: +78:6c:5a:f4:6b:17:0f:a6:c7:b7:48:dd:17:69:0c: +62:29:e4:85:05:f1:67:a0:ca:8b:f9:d7:7d:19:bd: +d0:3d:94:a5:94:17:15:0f + """ + p_str = """ +00:b7:dd:f2:0c:84:a5:53:c0:d1:92:3a:f4:aa:77: +9f:22:5c:bb:93:74:65:9d:ca:d4:af:61:95:e4:0b: +36:a2:84:71:93:a1:ed:af:c2:d8:9d:53:3d:d9:87: +4c:b2:74:ac:0c:01:67:59:d6:c6:70:11:4a:04:69: +87:38:86:0c:5b:29:28:26:10:c1:87:12:33:3f:0a: +a0:58:2f:5d:b5:e9:b9:c8:72:a0:50:02:89:0d:8b: +a9:99:dc:e4:c6:a4:b8:b4:2d:2d:c4:1c:89:26:06: +62:3d:f3:99:97:58:32:86:bd:d3:81:75:68:35:ab: +f8:3c:50:23:a0:d5:0b:7f:db + """ + q_str = """ +00:86:a5:cc:08:9e:9f:40:ad:c6:d8:ef:52:df:f0: +82:ff:25:59:5c:2b + """ + g_str = """ +51:5d:13:a5:dd:1d:a4:85:7e:e5:d7:42:d0:8c:31: +20:a3:70:75:23:38:d7:53:f8:cf:31:c3:01:97:a5: +ce:fa:70:49:5f:1a:ff:b1:c6:ea:f0:08:e2:bf:b2: +d3:9c:6a:52:8e:0a:f2:1e:db:df:8f:cf:3b:91:fb: +e8:f1:06:d2:4b:79:29:de:fa:b9:b9:2a:88:ba:2e: +c4:ee:bd:e3:82:e5:9a:2e:3e:b5:e8:01:b5:1d:63: +9c:b5:72:54:8e:ab:22:69:b6:70:b2:9d:a1:d3:d8: +07:4f:39:87:11:70:4e:9a:20:11:bb:cc:45:c4:a0: +07:c6:cb:a8:ee:89:c0:6f + """ + + key = DSA.importKey(x509_v3_cert) + for comp_name in ('y', 'p', 'q', 'g'): + comp_str = locals()[comp_name + "_str"] + comp = int(re.sub("[^0-9a-f]", "", comp_str), 16) + self.assertEqual(getattr(key, comp_name), comp) + self.assertFalse(key.has_private()) + + +if __name__ == '__main__': + unittest.main() + +def get_tests(config={}): + tests = [] + tests += list_test_cases(ImportKeyTests) + tests += list_test_cases(ImportKeyFromX509Cert) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_import_ECC.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_import_ECC.py new file mode 100644 index 000000000..aa54921ff --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_import_ECC.py @@ -0,0 +1,2763 @@ +# =================================================================== +# +# Copyright (c) 2015, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. 
Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import os +import errno +import warnings +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Util.py3compat import bord, tostr, FileNotFoundError +from Crypto.Util.asn1 import DerSequence, DerBitString +from Crypto.Util.number import bytes_to_long +from Crypto.Hash import SHAKE128 + +from Crypto.PublicKey import ECC + +try: + import pycryptodome_test_vectors # type: ignore + test_vectors_available = True +except ImportError: + test_vectors_available = False + + +class MissingTestVectorException(ValueError): + pass + + +def load_file(file_name, mode="rb"): + results = None + + try: + if not test_vectors_available: + raise FileNotFoundError(errno.ENOENT, + os.strerror(errno.ENOENT), + file_name) + + dir_comps = ("PublicKey", "ECC") + init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) + full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) + with open(full_file_name, mode) as file_in: + results = file_in.read() + + except FileNotFoundError: + warnings.warn("Warning: skipping extended tests for ECC", + UserWarning, + stacklevel=2) + + if results is None: + raise MissingTestVectorException("Missing %s" % file_name) + + return results + + +def compact(lines): + ext = b"".join(lines) + return unhexlify(tostr(ext).replace(" ", "").replace(":", "")) + + +def create_ref_keys_p192(): + key_len = 24 + key_lines = load_file("ecc_p192.txt").splitlines() + private_key_d = bytes_to_long(compact(key_lines[2:4])) + public_key_xy = compact(key_lines[5:9]) + assert bord(public_key_xy[0]) == 4 # Uncompressed + public_key_x = bytes_to_long(public_key_xy[1:key_len+1]) + public_key_y = bytes_to_long(public_key_xy[key_len+1:]) + + return (ECC.construct(curve="P-192", d=private_key_d), + ECC.construct(curve="P-192", point_x=public_key_x, point_y=public_key_y)) + + +def create_ref_keys_p224(): + key_len = 28 + key_lines = load_file("ecc_p224.txt").splitlines() + private_key_d = bytes_to_long(compact(key_lines[2:4])) + public_key_xy = compact(key_lines[5:9]) + assert bord(public_key_xy[0]) == 4 # Uncompressed + public_key_x = bytes_to_long(public_key_xy[1:key_len+1]) + public_key_y = bytes_to_long(public_key_xy[key_len+1:]) + + return (ECC.construct(curve="P-224", d=private_key_d), + ECC.construct(curve="P-224", point_x=public_key_x, point_y=public_key_y)) + + +def create_ref_keys_p256(): + key_len = 32 + key_lines = load_file("ecc_p256.txt").splitlines() + 
private_key_d = bytes_to_long(compact(key_lines[2:5])) + public_key_xy = compact(key_lines[6:11]) + assert bord(public_key_xy[0]) == 4 # Uncompressed + public_key_x = bytes_to_long(public_key_xy[1:key_len+1]) + public_key_y = bytes_to_long(public_key_xy[key_len+1:]) + + return (ECC.construct(curve="P-256", d=private_key_d), + ECC.construct(curve="P-256", point_x=public_key_x, point_y=public_key_y)) + + +def create_ref_keys_p384(): + key_len = 48 + key_lines = load_file("ecc_p384.txt").splitlines() + private_key_d = bytes_to_long(compact(key_lines[2:6])) + public_key_xy = compact(key_lines[7:14]) + assert bord(public_key_xy[0]) == 4 # Uncompressed + public_key_x = bytes_to_long(public_key_xy[1:key_len+1]) + public_key_y = bytes_to_long(public_key_xy[key_len+1:]) + + return (ECC.construct(curve="P-384", d=private_key_d), + ECC.construct(curve="P-384", point_x=public_key_x, point_y=public_key_y)) + + +def create_ref_keys_p521(): + key_len = 66 + key_lines = load_file("ecc_p521.txt").splitlines() + private_key_d = bytes_to_long(compact(key_lines[2:7])) + public_key_xy = compact(key_lines[8:17]) + assert bord(public_key_xy[0]) == 4 # Uncompressed + public_key_x = bytes_to_long(public_key_xy[1:key_len+1]) + public_key_y = bytes_to_long(public_key_xy[key_len+1:]) + + return (ECC.construct(curve="P-521", d=private_key_d), + ECC.construct(curve="P-521", point_x=public_key_x, point_y=public_key_y)) + + +def create_ref_keys_ed25519(): + key_lines = load_file("ecc_ed25519.txt").splitlines() + seed = compact(key_lines[5:8]) + key = ECC.construct(curve="Ed25519", seed=seed) + return (key, key.public_key()) + + +def create_ref_keys_ed448(): + key_lines = load_file("ecc_ed448.txt").splitlines() + seed = compact(key_lines[6:10]) + key = ECC.construct(curve="Ed448", seed=seed) + return (key, key.public_key()) + + +# Create reference key pair +# ref_private, ref_public = create_ref_keys_p521() + +def get_fixed_prng(): + return SHAKE128.new().update(b"SEED").read + + +def extract_bitstring_from_spki(data): + seq = DerSequence() + seq.decode(data) + bs = DerBitString() + bs.decode(seq[1]) + return bs.value + + +class TestImport(unittest.TestCase): + + def test_empty(self): + self.assertRaises(ValueError, ECC.import_key, b"") + + def test_mismatch(self): + # The private key does not match the public key + mismatch = """-----BEGIN PRIVATE KEY----- +MIG2AgEAMBAGByqGSM49AgEGBSuBBAAiBIGeMIGbAgEBBDAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJChZANiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXk= +-----END PRIVATE KEY-----""" + self.assertRaises(ValueError, ECC.import_key, mismatch) + + +class TestImport_P192(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestImport_P192, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_p192() + + def test_import_public_der(self): + key_file = load_file("ecc_p192_public.der") + + key = ECC._import_subjectPublicKeyInfo(key_file) + self.assertEqual(self.ref_public, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_sec1_uncompressed(self): + key_file = load_file("ecc_p192_public.der") + value = extract_bitstring_from_spki(key_file) + key = ECC.import_key(key_file, curve_name='P192') + self.assertEqual(self.ref_public, key) + + def test_import_sec1_compressed(self): + key_file = 
load_file("ecc_p192_public_compressed.der") + value = extract_bitstring_from_spki(key_file) + key = ECC.import_key(key_file, curve_name='P192') + self.assertEqual(self.ref_public, key) + + def test_import_rfc5915_der(self): + key_file = load_file("ecc_p192_private.der") + + key = ECC._import_rfc5915_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_clear(self): + key_file = load_file("ecc_p192_private_p8_clear.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_in_pem_clear(self): + key_file = load_file("ecc_p192_private_p8_clear.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_1(self): + key_file = load_file("ecc_p192_private_p8.der") + + key = ECC._import_der(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_2(self): + key_file = load_file("ecc_p192_private_p8.pem") + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_der(self): + key_file = load_file("ecc_p192_x509.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_public_pem(self): + key_file = load_file("ecc_p192_public.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_private_pem(self): + key_file = load_file("ecc_p192_private.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pem_encrypted(self): + for algo in "des3", "aes128", "aes192", "aes256", "aes256_gcm": + key_file = load_file("ecc_p192_private_enc_%s.pem" % algo) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(tostr(key_file), b"secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_pem(self): + key_file = load_file("ecc_p192_x509.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + +class TestImport_P224(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestImport_P224, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_p224() + + def test_import_public_der(self): + key_file = load_file("ecc_p224_public.der") + + key = ECC._import_subjectPublicKeyInfo(key_file) + self.assertEqual(self.ref_public, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_sec1_uncompressed(self): + key_file = load_file("ecc_p224_public.der") + value = extract_bitstring_from_spki(key_file) + key = ECC.import_key(key_file, curve_name='P224') + self.assertEqual(self.ref_public, key) + + def test_import_sec1_compressed(self): + key_file = load_file("ecc_p224_public_compressed.der") + value = extract_bitstring_from_spki(key_file) + key = ECC.import_key(key_file, curve_name='P224') + self.assertEqual(self.ref_public, key) + + def 
test_import_rfc5915_der(self): + key_file = load_file("ecc_p224_private.der") + + key = ECC._import_rfc5915_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_clear(self): + key_file = load_file("ecc_p224_private_p8_clear.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_in_pem_clear(self): + key_file = load_file("ecc_p224_private_p8_clear.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_1(self): + key_file = load_file("ecc_p224_private_p8.der") + + key = ECC._import_der(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_2(self): + key_file = load_file("ecc_p224_private_p8.pem") + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_3(self): + key_file = load_file("ecc_p224_private_p8_2.der") + + key = ECC._import_der(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_der(self): + key_file = load_file("ecc_p224_x509.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_public_pem(self): + key_file = load_file("ecc_p224_public.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_private_pem(self): + key_file = load_file("ecc_p224_private.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pem_encrypted(self): + for algo in "des3", "aes128", "aes192", "aes256", "aes256_gcm": + key_file = load_file("ecc_p224_private_enc_%s.pem" % algo) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(tostr(key_file), b"secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_pem(self): + key_file = load_file("ecc_p224_x509.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + +class TestImport_P256(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestImport_P256, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_p256() + + def test_import_public_der(self): + key_file = load_file("ecc_p256_public.der") + + key = ECC._import_subjectPublicKeyInfo(key_file) + self.assertEqual(self.ref_public, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_sec1_uncompressed(self): + key_file = load_file("ecc_p256_public.der") + value = extract_bitstring_from_spki(key_file) + key = ECC.import_key(key_file, curve_name='P256') + self.assertEqual(self.ref_public, key) + + def test_import_sec1_compressed(self): + key_file = load_file("ecc_p256_public_compressed.der") + value = extract_bitstring_from_spki(key_file) + key = 
ECC.import_key(key_file, curve_name='P256') + self.assertEqual(self.ref_public, key) + + def test_import_rfc5915_der(self): + key_file = load_file("ecc_p256_private.der") + + key = ECC._import_rfc5915_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_clear(self): + key_file = load_file("ecc_p256_private_p8_clear.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_in_pem_clear(self): + key_file = load_file("ecc_p256_private_p8_clear.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_1(self): + key_file = load_file("ecc_p256_private_p8.der") + + key = ECC._import_der(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_2(self): + key_file = load_file("ecc_p256_private_p8.pem") + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_3(self): + key_file = load_file("ecc_p256_private_p8_2.der") + + key = ECC._import_der(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_der(self): + key_file = load_file("ecc_p256_x509.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_public_pem(self): + key_file = load_file("ecc_p256_public.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_private_pem(self): + key_file = load_file("ecc_p256_private.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pem_with_ecparams(self): + key_file = load_file("ecc_p256_private_ecparams.pem") + key = ECC.import_key(key_file) + # We just check if the import succeeds + + def test_import_private_pem_encrypted(self): + for algo in "des3", "aes128", "aes192", "aes256", "aes256_gcm": + key_file = load_file("ecc_p256_private_enc_%s.pem" % algo) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(tostr(key_file), b"secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_pem(self): + key_file = load_file("ecc_p256_x509.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_openssh_public(self): + key_file = load_file("ecc_p256_public_openssh.txt") + + key = ECC._import_openssh_public(key_file) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_openssh_private_clear(self): + key_file = load_file("ecc_p256_private_openssh.pem") + key_file_old = load_file("ecc_p256_private_openssh_old.pem") + + key = ECC.import_key(key_file) + key_old = ECC.import_key(key_file_old) + self.assertEqual(key, key_old) + + def test_import_openssh_private_password(self): + key_file = load_file("ecc_p256_private_openssh_pwd.pem") + key_file_old = 
load_file("ecc_p256_private_openssh_pwd_old.pem") + + key = ECC.import_key(key_file, b"password") + key_old = ECC.import_key(key_file_old) + self.assertEqual(key, key_old) + + +class TestImport_P384(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestImport_P384, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_p384() + + def test_import_public_der(self): + key_file = load_file("ecc_p384_public.der") + + key = ECC._import_subjectPublicKeyInfo(key_file) + self.assertEqual(self.ref_public, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_sec1_uncompressed(self): + key_file = load_file("ecc_p384_public.der") + value = extract_bitstring_from_spki(key_file) + key = ECC.import_key(key_file, curve_name='P384') + self.assertEqual(self.ref_public, key) + + def test_import_sec1_compressed(self): + key_file = load_file("ecc_p384_public_compressed.der") + value = extract_bitstring_from_spki(key_file) + key = ECC.import_key(key_file, curve_name='P384') + self.assertEqual(self.ref_public, key) + + def test_import_rfc5915_der(self): + key_file = load_file("ecc_p384_private.der") + + key = ECC._import_rfc5915_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_clear(self): + key_file = load_file("ecc_p384_private_p8_clear.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_in_pem_clear(self): + key_file = load_file("ecc_p384_private_p8_clear.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_1(self): + key_file = load_file("ecc_p384_private_p8.der") + + key = ECC._import_der(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_2(self): + key_file = load_file("ecc_p384_private_p8.pem") + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_3(self): + key_file = load_file("ecc_p384_private_p8_2.der") + + key = ECC._import_der(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_der(self): + key_file = load_file("ecc_p384_x509.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_public_pem(self): + key_file = load_file("ecc_p384_public.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_private_pem(self): + key_file = load_file("ecc_p384_private.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pem_encrypted(self): + for algo in "des3", "aes128", "aes192", "aes256", "aes256_gcm": + key_file = load_file("ecc_p384_private_enc_%s.pem" % algo) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + 
key = ECC.import_key(tostr(key_file), b"secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_pem(self): + key_file = load_file("ecc_p384_x509.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_openssh_public(self): + key_file = load_file("ecc_p384_public_openssh.txt") + + key = ECC._import_openssh_public(key_file) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_openssh_private_clear(self): + key_file = load_file("ecc_p384_private_openssh.pem") + key_file_old = load_file("ecc_p384_private_openssh_old.pem") + + key = ECC.import_key(key_file) + key_old = ECC.import_key(key_file_old) + self.assertEqual(key, key_old) + + def test_import_openssh_private_password(self): + key_file = load_file("ecc_p384_private_openssh_pwd.pem") + key_file_old = load_file("ecc_p384_private_openssh_pwd_old.pem") + + key = ECC.import_key(key_file, b"password") + key_old = ECC.import_key(key_file_old) + self.assertEqual(key, key_old) + + +class TestImport_P521(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestImport_P521, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_p521() + + def test_import_public_der(self): + key_file = load_file("ecc_p521_public.der") + + key = ECC._import_subjectPublicKeyInfo(key_file) + self.assertEqual(self.ref_public, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_sec1_uncompressed(self): + key_file = load_file("ecc_p521_public.der") + value = extract_bitstring_from_spki(key_file) + key = ECC.import_key(key_file, curve_name='P521') + self.assertEqual(self.ref_public, key) + + def test_import_sec1_compressed(self): + key_file = load_file("ecc_p521_public_compressed.der") + value = extract_bitstring_from_spki(key_file) + key = ECC.import_key(key_file, curve_name='P521') + self.assertEqual(self.ref_public, key) + + def test_import_rfc5915_der(self): + key_file = load_file("ecc_p521_private.der") + + key = ECC._import_rfc5915_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_clear(self): + key_file = load_file("ecc_p521_private_p8_clear.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_in_pem_clear(self): + key_file = load_file("ecc_p521_private_p8_clear.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_1(self): + key_file = load_file("ecc_p521_private_p8.der") + + key = ECC._import_der(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_2(self): + key_file = load_file("ecc_p521_private_p8.pem") + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_3(self): + key_file = load_file("ecc_p521_private_p8_2.der") + + key = ECC._import_der(key_file, "secret") + self.assertEqual(self.ref_private, key) + + 
key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_der(self): + key_file = load_file("ecc_p521_x509.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_public_pem(self): + key_file = load_file("ecc_p521_public.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_private_pem(self): + key_file = load_file("ecc_p521_private.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pem_encrypted(self): + for algo in "des3", "aes128", "aes192", "aes256", "aes256_gcm": + key_file = load_file("ecc_p521_private_enc_%s.pem" % algo) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(tostr(key_file), b"secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_pem(self): + key_file = load_file("ecc_p521_x509.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_openssh_public(self): + key_file = load_file("ecc_p521_public_openssh.txt") + + key = ECC._import_openssh_public(key_file) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_openssh_private_clear(self): + key_file = load_file("ecc_p521_private_openssh.pem") + key_file_old = load_file("ecc_p521_private_openssh_old.pem") + + key = ECC.import_key(key_file) + key_old = ECC.import_key(key_file_old) + self.assertEqual(key, key_old) + + def test_import_openssh_private_password(self): + key_file = load_file("ecc_p521_private_openssh_pwd.pem") + key_file_old = load_file("ecc_p521_private_openssh_pwd_old.pem") + + key = ECC.import_key(key_file, b"password") + key_old = ECC.import_key(key_file_old) + self.assertEqual(key, key_old) + + +class TestExport_P192(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestExport_P192, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_p192() + + def test_export_public_der_uncompressed(self): + key_file = load_file("ecc_p192_public.der") + + encoded = self.ref_public._export_subjectPublicKeyInfo(False) + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_der_compressed(self): + key_file = load_file("ecc_p192_public.der") + pub_key = ECC.import_key(key_file) + key_file_compressed = pub_key.export_key(format="DER", compress=True) + + key_file_compressed_ref = load_file("ecc_p192_public_compressed.der") + self.assertEqual(key_file_compressed, key_file_compressed_ref) + + def test_export_public_sec1_uncompressed(self): + key_file = load_file("ecc_p192_public.der") + value = extract_bitstring_from_spki(key_file) + + encoded = self.ref_public.export_key(format="SEC1") + self.assertEqual(value, encoded) + + def test_export_public_sec1_compressed(self): + key_file = load_file("ecc_p192_public.der") + encoded = self.ref_public.export_key(format="SEC1", compress=True) + + key_file_compressed_ref = load_file("ecc_p192_public_compressed.der") + value = extract_bitstring_from_spki(key_file_compressed_ref) + self.assertEqual(value, encoded) + + def 
test_export_rfc5915_private_der(self): + key_file = load_file("ecc_p192_private.der") + + encoded = self.ref_private._export_rfc5915_private_der() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", use_pkcs8=False) + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_clear(self): + key_file = load_file("ecc_p192_private_p8_clear.der") + + encoded = self.ref_private._export_pkcs8() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="DER") + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_encrypted(self): + encoded = self.ref_private._export_pkcs8(passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) + + decoded = ECC._import_pkcs8(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA224AndAES192-CBC", + prot_params={'iteration_count':123}) + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_public_pem_uncompressed(self): + key_file = load_file("ecc_p192_public.pem", "rt").strip() + + encoded = self.ref_private._export_public_pem(False) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_public.export_key(format="PEM") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="PEM", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_pem_compressed(self): + key_file = load_file("ecc_p192_public.pem", "rt").strip() + pub_key = ECC.import_key(key_file) + + key_file_compressed = pub_key.export_key(format="PEM", compress=True) + key_file_compressed_ref = load_file("ecc_p192_public_compressed.pem", "rt").strip() + + self.assertEqual(key_file_compressed, key_file_compressed_ref) + + def test_export_private_pem_clear(self): + key_file = load_file("ecc_p192_private.pem", "rt").strip() + + encoded = self.ref_private._export_private_pem(None) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", use_pkcs8=False) + self.assertEqual(key_file, encoded) + + def test_export_private_pem_encrypted(self): + encoded = self.ref_private._export_private_pem(passphrase=b"secret") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "EC PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", + passphrase="secret", + use_pkcs8=False) + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_private_pkcs8_and_pem_1(self): + # PKCS8 inside PEM with both unencrypted + key_file = load_file("ecc_p192_private_p8_clear.pem", "rt").strip() + + encoded = self.ref_private._export_private_clear_pkcs8_in_clear_pem() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM") + self.assertEqual(key_file, encoded) + 
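# --- Editor's aside (illustrative sketch, not part of the vendored PyCryptodome
# --- sources): the encrypted-PKCS#8 round-trip that the surrounding TestExport_*
# --- cases check against fixed vectors can be reproduced with a fresh key; the
# --- curve, passphrase and names here are chosen only for the example.
from Crypto.PublicKey import ECC

demo_key = ECC.generate(curve='P-256')
demo_pem = demo_key.export_key(format='PEM',
                               passphrase='secret',
                               protection='PBKDF2WithHMAC-SHA1AndAES128-CBC')
assert 'ENCRYPTED PRIVATE KEY' in demo_pem             # output is password-protected
assert ECC.import_key(demo_pem, passphrase='secret') == demo_key
# --- end of editor's aside ---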
+ def test_export_private_pkcs8_and_pem_2(self): + # PKCS8 inside PEM with PKCS8 encryption + encoded = self.ref_private._export_private_encrypted_pkcs8_in_clear_pem("secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "ENCRYPTED PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_prng(self): + # Test that password-protected containers use the provided PRNG + encoded1 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + # --- + + encoded1 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + def test_byte_or_string_passphrase(self): + encoded1 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase=b"secret", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + def test_error_params1(self): + # Unknown format + self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") + + # Missing 'protection' parameter when PKCS#8 is used + self.ref_private.export_key(format="PEM", passphrase="secret", + use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="secret") + + # DER format but no PKCS#8 + self.assertRaises(ValueError, self.ref_private.export_key, format="DER", + passphrase="secret", + use_pkcs8=False, + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # Incorrect parameters for public keys + self.assertRaises(ValueError, self.ref_public.export_key, format="DER", + use_pkcs8=False) + + # Empty password + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + def test_compressed_curve(self): + + # Compressed P-192 curve (Y-point is even) + pem1 = """-----BEGIN EC PRIVATE KEY----- + MF8CAQEEGHvhXmIW95JxZYfd4AUPu9BwknjuvS36aqAKBggqhkjOPQMBAaE0AzIA + BLJZCyTu35DQIlqvMlBynn3k1Ig+dWfg/brRhHecxptrbloqFSP8ITw0CwbGF+2X + 5g== + -----END EC PRIVATE KEY-----""" + + # Compressed P-192 curve (Y-point is odd) + pem2 = """-----BEGIN EC PRIVATE KEY----- + MF8CAQEEGA3rAotUaWl7d47eX6tz9JmLzOMJwl13XaAKBggqhkjOPQMBAaE0AzIA + BG4tHlTBBBGokcWmGm2xubVB0NvPC/Ou5AYwivs+3iCxmEjsymVAj6iiuX2Lxr6g + /Q== + -----END EC PRIVATE KEY-----""" + + key1 = ECC.import_key(pem1) + low16 = int(key1.pointQ.y % 65536) + self.assertEqual(low16, 0x97E6) + + key2 = ECC.import_key(pem2) + low16 = int(key2.pointQ.y % 65536) + self.assertEqual(low16, 0xA0FD) + + +class 
TestExport_P224(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestExport_P224, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_p224() + + def test_export_public_der_uncompressed(self): + key_file = load_file("ecc_p224_public.der") + + encoded = self.ref_public._export_subjectPublicKeyInfo(False) + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_der_compressed(self): + key_file = load_file("ecc_p224_public.der") + pub_key = ECC.import_key(key_file) + key_file_compressed = pub_key.export_key(format="DER", compress=True) + + key_file_compressed_ref = load_file("ecc_p224_public_compressed.der") + self.assertEqual(key_file_compressed, key_file_compressed_ref) + + def test_export_public_sec1_uncompressed(self): + key_file = load_file("ecc_p224_public.der") + value = extract_bitstring_from_spki(key_file) + + encoded = self.ref_public.export_key(format="SEC1") + self.assertEqual(value, encoded) + + def test_export_public_sec1_compressed(self): + key_file = load_file("ecc_p224_public.der") + encoded = self.ref_public.export_key(format="SEC1", compress=True) + + key_file_compressed_ref = load_file("ecc_p224_public_compressed.der") + value = extract_bitstring_from_spki(key_file_compressed_ref) + self.assertEqual(value, encoded) + + def test_export_rfc5915_private_der(self): + key_file = load_file("ecc_p224_private.der") + + encoded = self.ref_private._export_rfc5915_private_der() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", use_pkcs8=False) + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_clear(self): + key_file = load_file("ecc_p224_private_p8_clear.der") + + encoded = self.ref_private._export_pkcs8() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="DER") + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_encrypted(self): + encoded = self.ref_private._export_pkcs8(passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) + + decoded = ECC._import_pkcs8(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA512-224AndAES128-CBC", + prot_params={'iteration_count':123}) + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_public_pem_uncompressed(self): + key_file = load_file("ecc_p224_public.pem", "rt").strip() + + encoded = self.ref_private._export_public_pem(False) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_public.export_key(format="PEM") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="PEM", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_pem_compressed(self): + key_file = load_file("ecc_p224_public.pem", "rt").strip() + pub_key = ECC.import_key(key_file) + + key_file_compressed = pub_key.export_key(format="PEM", compress=True) + key_file_compressed_ref = load_file("ecc_p224_public_compressed.pem", "rt").strip() + + self.assertEqual(key_file_compressed, 
key_file_compressed_ref) + + def test_export_private_pem_clear(self): + key_file = load_file("ecc_p224_private.pem", "rt").strip() + + encoded = self.ref_private._export_private_pem(None) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", use_pkcs8=False) + self.assertEqual(key_file, encoded) + + def test_export_private_pem_encrypted(self): + encoded = self.ref_private._export_private_pem(passphrase=b"secret") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "EC PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", + passphrase="secret", + use_pkcs8=False) + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_private_pkcs8_and_pem_1(self): + # PKCS8 inside PEM with both unencrypted + key_file = load_file("ecc_p224_private_p8_clear.pem", "rt").strip() + + encoded = self.ref_private._export_private_clear_pkcs8_in_clear_pem() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM") + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_and_pem_2(self): + # PKCS8 inside PEM with PKCS8 encryption + encoded = self.ref_private._export_private_encrypted_pkcs8_in_clear_pem("secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "ENCRYPTED PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_prng(self): + # Test that password-protected containers use the provided PRNG + encoded1 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + # --- + + encoded1 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + def test_byte_or_string_passphrase(self): + encoded1 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase=b"secret", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + def test_error_params1(self): + # Unknown format + self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") + + # Missing 'protection' parameter when PKCS#8 is used + self.ref_private.export_key(format="PEM", passphrase="secret", + use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="secret") + + # DER format but no PKCS#8 + self.assertRaises(ValueError, 
self.ref_private.export_key, format="DER", + passphrase="secret", + use_pkcs8=False, + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # Incorrect parameters for public keys + self.assertRaises(ValueError, self.ref_public.export_key, format="DER", + use_pkcs8=False) + + # Empty password + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + def test_compressed_curve(self): + + # Compressed P-224 curve (Y-point is even) + pem1 = """-----BEGIN EC PRIVATE KEY----- + MGgCAQEEHPYicBNI9nd6wDKAX2l+f3A0Q+KWUQeMqSt5GoOgBwYFK4EEACGhPAM6 + AATCL6rUIDT14zXKoS5GQUMDP/tpc+1iI/FyEZikt2roKDkhU5q08srmqaysbfJN + eUr7Xf1lnCVGag== + -----END EC PRIVATE KEY-----""" + + # Compressed P-224 curve (Y-point is odd) + pem2 = """-----BEGIN EC PRIVATE KEY----- + MGgCAQEEHEFjbaVPLJ3ngZyCibCvT0RLUqSlHjC5Z3e0FtugBwYFK4EEACGhPAM6 + AAT5IvL2V6m48y1JLMGr6ZbnOqNKP9hMf9mxyVkk6/SaRoBoJVkXrNIpYL0P7DS7 + QF8E/OGeZRwvow== + -----END EC PRIVATE KEY-----""" + + key1 = ECC.import_key(pem1) + low16 = int(key1.pointQ.y % 65536) + self.assertEqual(low16, 0x466A) + + key2 = ECC.import_key(pem2) + low16 = int(key2.pointQ.y % 65536) + self.assertEqual(low16, 0x2FA3) + + +class TestExport_P256(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestExport_P256, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_p256() + + def test_export_public_der_uncompressed(self): + key_file = load_file("ecc_p256_public.der") + + encoded = self.ref_public._export_subjectPublicKeyInfo(False) + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_der_compressed(self): + key_file = load_file("ecc_p256_public.der") + pub_key = ECC.import_key(key_file) + key_file_compressed = pub_key.export_key(format="DER", compress=True) + + key_file_compressed_ref = load_file("ecc_p256_public_compressed.der") + self.assertEqual(key_file_compressed, key_file_compressed_ref) + + def test_export_public_sec1_uncompressed(self): + key_file = load_file("ecc_p256_public.der") + value = extract_bitstring_from_spki(key_file) + + encoded = self.ref_public.export_key(format="SEC1") + self.assertEqual(value, encoded) + + def test_export_public_sec1_compressed(self): + key_file = load_file("ecc_p256_public.der") + encoded = self.ref_public.export_key(format="SEC1", compress=True) + + key_file_compressed_ref = load_file("ecc_p256_public_compressed.der") + value = extract_bitstring_from_spki(key_file_compressed_ref) + self.assertEqual(value, encoded) + + def test_export_rfc5915_private_der(self): + key_file = load_file("ecc_p256_private.der") + + encoded = self.ref_private._export_rfc5915_private_der() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", use_pkcs8=False) + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_clear(self): + key_file = load_file("ecc_p256_private_p8_clear.der") + + encoded = self.ref_private._export_pkcs8() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="DER") + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_encrypted(self): + encoded = 
self.ref_private._export_pkcs8(passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) + + decoded = ECC._import_pkcs8(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA512-256AndAES128-CBC", + prot_params={'iteration_count':123}) + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_public_pem_uncompressed(self): + key_file = load_file("ecc_p256_public.pem", "rt").strip() + + encoded = self.ref_private._export_public_pem(False) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_public.export_key(format="PEM") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="PEM", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_pem_compressed(self): + key_file = load_file("ecc_p256_public.pem", "rt").strip() + pub_key = ECC.import_key(key_file) + + key_file_compressed = pub_key.export_key(format="PEM", compress=True) + key_file_compressed_ref = load_file("ecc_p256_public_compressed.pem", "rt").strip() + + self.assertEqual(key_file_compressed, key_file_compressed_ref) + + def test_export_private_pem_clear(self): + key_file = load_file("ecc_p256_private.pem", "rt").strip() + + encoded = self.ref_private._export_private_pem(None) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", use_pkcs8=False) + self.assertEqual(key_file, encoded) + + def test_export_private_pem_encrypted(self): + encoded = self.ref_private._export_private_pem(passphrase=b"secret") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "EC PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", + passphrase="secret", + use_pkcs8=False) + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_private_pkcs8_and_pem_1(self): + # PKCS8 inside PEM with both unencrypted + key_file = load_file("ecc_p256_private_p8_clear.pem", "rt").strip() + + encoded = self.ref_private._export_private_clear_pkcs8_in_clear_pem() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM") + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_and_pem_2(self): + # PKCS8 inside PEM with PKCS8 encryption + encoded = self.ref_private._export_private_encrypted_pkcs8_in_clear_pem("secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "ENCRYPTED PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = 
ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_openssh_uncompressed(self): + key_file = load_file("ecc_p256_public_openssh.txt", "rt") + + encoded = self.ref_public._export_openssh(False) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_public.export_key(format="OpenSSH") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="OpenSSH", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_openssh_compressed(self): + key_file = load_file("ecc_p256_public_openssh.txt", "rt") + pub_key = ECC.import_key(key_file) + + key_file_compressed = pub_key.export_key(format="OpenSSH", compress=True) + assert len(key_file) > len(key_file_compressed) + self.assertEqual(pub_key, ECC.import_key(key_file_compressed)) + + def test_prng(self): + # Test that password-protected containers use the provided PRNG + encoded1 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + # --- + + encoded1 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + def test_byte_or_string_passphrase(self): + encoded1 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase=b"secret", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + def test_error_params1(self): + # Unknown format + self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") + + # Missing 'protection' parameter when PKCS#8 is used + self.ref_private.export_key(format="PEM", passphrase="secret", + use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="secret") + + # DER format but no PKCS#8 + self.assertRaises(ValueError, self.ref_private.export_key, format="DER", + passphrase="secret", + use_pkcs8=False, + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # Incorrect parameters for public keys + self.assertRaises(ValueError, self.ref_public.export_key, format="DER", + use_pkcs8=False) + + # Empty password + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # No private keys with OpenSSH + self.assertRaises(ValueError, self.ref_private.export_key, format="OpenSSH", + passphrase="secret") + + + def test_compressed_curve(self): + + # Compressed P-256 curve (Y-point is even) + pem1 = """-----BEGIN EC PRIVATE KEY----- + MFcCAQEEIHTuc09jC51xXomV6MVCDN+DpAAvSmaJWZPTEHM6D5H1oAoGCCqGSM49 + AwEHoSQDIgACWFuGbHe8yJ43rir7PMTE9w8vHz0BSpXHq90Xi7/s+a0= + -----END EC PRIVATE KEY-----""" + + # Compressed P-256 curve (Y-point is odd) + pem2 = """-----BEGIN EC PRIVATE KEY----- + MFcCAQEEIFggiPN9SQP+FAPTCPp08fRUz7rHp2qNBRcBJ1DXhb3ZoAoGCCqGSM49 + AwEHoSQDIgADLpph1trTIlVfa8NJvlMUPyWvL+wP+pW3BJITUL/wj9A= + -----END EC PRIVATE 
KEY-----""" + + key1 = ECC.import_key(pem1) + low16 = int(key1.pointQ.y % 65536) + self.assertEqual(low16, 0xA6FC) + + key2 = ECC.import_key(pem2) + low16 = int(key2.pointQ.y % 65536) + self.assertEqual(low16, 0x6E57) + + +class TestExport_P384(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestExport_P384, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_p384() + + def test_export_public_der_uncompressed(self): + key_file = load_file("ecc_p384_public.der") + + encoded = self.ref_public._export_subjectPublicKeyInfo(False) + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_der_compressed(self): + key_file = load_file("ecc_p384_public.der") + pub_key = ECC.import_key(key_file) + key_file_compressed = pub_key.export_key(format="DER", compress=True) + + key_file_compressed_ref = load_file("ecc_p384_public_compressed.der") + self.assertEqual(key_file_compressed, key_file_compressed_ref) + + def test_export_public_sec1_uncompressed(self): + key_file = load_file("ecc_p384_public.der") + value = extract_bitstring_from_spki(key_file) + + encoded = self.ref_public.export_key(format="SEC1") + self.assertEqual(value, encoded) + + def test_export_public_sec1_compressed(self): + key_file = load_file("ecc_p384_public.der") + encoded = self.ref_public.export_key(format="SEC1", compress=True) + + key_file_compressed_ref = load_file("ecc_p384_public_compressed.der") + value = extract_bitstring_from_spki(key_file_compressed_ref) + self.assertEqual(value, encoded) + + def test_export_rfc5915_private_der(self): + key_file = load_file("ecc_p384_private.der") + + encoded = self.ref_private._export_rfc5915_private_der() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", use_pkcs8=False) + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_clear(self): + key_file = load_file("ecc_p384_private_p8_clear.der") + + encoded = self.ref_private._export_pkcs8() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="DER") + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_encrypted(self): + encoded = self.ref_private._export_pkcs8(passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) + + decoded = ECC._import_pkcs8(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA384AndAES128-CBC", + prot_params={'iteration_count':123}) + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_public_pem_uncompressed(self): + key_file = load_file("ecc_p384_public.pem", "rt").strip() + + encoded = self.ref_private._export_public_pem(False) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_public.export_key(format="PEM") + self.assertEqual(key_file, encoded) + + 
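+ # Passing compress=False explicitly should be equivalent to the default call above and match the same uncompressed reference.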
encoded = self.ref_public.export_key(format="PEM", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_pem_compressed(self): + key_file = load_file("ecc_p384_public.pem", "rt").strip() + pub_key = ECC.import_key(key_file) + + key_file_compressed = pub_key.export_key(format="PEM", compress=True) + key_file_compressed_ref = load_file("ecc_p384_public_compressed.pem", "rt").strip() + + self.assertEqual(key_file_compressed, key_file_compressed_ref) + + def test_export_private_pem_clear(self): + key_file = load_file("ecc_p384_private.pem", "rt").strip() + + encoded = self.ref_private._export_private_pem(None) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", use_pkcs8=False) + self.assertEqual(key_file, encoded) + + def test_export_private_pem_encrypted(self): + encoded = self.ref_private._export_private_pem(passphrase=b"secret") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "EC PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", + passphrase="secret", + use_pkcs8=False) + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_private_pkcs8_and_pem_1(self): + # PKCS8 inside PEM with both unencrypted + key_file = load_file("ecc_p384_private_p8_clear.pem", "rt").strip() + + encoded = self.ref_private._export_private_clear_pkcs8_in_clear_pem() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM") + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_and_pem_2(self): + # PKCS8 inside PEM with PKCS8 encryption + encoded = self.ref_private._export_private_encrypted_pkcs8_in_clear_pem("secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "ENCRYPTED PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_openssh_uncompressed(self): + key_file = load_file("ecc_p384_public_openssh.txt", "rt") + + encoded = self.ref_public._export_openssh(False) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_public.export_key(format="OpenSSH") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="OpenSSH", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_openssh_compressed(self): + key_file = load_file("ecc_p384_public_openssh.txt", "rt") + pub_key = ECC.import_key(key_file) + + key_file_compressed = pub_key.export_key(format="OpenSSH", compress=True) + assert len(key_file) > len(key_file_compressed) + self.assertEqual(pub_key, ECC.import_key(key_file_compressed)) + + def test_prng(self): + # Test that password-protected containers use the provided PRNG + encoded1 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + passphrase="secret", + 
protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + # --- + + encoded1 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + def test_byte_or_string_passphrase(self): + encoded1 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase=b"secret", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + def test_error_params1(self): + # Unknown format + self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") + + # Missing 'protection' parameter when PKCS#8 is used + self.ref_private.export_key(format="PEM", passphrase="secret", + use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="secret") + + # DER format but no PKCS#8 + self.assertRaises(ValueError, self.ref_private.export_key, format="DER", + passphrase="secret", + use_pkcs8=False, + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # Incorrect parameters for public keys + self.assertRaises(ValueError, self.ref_public.export_key, format="DER", + use_pkcs8=False) + + # Empty password + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # No private keys with OpenSSH + self.assertRaises(ValueError, self.ref_private.export_key, format="OpenSSH", + passphrase="secret") + + def test_compressed_curve(self): + + # Compressed P-384 curve (Y-point is even) + # openssl ecparam -name secp384p1 -genkey -noout -conv_form compressed -out /tmp/a.pem + # openssl ec -in /tmp/a.pem -text -noout + pem1 = """-----BEGIN EC PRIVATE KEY----- +MIGkAgEBBDAM0lEIhvXuekK2SWtdbgOcZtBaxa9TxfpO/GcDFZLCJ3JVXaTgwken +QT+C+XLtD6WgBwYFK4EEACKhZANiAATs0kZMhFDu8DoBC21jrSDPyAUn4aXZ/DM4 +ylhDfWmb4LEbeszXceIzfhIUaaGs5y1xXaqf5KXTiAAYx2pKUzAAM9lcGUHCGKJG +k4AgUmVJON29XoUilcFrzjDmuye3B6Q= +-----END EC PRIVATE KEY-----""" + + # Compressed P-384 curve (Y-point is odd) + pem2 = """-----BEGIN EC PRIVATE KEY----- +MIGkAgEBBDDHPFTslYLltE16fHdSDTtE/2HTmd3M8mqy5MttAm4wZ833KXiGS9oe +kFdx9sNV0KygBwYFK4EEACKhZANiAASLIE5RqVMtNhtBH/u/p/ifqOAlKnK/+RrQ +YC46ZRsnKNayw3wATdPjgja7L/DSII3nZK0G6KOOVwJBznT/e+zudUJYhZKaBLRx +/bgXyxUtYClOXxb1Y/5N7txLstYRyP0= +-----END EC PRIVATE KEY-----""" + + key1 = ECC.import_key(pem1) + low16 = int(key1.pointQ.y % 65536) + self.assertEqual(low16, 0x07a4) + + key2 = ECC.import_key(pem2) + low16 = int(key2.pointQ.y % 65536) + self.assertEqual(low16, 0xc8fd) + + +class TestExport_P521(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestExport_P521, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_p521() + + def test_export_public_der_uncompressed(self): + key_file = load_file("ecc_p521_public.der") + + encoded = self.ref_public._export_subjectPublicKeyInfo(False) + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER", compress=False) + self.assertEqual(key_file, 
encoded) + + def test_export_public_der_compressed(self): + key_file = load_file("ecc_p521_public.der") + pub_key = ECC.import_key(key_file) + key_file_compressed = pub_key.export_key(format="DER", compress=True) + + key_file_compressed_ref = load_file("ecc_p521_public_compressed.der") + self.assertEqual(key_file_compressed, key_file_compressed_ref) + + def test_export_public_sec1_uncompressed(self): + key_file = load_file("ecc_p521_public.der") + value = extract_bitstring_from_spki(key_file) + + encoded = self.ref_public.export_key(format="SEC1") + self.assertEqual(value, encoded) + + encoded = self.ref_public.export_key(format="raw") + self.assertEqual(value, encoded) + + def test_export_public_sec1_compressed(self): + key_file = load_file("ecc_p521_public.der") + encoded = self.ref_public.export_key(format="SEC1", compress=True) + + key_file_compressed_ref = load_file("ecc_p521_public_compressed.der") + value = extract_bitstring_from_spki(key_file_compressed_ref) + self.assertEqual(value, encoded) + + encoded = self.ref_public.export_key(format="raw", compress=True) + self.assertEqual(value, encoded) + + def test_export_rfc5915_private_der(self): + key_file = load_file("ecc_p521_private.der") + + encoded = self.ref_private._export_rfc5915_private_der() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", use_pkcs8=False) + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_clear(self): + key_file = load_file("ecc_p521_private_p8_clear.der") + + encoded = self.ref_private._export_pkcs8() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="DER") + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_encrypted(self): + encoded = self.ref_private._export_pkcs8(passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) + + decoded = ECC._import_pkcs8(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + prot_params={'iteration_count':123}) + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_public_pem_uncompressed(self): + key_file = load_file("ecc_p521_public.pem", "rt").strip() + + encoded = self.ref_private._export_public_pem(False) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_public.export_key(format="PEM") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="PEM", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_pem_compressed(self): + key_file = load_file("ecc_p521_public.pem", "rt").strip() + pub_key = ECC.import_key(key_file) + + key_file_compressed = pub_key.export_key(format="PEM", compress=True) + key_file_compressed_ref = load_file("ecc_p521_public_compressed.pem", "rt").strip() + + self.assertEqual(key_file_compressed, key_file_compressed_ref) + + def test_export_private_pem_clear(self): + key_file = load_file("ecc_p521_private.pem", "rt").strip() + + encoded = 
self.ref_private._export_private_pem(None) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", use_pkcs8=False) + self.assertEqual(key_file, encoded) + + def test_export_private_pem_encrypted(self): + encoded = self.ref_private._export_private_pem(passphrase=b"secret") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "EC PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", + passphrase="secret", + use_pkcs8=False) + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_private_pkcs8_and_pem_1(self): + # PKCS8 inside PEM with both unencrypted + key_file = load_file("ecc_p521_private_p8_clear.pem", "rt").strip() + + encoded = self.ref_private._export_private_clear_pkcs8_in_clear_pem() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM") + self.assertEqual(key_file, encoded) + + def test_export_private_pkcs8_and_pem_2(self): + # PKCS8 inside PEM with PKCS8 encryption + encoded = self.ref_private._export_private_encrypted_pkcs8_in_clear_pem("secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "ENCRYPTED PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_openssh_uncompressed(self): + key_file = load_file("ecc_p521_public_openssh.txt", "rt") + + encoded = self.ref_public._export_openssh(False) + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_public.export_key(format="OpenSSH") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="OpenSSH", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_openssh_compressed(self): + key_file = load_file("ecc_p521_public_openssh.txt", "rt") + pub_key = ECC.import_key(key_file) + + key_file_compressed = pub_key.export_key(format="OpenSSH", compress=True) + assert len(key_file) > len(key_file_compressed) + self.assertEqual(pub_key, ECC.import_key(key_file_compressed)) + + def test_prng(self): + # Test that password-protected containers use the provided PRNG + encoded1 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + # --- + + encoded1 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + def test_byte_or_string_passphrase(self): + encoded1 = self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase="secret", + randfunc=get_fixed_prng()) + encoded2 = 
self.ref_private.export_key(format="PEM", + use_pkcs8=False, + passphrase=b"secret", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + def test_error_params1(self): + # Unknown format + self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") + + # Missing 'protection' parameter when PKCS#8 is used + self.ref_private.export_key(format="PEM", passphrase="secret", + use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="secret") + + # DER format but no PKCS#8 + self.assertRaises(ValueError, self.ref_private.export_key, format="DER", + passphrase="secret", + use_pkcs8=False, + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # Incorrect parameters for public keys + self.assertRaises(ValueError, self.ref_public.export_key, format="DER", + use_pkcs8=False) + + # Empty password + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # No private keys with OpenSSH + self.assertRaises(ValueError, self.ref_private.export_key, format="OpenSSH", + passphrase="secret") + + def test_compressed_curve(self): + + # Compressed P-521 curve (Y-point is even) + # openssl ecparam -name secp521r1 -genkey -noout -conv_form compressed -out /tmp/a.pem + # openssl ec -in /tmp/a.pem -text -noout + pem1 = """-----BEGIN EC PRIVATE KEY----- +MIHcAgEBBEIAnm1CEjVjvNfXEN730p+D6su5l+mOztdc5XmTEoti+s2R4GQ4mAv3 +0zYLvyklvOHw0+yy8d0cyGEJGb8T3ZVKmg2gBwYFK4EEACOhgYkDgYYABAHzjTI1 +ckxQ3Togi0LAxiG0PucdBBBs5oIy3df95xv6SInp70z+4qQ2EltEmdNMssH8eOrl +M5CYdZ6nbcHMVaJUvQEzTrYxvFjOgJiOd+E9eBWbLkbMNqsh1UKVO6HbMbW0ohCI +uGxO8tM6r3w89/qzpG2SvFM/fvv3mIR30wSZDD84qA== +-----END EC PRIVATE KEY-----""" + + # Compressed P-521 curve (Y-point is odd) + pem2 = """-----BEGIN EC PRIVATE KEY----- +MIHcAgEBBEIB84OfhJluLBRLn3+cC/RQ37C2SfQVP/t0gQK2tCsTf5avRcWYRrOJ +PmX9lNnkC0Hobd75QFRmdxrB0Wd1/M4jZOWgBwYFK4EEACOhgYkDgYYABAAMZcdJ +1YLCGHt3bHCEzdidVy6+brlJIbv1aQ9fPQLF7WKNv4c8w3H8d5a2+SDZilBOsk5c +6cNJDMz2ExWQvxl4CwDJtJGt1+LHVKFGy73NANqVxMbRu+2F8lOxkNp/ziFTbVyV +vv6oYkMIIi7r5oQWAiQDrR2mlrrFDL9V7GH/r8SWQw== +-----END EC PRIVATE KEY-----""" + + key1 = ECC.import_key(pem1) + low16 = int(key1.pointQ.y % 65536) + self.assertEqual(low16, 0x38a8) + + key2 = ECC.import_key(pem2) + low16 = int(key2.pointQ.y % 65536) + self.assertEqual(low16, 0x9643) + + +class TestImport_Ed25519(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestImport_Ed25519, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_ed25519() + + def test_import_public_der(self): + key_file = load_file("ecc_ed25519_public.der") + + key = ECC._import_subjectPublicKeyInfo(key_file) + self.assertEqual(self.ref_public, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_pkcs8_der(self): + key_file = load_file("ecc_ed25519_private.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_1(self): + key_file = load_file("ecc_ed25519_private_p8.der") + + key = ECC._import_der(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file, "secret") + 
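+ # The public import_key() entry point must recover the same reference key as the low-level path above.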
self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_2(self): + key_file = load_file("ecc_ed25519_private_p8.pem") + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_3(self): + key_file = load_file("ecc_ed25519_private_p8_2.der") + + key = ECC._import_der(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_der(self): + key_file = load_file("ecc_ed25519_x509.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_public_pem(self): + key_file = load_file("ecc_ed25519_public.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_private_pem(self): + key_file = load_file("ecc_ed25519_private.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pem_encrypted(self): + for algo in "des3", "aes128", "aes192", "aes256": + key_file = load_file("ecc_ed25519_private_enc_%s.pem" % algo) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(tostr(key_file), b"secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_pem(self): + key_file = load_file("ecc_ed25519_x509.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_openssh_public(self): + key_file = load_file("ecc_ed25519_public_openssh.txt") + key = ECC._import_openssh_public(key_file) + self.assertFalse(key.has_private()) + key = ECC.import_key(key_file) + self.assertFalse(key.has_private()) + + def test_import_openssh_private_clear(self): + key_file = load_file("ecc_ed25519_private_openssh.pem") + key = ECC.import_key(key_file) + + def test_import_openssh_private_password(self): + key_file = load_file("ecc_ed25519_private_openssh_pwd.pem") + key = ECC.import_key(key_file, b"password") + + +class TestExport_Ed25519(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestExport_Ed25519, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_ed25519() + + def test_export_public_der(self): + key_file = load_file("ecc_ed25519_public.der") + + encoded = self.ref_public._export_subjectPublicKeyInfo(True) + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_sec1(self): + self.assertRaises(ValueError, self.ref_public.export_key, format="SEC1") + + def test_export_private_pkcs8_clear(self): + key_file = load_file("ecc_ed25519_private.der") + + encoded = self.ref_private._export_pkcs8() + self.assertEqual(key_file, encoded) + + # --- + + encoded = self.ref_private.export_key(format="DER") + self.assertEqual(key_file, encoded) + + self.assertRaises(ValueError, self.ref_private.export_key, + format="DER", use_pkcs8=False) + + def test_export_private_pkcs8_encrypted(self): + encoded = self.ref_private._export_pkcs8(passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) + + decoded = 
ECC._import_pkcs8(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA256AndAES128-CBC", + prot_params={'iteration_count':123}) + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_public_pem(self): + key_file_ref = load_file("ecc_ed25519_public.pem", "rt").strip() + key_file = self.ref_public.export_key(format="PEM").strip() + self.assertEqual(key_file_ref, key_file) + + def test_export_private_pem_clear(self): + key_file = load_file("ecc_ed25519_private.pem", "rt").strip() + encoded = self.ref_private.export_key(format="PEM").strip() + self.assertEqual(key_file, encoded) + + def test_export_private_pem_encrypted(self): + encoded = self.ref_private.export_key(format="PEM", + passphrase=b"secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "ENCRYPTED PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_openssh(self): + key_file = load_file("ecc_ed25519_public_openssh.txt", "rt") + public_key = ECC.import_key(key_file) + key_file = " ".join(key_file.split(' ')[:2]) # remove comment + + encoded = public_key._export_openssh(False) + self.assertEqual(key_file, encoded.strip()) + + encoded = public_key.export_key(format="OpenSSH") + self.assertEqual(key_file, encoded.strip()) + + def test_export_raw(self): + encoded = self.ref_public.export_key(format='raw') + self.assertEqual(encoded, unhexlify(b'bc85b8cf585d20a4de47e84d1cb6183f63d9ba96223fcbc886e363ffdea20cff')) + + def test_prng(self): + # Test that password-protected containers use the provided PRNG + encoded1 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + def test_byte_or_string_passphrase(self): + encoded1 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + passphrase=b"secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + def test_error_params1(self): + # Unknown format + self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") + + # Missing 'protection' parameter when PKCS#8 is used + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="secret") + + # Empty password + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", use_pkcs8=False) + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # No private keys with OpenSSH + self.assertRaises(ValueError, self.ref_private.export_key, format="OpenSSH", + 
passphrase="secret") + + +class TestImport_Ed448(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestImport_Ed448, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_ed448() + + def test_import_public_der(self): + key_file = load_file("ecc_ed448_public.der") + + key = ECC._import_subjectPublicKeyInfo(key_file) + self.assertEqual(self.ref_public, key) + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_pkcs8_der(self): + key_file = load_file("ecc_ed448_private.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_1(self): + key_file = load_file("ecc_ed448_private_p8.der") + + key = ECC._import_der(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_2(self): + key_file = load_file("ecc_ed448_private_p8.pem") + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_private_pkcs8_encrypted_3(self): + key_file = load_file("ecc_ed448_private_p8_2.der") + + key = ECC._import_der(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_der(self): + key_file = load_file("ecc_ed448_x509.der") + + key = ECC._import_der(key_file, None) + self.assertEqual(self.ref_public, key) + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_public_pem(self): + key_file = load_file("ecc_ed448_public.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + def test_import_private_pem(self): + key_file = load_file("ecc_ed448_private.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_private, key) + + def test_import_private_pem_encrypted(self): + for algo in "des3", "aes128", "aes192", "aes256": + key_file = load_file("ecc_ed448_private_enc_%s.pem" % algo) + + key = ECC.import_key(key_file, "secret") + self.assertEqual(self.ref_private, key) + + key = ECC.import_key(tostr(key_file), b"secret") + self.assertEqual(self.ref_private, key) + + def test_import_x509_pem(self): + key_file = load_file("ecc_ed448_x509.pem") + + key = ECC.import_key(key_file) + self.assertEqual(self.ref_public, key) + + +class TestExport_Ed448(unittest.TestCase): + + def __init__(self, *args, **kwargs): + super(TestExport_Ed448, self).__init__(*args, **kwargs) + self.ref_private, self.ref_public = create_ref_keys_ed448() + + def test_export_public_der(self): + key_file = load_file("ecc_ed448_public.der") + + encoded = self.ref_public._export_subjectPublicKeyInfo(True) + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER") + self.assertEqual(key_file, encoded) + + encoded = self.ref_public.export_key(format="DER", compress=False) + self.assertEqual(key_file, encoded) + + def test_export_public_sec1(self): + self.assertRaises(ValueError, self.ref_public.export_key, format="SEC1") + + def test_export_private_pkcs8_clear(self): + key_file = load_file("ecc_ed448_private.der") + + encoded = self.ref_private._export_pkcs8() + self.assertEqual(key_file, encoded) + + # --- + + encoded = 
self.ref_private.export_key(format="DER") + self.assertEqual(key_file, encoded) + + self.assertRaises(ValueError, self.ref_private.export_key, + format="DER", use_pkcs8=False) + + def test_export_private_pkcs8_encrypted(self): + encoded = self.ref_private._export_pkcs8(passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC._import_pkcs8, encoded, None) + + decoded = ECC._import_pkcs8(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + # --- + + encoded = self.ref_private.export_key(format="DER", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA384AndAES128-CBC", + prot_params={'iteration_count':123}) + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + + def test_export_public_pem(self): + key_file_ref = load_file("ecc_ed448_public.pem", "rt").strip() + key_file = self.ref_public.export_key(format="PEM").strip() + self.assertEqual(key_file_ref, key_file) + + def test_export_private_pem_clear(self): + key_file = load_file("ecc_ed448_private.pem", "rt").strip() + encoded = self.ref_private.export_key(format="PEM").strip() + self.assertEqual(key_file, encoded) + + def test_export_private_pem_encrypted(self): + encoded = self.ref_private.export_key(format="PEM", + passphrase=b"secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # This should prove that the output is password-protected + self.assertRaises(ValueError, ECC.import_key, encoded) + + assert "ENCRYPTED PRIVATE KEY" in encoded + + decoded = ECC.import_key(encoded, "secret") + self.assertEqual(self.ref_private, decoded) + + def test_export_openssh(self): + # Not supported + self.assertRaises(ValueError, self.ref_public.export_key, format="OpenSSH") + + def test_export_raw(self): + encoded = self.ref_public.export_key(format='raw') + self.assertEqual(encoded, unhexlify(b'899014ddc0a0e1260cfc1085afdf952019e9fd63372e3e366e26dad32b176624884330a14617237e3081febd9d1a15069e7499433d2f55dd80')) + + def test_prng(self): + # Test that password-protected containers use the provided PRNG + encoded1 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + def test_byte_or_string_passphrase(self): + encoded1 = self.ref_private.export_key(format="PEM", + passphrase="secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + encoded2 = self.ref_private.export_key(format="PEM", + passphrase=b"secret", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC", + randfunc=get_fixed_prng()) + self.assertEqual(encoded1, encoded2) + + def test_error_params1(self): + # Unknown format + self.assertRaises(ValueError, self.ref_private.export_key, format="XXX") + + # Missing 'protection' parameter when PKCS#8 is used + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="secret") + + # Empty password + self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", use_pkcs8=False) + 
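+ # An empty passphrase must likewise be rejected on the encrypted PKCS#8 route.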
self.assertRaises(ValueError, self.ref_private.export_key, format="PEM", + passphrase="", + protection="PBKDF2WithHMAC-SHA1AndAES128-CBC") + + # No private keys with OpenSSH + self.assertRaises(ValueError, self.ref_private.export_key, format="OpenSSH", + passphrase="secret") + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(TestImport) + try: + tests += list_test_cases(TestImport_P192) + tests += list_test_cases(TestImport_P224) + tests += list_test_cases(TestImport_P256) + tests += list_test_cases(TestImport_P384) + tests += list_test_cases(TestImport_P521) + tests += list_test_cases(TestImport_Ed25519) + tests += list_test_cases(TestImport_Ed448) + + tests += list_test_cases(TestExport_P192) + tests += list_test_cases(TestExport_P224) + tests += list_test_cases(TestExport_P256) + tests += list_test_cases(TestExport_P384) + tests += list_test_cases(TestExport_P521) + tests += list_test_cases(TestExport_Ed25519) + tests += list_test_cases(TestExport_Ed448) + + except MissingTestVectorException: + pass + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_import_RSA.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_import_RSA.py new file mode 100644 index 000000000..59a9c6d0f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/PublicKey/test_import_RSA.py @@ -0,0 +1,629 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/PublicKey/test_importKey.py: Self-test for importing RSA keys +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +import os +import re +import errno +import warnings +import unittest + +from Crypto.PublicKey import RSA +from Crypto.SelfTest.st_common import a2b_hex, list_test_cases +from Crypto.IO import PEM +from Crypto.Util.py3compat import b, tostr, FileNotFoundError +from Crypto.Util.number import inverse, bytes_to_long +from Crypto.Util import asn1 + +try: + import pycryptodome_test_vectors # type: ignore + test_vectors_available = True +except ImportError: + test_vectors_available = False + + +def load_file(file_name, mode="rb"): + results = None + + try: + if not test_vectors_available: + raise FileNotFoundError(errno.ENOENT, + os.strerror(errno.ENOENT), + file_name) + + dir_comps = ("PublicKey", "RSA") + init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) + full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) + with open(full_file_name, mode) as file_in: + results = file_in.read() + + except FileNotFoundError: + warnings.warn("Warning: skipping extended tests for RSA", + UserWarning, + stacklevel=2) + + return results + + +def der2pem(der, text='PUBLIC'): + import binascii + chunks = [binascii.b2a_base64(der[i:i+48]) for i in range(0, len(der), 48)] + pem = b('-----BEGIN %s KEY-----\n' % text) + pem += b('').join(chunks) + pem += b('-----END %s KEY-----' % text) + return pem + + +class ImportKeyTests(unittest.TestCase): + # 512-bit RSA key generated with openssl + rsaKeyPEM = u'''-----BEGIN RSA PRIVATE KEY----- +MIIBOwIBAAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+TLr7UkvEtFrRhDDKMtuII +q19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQJACUSDEp8RTe32ftq8IwG8 +Wojl5mAd1wFiIOrZ/Uv8b963WJOJiuQcVN29vxU5+My9GPZ7RA3hrDBEAoHUDPrI +OQIhAPIPLz4dphiD9imAkivY31Rc5AfHJiQRA7XixTcjEkojAiEAyh/pJHks/Mlr ++rdPNEpotBjfV4M4BkgGAA/ipcmaAjcCIQCHvhwwKVBLzzTscT2HeUdEeBMoiXXK +JACAr3sJQJGxIQIgarRp+m1WSKV1MciwMaTOnbU7wxFs9DP1pva76lYBzgUCIQC9 +n0CnZCJ6IZYqSt0H5N7+Q+2Ro64nuwV/OSQfM6sBwQ== +-----END RSA PRIVATE KEY-----''' + + # As above, but this is actually an unencrypted PKCS#8 key + rsaKeyPEM8 = u'''-----BEGIN PRIVATE KEY----- +MIIBVQIBADANBgkqhkiG9w0BAQEFAASCAT8wggE7AgEAAkEAvx4nkAqgiyNRGlwS +ga5tkzEsPv6RP5MuvtSS8S0WtGEMMoy24girX0WsvilQgzKY8xIsGfeEkt7fQPDj +wZAzhQIDAQABAkAJRIMSnxFN7fZ+2rwjAbxaiOXmYB3XAWIg6tn9S/xv3rdYk4mK +5BxU3b2/FTn4zL0Y9ntEDeGsMEQCgdQM+sg5AiEA8g8vPh2mGIP2KYCSK9jfVFzk +B8cmJBEDteLFNyMSSiMCIQDKH+kkeSz8yWv6t080Smi0GN9XgzgGSAYAD+KlyZoC +NwIhAIe+HDApUEvPNOxxPYd5R0R4EyiJdcokAICvewlAkbEhAiBqtGn6bVZIpXUx +yLAxpM6dtTvDEWz0M/Wm9rvqVgHOBQIhAL2fQKdkInohlipK3Qfk3v5D7ZGjrie7 +BX85JB8zqwHB +-----END PRIVATE KEY-----''' + + # The same RSA private key as in rsaKeyPEM, but now encrypted + rsaKeyEncryptedPEM = ( + + # PEM encryption + # With DES and passphrase 'test' + ('test', u'''-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-CBC,AF8F9A40BD2FA2FC + +Ckl9ex1kaVEWhYC2QBmfaF+YPiR4NFkRXA7nj3dcnuFEzBnY5XULupqQpQI3qbfA +u8GYS7+b3toWWiHZivHbAAUBPDIZG9hKDyB9Sq2VMARGsX1yW1zhNvZLIiVJzUHs +C6NxQ1IJWOXzTew/xM2I26kPwHIvadq+/VaT8gLQdjdH0jOiVNaevjWnLgrn1mLP +BCNRMdcexozWtAFNNqSzfW58MJL2OdMi21ED184EFytIc1BlB+FZiGZduwKGuaKy +9bMbdb/1PSvsSzPsqW7KSSrTw6MgJAFJg6lzIYvR5F4poTVBxwBX3+EyEmShiaNY +IRX3TgQI0IjrVuLmvlZKbGWP18FXj7I7k9tSsNOOzllTTdq3ny5vgM3A+ynfAaxp +dysKznQ6P+IoqML1WxAID4aGRMWka+uArOJ148Rbj9s= +-----END RSA PRIVATE KEY-----'''), + + # PKCS8 encryption + ('winter', u'''-----BEGIN ENCRYPTED PRIVATE KEY----- +MIIBpjBABgkqhkiG9w0BBQ0wMzAbBgkqhkiG9w0BBQwwDgQIeZIsbW3O+JcCAggA 
+MBQGCCqGSIb3DQMHBAgSM2p0D8FilgSCAWBhFyP2tiGKVpGj3mO8qIBzinU60ApR +3unvP+N6j7LVgnV2lFGaXbJ6a1PbQXe+2D6DUyBLo8EMXrKKVLqOMGkFMHc0UaV6 +R6MmrsRDrbOqdpTuVRW+NVd5J9kQQh4xnfU/QrcPPt7vpJvSf4GzG0n666Ki50OV +M/feuVlIiyGXY6UWdVDpcOV72cq02eNUs/1JWdh2uEBvA9fCL0c07RnMrdT+CbJQ +NjJ7f8ULtp7xvR9O3Al/yJ4Wv3i4VxF1f3MCXzhlUD4I0ONlr0kJWgeQ80q/cWhw +ntvgJwnCn2XR1h6LA8Wp+0ghDTsL2NhJpWd78zClGhyU4r3hqu1XDjoXa7YCXCix +jCV15+ViDJzlNCwg+W6lRg18sSLkCT7alviIE0U5tHc6UPbbHwT5QqAxAABaP+nZ +CGqJGyiwBzrKebjgSm/KRd4C91XqcsysyH2kKPfT51MLAoD4xelOURBP +-----END ENCRYPTED PRIVATE KEY-----''' + ), + ) + + rsaPublicKeyPEM = u'''-----BEGIN PUBLIC KEY----- +MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+T +Lr7UkvEtFrRhDDKMtuIIq19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQ== +-----END PUBLIC KEY-----''' + + # Obtained using 'ssh-keygen -i -m PKCS8 -f rsaPublicKeyPEM' + rsaPublicKeyOpenSSH = b('''ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAQQC/HieQCqCLI1EaXBKBrm2TMSw+/pE/ky6+1JLxLRa0YQwyjLbiCKtfRay+KVCDMpjzEiwZ94SS3t9A8OPBkDOF comment\n''') + + # The private key, in PKCS#1 format encoded with DER + rsaKeyDER = a2b_hex( + '''3082013b020100024100bf1e27900aa08b23511a5c1281ae6d93312c3efe + 913f932ebed492f12d16b4610c328cb6e208ab5f45acbe2950833298f312 + 2c19f78492dedf40f0e3c190338502030100010240094483129f114dedf6 + 7edabc2301bc5a88e5e6601dd7016220ead9fd4bfc6fdeb75893898ae41c + 54ddbdbf1539f8ccbd18f67b440de1ac30440281d40cfac839022100f20f + 2f3e1da61883f62980922bd8df545ce407c726241103b5e2c53723124a23 + 022100ca1fe924792cfcc96bfab74f344a68b418df578338064806000fe2 + a5c99a023702210087be1c3029504bcf34ec713d877947447813288975ca + 240080af7b094091b12102206ab469fa6d5648a57531c8b031a4ce9db53b + c3116cf433f5a6f6bbea5601ce05022100bd9f40a764227a21962a4add07 + e4defe43ed91a3ae27bb057f39241f33ab01c1 + '''.replace(" ","")) + + # The private key, in unencrypted PKCS#8 format encoded with DER + rsaKeyDER8 = a2b_hex( + '''30820155020100300d06092a864886f70d01010105000482013f3082013 + b020100024100bf1e27900aa08b23511a5c1281ae6d93312c3efe913f932 + ebed492f12d16b4610c328cb6e208ab5f45acbe2950833298f3122c19f78 + 492dedf40f0e3c190338502030100010240094483129f114dedf67edabc2 + 301bc5a88e5e6601dd7016220ead9fd4bfc6fdeb75893898ae41c54ddbdb + f1539f8ccbd18f67b440de1ac30440281d40cfac839022100f20f2f3e1da + 61883f62980922bd8df545ce407c726241103b5e2c53723124a23022100c + a1fe924792cfcc96bfab74f344a68b418df578338064806000fe2a5c99a0 + 23702210087be1c3029504bcf34ec713d877947447813288975ca240080a + f7b094091b12102206ab469fa6d5648a57531c8b031a4ce9db53bc3116cf + 433f5a6f6bbea5601ce05022100bd9f40a764227a21962a4add07e4defe4 + 3ed91a3ae27bb057f39241f33ab01c1 + '''.replace(" ","")) + + rsaPublicKeyDER = a2b_hex( + '''305c300d06092a864886f70d0101010500034b003048024100bf1e27900a + a08b23511a5c1281ae6d93312c3efe913f932ebed492f12d16b4610c328c + b6e208ab5f45acbe2950833298f3122c19f78492dedf40f0e3c190338502 + 03010001 + '''.replace(" ","")) + + n = int('BF 1E 27 90 0A A0 8B 23 51 1A 5C 12 81 AE 6D 93 31 2C 3E FE 91 3F 93 2E BE D4 92 F1 2D 16 B4 61 0C 32 8C B6 E2 08 AB 5F 45 AC BE 29 50 83 32 98 F3 12 2C 19 F7 84 92 DE DF 40 F0 E3 C1 90 33 85'.replace(" ",""),16) + e = 65537 + d = int('09 44 83 12 9F 11 4D ED F6 7E DA BC 23 01 BC 5A 88 E5 E6 60 1D D7 01 62 20 EA D9 FD 4B FC 6F DE B7 58 93 89 8A E4 1C 54 DD BD BF 15 39 F8 CC BD 18 F6 7B 44 0D E1 AC 30 44 02 81 D4 0C FA C8 39'.replace(" ",""),16) + p = int('00 F2 0F 2F 3E 1D A6 18 83 F6 29 80 92 2B D8 DF 54 5C E4 07 C7 26 24 11 03 B5 E2 C5 37 23 12 4A 23'.replace(" ",""),16) + q = int('00 CA 1F E9 24 79 2C FC C9 6B FA B7 4F 34 4A 68 B4 18 
DF 57 83 38 06 48 06 00 0F E2 A5 C9 9A 02 37'.replace(" ",""),16) + + # This is q^{-1} mod p. fastmath and slowmath use pInv (p^{-1} + # mod q) instead! + qInv = int('00 BD 9F 40 A7 64 22 7A 21 96 2A 4A DD 07 E4 DE FE 43 ED 91 A3 AE 27 BB 05 7F 39 24 1F 33 AB 01 C1'.replace(" ",""),16) + pInv = inverse(p,q) + + def testImportKey1(self): + """Verify import of RSAPrivateKey DER SEQUENCE""" + key = RSA.importKey(self.rsaKeyDER) + self.assertTrue(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey2(self): + """Verify import of SubjectPublicKeyInfo DER SEQUENCE""" + key = RSA.importKey(self.rsaPublicKeyDER) + self.assertFalse(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def testImportKey3unicode(self): + """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as unicode""" + key = RSA.importKey(self.rsaKeyPEM) + self.assertEqual(key.has_private(),True) # assert_ + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey3bytes(self): + """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as byte string""" + key = RSA.importKey(b(self.rsaKeyPEM)) + self.assertEqual(key.has_private(),True) # assert_ + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey4unicode(self): + """Verify import of SubjectPublicKeyInfo DER SEQUENCE, encoded with PEM as unicode""" + key = RSA.importKey(self.rsaPublicKeyPEM) + self.assertEqual(key.has_private(),False) # assertFalse + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def testImportKey4bytes(self): + """Verify import of SubjectPublicKeyInfo DER SEQUENCE, encoded with PEM as byte string""" + key = RSA.importKey(b(self.rsaPublicKeyPEM)) + self.assertEqual(key.has_private(),False) # assertFalse + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def testImportKey5(self): + """Verifies that the imported key is still a valid RSA pair""" + key = RSA.importKey(self.rsaKeyPEM) + idem = key._encrypt(key._decrypt(89)) + self.assertEqual(idem, 89) + + def testImportKey6(self): + """Verifies that the imported key is still a valid RSA pair""" + key = RSA.importKey(self.rsaKeyDER) + idem = key._encrypt(key._decrypt(65)) + self.assertEqual(idem, 65) + + def testImportKey7(self): + """Verify import of OpenSSH public key""" + key = RSA.importKey(self.rsaPublicKeyOpenSSH) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def testImportKey8(self): + """Verify import of encrypted PrivateKeyInfo DER SEQUENCE""" + for t in self.rsaKeyEncryptedPEM: + key = RSA.importKey(t[1], t[0]) + self.assertTrue(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey9(self): + """Verify import of unencrypted PrivateKeyInfo DER SEQUENCE""" + key = RSA.importKey(self.rsaKeyDER8) + self.assertTrue(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def 
testImportKey10(self): + """Verify import of unencrypted PrivateKeyInfo DER SEQUENCE, encoded with PEM""" + key = RSA.importKey(self.rsaKeyPEM8) + self.assertTrue(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey11(self): + """Verify import of RSAPublicKey DER SEQUENCE""" + der = asn1.DerSequence([17, 3]).encode() + key = RSA.importKey(der) + self.assertEqual(key.n, 17) + self.assertEqual(key.e, 3) + + def testImportKey12(self): + """Verify import of RSAPublicKey DER SEQUENCE, encoded with PEM""" + der = asn1.DerSequence([17, 3]).encode() + pem = der2pem(der) + key = RSA.importKey(pem) + self.assertEqual(key.n, 17) + self.assertEqual(key.e, 3) + + def test_import_key_windows_cr_lf(self): + pem_cr_lf = "\r\n".join(self.rsaKeyPEM.splitlines()) + key = RSA.importKey(pem_cr_lf) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def test_import_empty(self): + self.assertRaises(ValueError, RSA.import_key, b"") + + ### + def testExportKey1(self): + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + derKey = key.export_key("DER") + self.assertEqual(derKey, self.rsaKeyDER) + + def testExportKey2(self): + key = RSA.construct([self.n, self.e]) + derKey = key.export_key("DER") + self.assertEqual(derKey, self.rsaPublicKeyDER) + + def testExportKey3(self): + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + pemKey = key.export_key("PEM") + self.assertEqual(pemKey, b(self.rsaKeyPEM)) + + def testExportKey4(self): + key = RSA.construct([self.n, self.e]) + pemKey = key.export_key("PEM") + self.assertEqual(pemKey, b(self.rsaPublicKeyPEM)) + + def testExportKey5(self): + key = RSA.construct([self.n, self.e]) + openssh_1 = key.export_key("OpenSSH").split() + openssh_2 = self.rsaPublicKeyOpenSSH.split() + self.assertEqual(openssh_1[0], openssh_2[0]) + self.assertEqual(openssh_1[1], openssh_2[1]) + + def testExportKey7(self): + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + derKey = key.export_key("DER", pkcs=8) + self.assertEqual(derKey, self.rsaKeyDER8) + + def testExportKey8(self): + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + pemKey = key.export_key("PEM", pkcs=8) + self.assertEqual(pemKey, b(self.rsaKeyPEM8)) + + def testExportKey9(self): + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + self.assertRaises(ValueError, key.export_key, "invalid-format") + + def testExportKey10(self): + # Export and re-import the encrypted key. It must match. + # PEM envelope, PKCS#1, old PEM encryption + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + outkey = key.export_key('PEM', 'test') + self.assertTrue(tostr(outkey).find('4,ENCRYPTED')!=-1) + self.assertTrue(tostr(outkey).find('BEGIN RSA PRIVATE KEY')!=-1) + inkey = RSA.importKey(outkey, 'test') + self.assertEqual(key.n, inkey.n) + self.assertEqual(key.e, inkey.e) + self.assertEqual(key.d, inkey.d) + + def testExportKey11(self): + # Export and re-import the encrypted key. It must match. 
+ # PEM envelope, PKCS#1, old PEM encryption + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + outkey = key.export_key('PEM', 'test', pkcs=1) + self.assertTrue(tostr(outkey).find('4,ENCRYPTED')!=-1) + self.assertTrue(tostr(outkey).find('BEGIN RSA PRIVATE KEY')!=-1) + inkey = RSA.importKey(outkey, 'test') + self.assertEqual(key.n, inkey.n) + self.assertEqual(key.e, inkey.e) + self.assertEqual(key.d, inkey.d) + + def testExportKey12(self): + # Export and re-import the encrypted key. It must match. + # PEM envelope, PKCS#8, old PEM encryption + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + outkey = key.export_key('PEM', 'test', pkcs=8) + self.assertTrue(tostr(outkey).find('4,ENCRYPTED')!=-1) + self.assertTrue(tostr(outkey).find('BEGIN PRIVATE KEY')!=-1) + inkey = RSA.importKey(outkey, 'test') + self.assertEqual(key.n, inkey.n) + self.assertEqual(key.e, inkey.e) + self.assertEqual(key.d, inkey.d) + + def testExportKey13(self): + # Export and re-import the encrypted key. It must match. + # PEM envelope, PKCS#8, PKCS#8 encryption + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + outkey = key.export_key('PEM', 'test', pkcs=8, + protection='PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC') + self.assertTrue(tostr(outkey).find('4,ENCRYPTED')==-1) + self.assertTrue(tostr(outkey).find('BEGIN ENCRYPTED PRIVATE KEY')!=-1) + inkey = RSA.importKey(outkey, 'test') + self.assertEqual(key.n, inkey.n) + self.assertEqual(key.e, inkey.e) + self.assertEqual(key.d, inkey.d) + + def testExportKey14(self): + # Export and re-import the encrypted key. It must match. + # DER envelope, PKCS#8, PKCS#8 encryption + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + outkey = key.export_key('DER', 'test', pkcs=8) + inkey = RSA.importKey(outkey, 'test') + self.assertEqual(key.n, inkey.n) + self.assertEqual(key.e, inkey.e) + self.assertEqual(key.d, inkey.d) + + def testExportKey15(self): + # Verify that an error condition is detected when trying to + # use a password with DER encoding and PKCS#1. + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + self.assertRaises(ValueError, key.export_key, 'DER', 'test', 1) + + def testExportKey16(self): + # Export and re-import the encrypted key. It must match. 
+ # PEM envelope, PKCS#8, PKCS#8 encryption with parameters + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + outkey = key.export_key('PEM', 'test', pkcs=8, + protection='PBKDF2WithHMAC-SHA512AndAES256-CBC', + prot_params={'iteration_count':123} + ) + self.assertTrue(tostr(outkey).find('4,ENCRYPTED')==-1) + self.assertTrue(tostr(outkey).find('BEGIN ENCRYPTED PRIVATE KEY')!=-1) + + # Verify the iteration count + der = PEM.decode(tostr(outkey))[0] + seq1 = asn1.DerSequence().decode(der) + seq2 = asn1.DerSequence().decode(seq1[0]) + seq3 = asn1.DerSequence().decode(seq2[1]) + seq4 = asn1.DerSequence().decode(seq3[0]) + seq5 = asn1.DerSequence().decode(seq4[1]) + self.assertEqual(seq5[1], 123) + + inkey = RSA.importKey(outkey, 'test') + self.assertEqual(key.n, inkey.n) + self.assertEqual(key.e, inkey.e) + self.assertEqual(key.d, inkey.d) + + def test_import_key(self): + """Verify that import_key is an alias to importKey""" + key = RSA.import_key(self.rsaPublicKeyDER) + self.assertFalse(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def test_import_key_ba_mv(self): + """Verify that import_key can be used on bytearrays and memoryviews""" + key = RSA.import_key(bytearray(self.rsaPublicKeyDER)) + key = RSA.import_key(memoryview(self.rsaPublicKeyDER)) + + def test_exportKey(self): + key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + self.assertEqual(key.export_key(), key.exportKey()) + + +class ImportKeyFromX509Cert(unittest.TestCase): + + def test_x509v1(self): + + # Sample V1 certificate with a 1024 bit RSA key + x509_v1_cert = """ +-----BEGIN CERTIFICATE----- +MIICOjCCAaMCAQEwDQYJKoZIhvcNAQEEBQAwfjENMAsGA1UEChMEQWNtZTELMAkG +A1UECxMCUkQxHDAaBgkqhkiG9w0BCQEWDXNwYW1AYWNtZS5vcmcxEzARBgNVBAcT +Ck1ldHJvcG9saXMxETAPBgNVBAgTCE5ldyBZb3JrMQswCQYDVQQGEwJVUzENMAsG +A1UEAxMEdGVzdDAeFw0xNDA3MTExOTU3MjRaFw0xNzA0MDYxOTU3MjRaME0xCzAJ +BgNVBAYTAlVTMREwDwYDVQQIEwhOZXcgWW9yazENMAsGA1UEChMEQWNtZTELMAkG +A1UECxMCUkQxDzANBgNVBAMTBmxhdHZpYTCBnzANBgkqhkiG9w0BAQEFAAOBjQAw +gYkCgYEAyG+kytdRj3TFbRmHDYp3TXugVQ81chew0qeOxZWOz80IjtWpgdOaCvKW +NCuc8wUR9BWrEQW+39SaRMLiQfQtyFSQZijc3nsEBu/Lo4uWZ0W/FHDRVSvkJA/V +Ex5NL5ikI+wbUeCV5KajGNDalZ8F1pk32+CBs8h1xNx5DyxuEHUCAwEAATANBgkq +hkiG9w0BAQQFAAOBgQCVQF9Y//Q4Psy+umEM38pIlbZ2hxC5xNz/MbVPwuCkNcGn +KYNpQJP+JyVTsPpO8RLZsAQDzRueMI3S7fbbwTzAflN0z19wvblvu93xkaBytVok +9VBAH28olVhy9b1MMeg2WOt5sUEQaFNPnwwsyiY9+HsRpvpRnPSQF+kyYVsshQ== +-----END CERTIFICATE----- + """.strip() + + # RSA public key as dumped by openssl + exponent = 65537 + modulus_str = """ +00:c8:6f:a4:ca:d7:51:8f:74:c5:6d:19:87:0d:8a: +77:4d:7b:a0:55:0f:35:72:17:b0:d2:a7:8e:c5:95: +8e:cf:cd:08:8e:d5:a9:81:d3:9a:0a:f2:96:34:2b: +9c:f3:05:11:f4:15:ab:11:05:be:df:d4:9a:44:c2: +e2:41:f4:2d:c8:54:90:66:28:dc:de:7b:04:06:ef: +cb:a3:8b:96:67:45:bf:14:70:d1:55:2b:e4:24:0f: +d5:13:1e:4d:2f:98:a4:23:ec:1b:51:e0:95:e4:a6: +a3:18:d0:da:95:9f:05:d6:99:37:db:e0:81:b3:c8: +75:c4:dc:79:0f:2c:6e:10:75 + """ + modulus = int(re.sub("[^0-9a-f]","", modulus_str), 16) + + key = RSA.importKey(x509_v1_cert) + self.assertEqual(key.e, exponent) + self.assertEqual(key.n, modulus) + self.assertFalse(key.has_private()) + + def test_x509v3(self): + + # Sample V3 certificate with a 1024 bit RSA key + x509_v3_cert = """ +-----BEGIN CERTIFICATE----- +MIIEcjCCAlqgAwIBAgIBATANBgkqhkiG9w0BAQsFADBhMQswCQYDVQQGEwJVUzEL +MAkGA1UECAwCTUQxEjAQBgNVBAcMCUJhbHRpbW9yZTEQMA4GA1UEAwwHVGVzdCBD +QTEfMB0GCSqGSIb3DQEJARYQdGVzdEBleGFtcGxlLmNvbTAeFw0xNDA3MTIwOTM1 
+MTJaFw0xNzA0MDcwOTM1MTJaMEQxCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJNRDES +MBAGA1UEBwwJQmFsdGltb3JlMRQwEgYDVQQDDAtUZXN0IFNlcnZlcjCBnzANBgkq +hkiG9w0BAQEFAAOBjQAwgYkCgYEA/S7GJV2OcFdyNMQ4K75KrYFtMEn3VnEFdPHa +jyS37XlMxSh0oS4GeTGVUCJInl5Cpsv8WQdh03FfeOdvzp5IZ46OcjeOPiWnmjgl +2G5j7e2bDH7RSchGV+OD6Fb1Agvuu2/9iy8fdf3rPQ/7eAddzKUrzwacVbnW+tg2 +QtSXKRcCAwEAAaOB1TCB0jAdBgNVHQ4EFgQU/WwCX7FfWMIPDFfJ+I8a2COG+l8w +HwYDVR0jBBgwFoAUa0hkif3RMaraiWtsOOZZlLu9wJwwCQYDVR0TBAIwADALBgNV +HQ8EBAMCBeAwSgYDVR0RBEMwQYILZXhhbXBsZS5jb22CD3d3dy5leGFtcGxlLmNv +bYIQbWFpbC5leGFtcGxlLmNvbYIPZnRwLmV4YW1wbGUuY29tMCwGCWCGSAGG+EIB +DQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTANBgkqhkiG9w0BAQsF +AAOCAgEAvO6xfdsGbnoK4My3eJthodTAjMjPwFVY133LH04QLcCv54TxKhtUg1fi +PgdjVe1HpTytPBfXy2bSZbXAN0abZCtw1rYrnn7o1g2pN8iypVq3zVn0iMTzQzxs +zEPO3bpR/UhNSf90PmCsS5rqZpAAnXSaAy1ClwHWk/0eG2pYkhE1m1ABVMN2lsAW +e9WxGk6IFqaI9O37NYQwmEypMs4DC+ECJEvbPFiqi3n0gbXCZJJ6omDA5xJldaYK +Oa7KR3s/qjBsu9UAiWpLBuFoSTHIF2aeRKRFmUdmzwo43eVPep65pY6eQ4AdL2RF +rqEuINbGlzI5oQyYhu71IwB+iPZXaZZPlwjLgOsuad/p2hOgDb5WxUi8FnDPursQ +ujfpIpmrOP/zpvvQWnwePI3lI+5n41kTBSbefXEdv6rXpHk3QRzB90uPxnXPdxSC +16ASA8bQT5an/1AgoE3k9CrcD2K0EmgaX0YI0HUhkyzbkg34EhpWJ6vvRUbRiNRo +9cIbt/ya9Y9u0Ja8GLXv6dwX0l0IdJMkL8KifXUFAVCujp1FBrr/gdmwQn8itANy ++qbnWSxmOvtaY0zcaFAcONuHva0h51/WqXOMO1eb8PhR4HIIYU8p1oBwQp7dSni8 +THDi1F+GG5PsymMDj5cWK42f+QzjVw5PrVmFqqrrEoMlx8DWh5Y= +-----END CERTIFICATE----- +""".strip() + + # RSA public key as dumped by openssl + exponent = 65537 + modulus_str = """ +00:fd:2e:c6:25:5d:8e:70:57:72:34:c4:38:2b:be: +4a:ad:81:6d:30:49:f7:56:71:05:74:f1:da:8f:24: +b7:ed:79:4c:c5:28:74:a1:2e:06:79:31:95:50:22: +48:9e:5e:42:a6:cb:fc:59:07:61:d3:71:5f:78:e7: +6f:ce:9e:48:67:8e:8e:72:37:8e:3e:25:a7:9a:38: +25:d8:6e:63:ed:ed:9b:0c:7e:d1:49:c8:46:57:e3: +83:e8:56:f5:02:0b:ee:bb:6f:fd:8b:2f:1f:75:fd: +eb:3d:0f:fb:78:07:5d:cc:a5:2b:cf:06:9c:55:b9: +d6:fa:d8:36:42:d4:97:29:17 + """ + modulus = int(re.sub("[^0-9a-f]","", modulus_str), 16) + + key = RSA.importKey(x509_v3_cert) + self.assertEqual(key.e, exponent) + self.assertEqual(key.n, modulus) + self.assertFalse(key.has_private()) + + +class TestImport_2048(unittest.TestCase): + + def test_import_openssh_public(self): + key_file_ref = load_file("rsa2048_private.pem") + key_file = load_file("rsa2048_public_openssh.txt") + + # Skip test if test vectors are not installed + if None in (key_file_ref, key_file): + return + + key_ref = RSA.import_key(key_file_ref).public_key() + key = RSA.import_key(key_file) + self.assertEqual(key_ref, key) + + def test_import_openssh_private_clear(self): + key_file = load_file("rsa2048_private_openssh.pem") + key_file_old = load_file("rsa2048_private_openssh_old.pem") + + # Skip test if test vectors are not installed + if None in (key_file_old, key_file): + return + + key = RSA.import_key(key_file) + key_old = RSA.import_key(key_file_old) + + self.assertEqual(key, key_old) + + def test_import_openssh_private_password(self): + key_file = load_file("rsa2048_private_openssh_pwd.pem") + key_file_old = load_file("rsa2048_private_openssh_pwd_old.pem") + + # Skip test if test vectors are not installed + if None in (key_file_old, key_file): + return + + key = RSA.import_key(key_file, b"password") + key_old = RSA.import_key(key_file_old) + self.assertEqual(key, key_old) + + def test_import_pkcs8_private(self): + key_file_ref = load_file("rsa2048_private.pem") + key_file = load_file("rsa2048_private_p8.der") + + # Skip test if test vectors are not installed + if None in (key_file_ref, key_file): + return + + key_ref = 
RSA.import_key(key_file_ref) + key = RSA.import_key(key_file, b'secret') + self.assertEqual(key_ref, key) + + + +if __name__ == '__main__': + unittest.main() + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(ImportKeyTests) + tests += list_test_cases(ImportKeyFromX509Cert) + tests += list_test_cases(TestImport_2048) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Random/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Random/__init__.py new file mode 100644 index 000000000..53061cc05 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Random/__init__.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Random/__init__.py: Self-test for random number generation modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test for random number generators""" + +__revision__ = "$Id$" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Random import test_random; tests += test_random.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Random/test_random.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Random/test_random.py new file mode 100644 index 000000000..8fadc535a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Random/test_random.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_generic.py: Self-test for the Crypto.Random.new() function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Random.new()""" + +import sys +import unittest +from Crypto.Util.py3compat import b + +class SimpleTest(unittest.TestCase): + def runTest(self): + """Crypto.Random.new()""" + # Import the Random module and try to use it + from Crypto import Random + randobj = Random.new() + x = randobj.read(16) + y = randobj.read(16) + self.assertNotEqual(x, y) + z = Random.get_random_bytes(16) + self.assertNotEqual(x, z) + self.assertNotEqual(y, z) + # Test the Random.random module, which + # implements a subset of Python's random API + # Not implemented: + # seed(), getstate(), setstate(), jumpahead() + # random(), uniform(), triangular(), betavariate() + # expovariate(), gammavariate(), gauss(), + # longnormvariate(), normalvariate(), + # vonmisesvariate(), paretovariate() + # weibullvariate() + # WichmannHill(), whseed(), SystemRandom() + from Crypto.Random import random + x = random.getrandbits(16*8) + y = random.getrandbits(16*8) + self.assertNotEqual(x, y) + # Test randrange + if x>y: + start = y + stop = x + else: + start = x + stop = y + for step in range(1,10): + x = random.randrange(start,stop,step) + y = random.randrange(start,stop,step) + self.assertNotEqual(x, y) + self.assertEqual(start <= x < stop, True) + self.assertEqual(start <= y < stop, True) + self.assertEqual((x - start) % step, 0) + self.assertEqual((y - start) % step, 0) + for i in range(10): + self.assertEqual(random.randrange(1,2), 1) + self.assertRaises(ValueError, random.randrange, start, start) + self.assertRaises(ValueError, random.randrange, stop, start, step) + self.assertRaises(TypeError, random.randrange, start, stop, step, step) + self.assertRaises(TypeError, random.randrange, start, stop, "1") + self.assertRaises(TypeError, random.randrange, "1", stop, step) + self.assertRaises(TypeError, random.randrange, 1, "2", step) + self.assertRaises(ValueError, random.randrange, start, stop, 0) + # Test randint + x = random.randint(start,stop) + y = random.randint(start,stop) + self.assertNotEqual(x, y) + self.assertEqual(start <= x <= stop, True) + self.assertEqual(start <= y <= stop, True) + for i in range(10): + self.assertEqual(random.randint(1,1), 1) + self.assertRaises(ValueError, random.randint, stop, start) + self.assertRaises(TypeError, random.randint, start, stop, step) + self.assertRaises(TypeError, random.randint, "1", stop) + self.assertRaises(TypeError, random.randint, 1, "2") + # Test choice + seq = range(10000) + x = random.choice(seq) + y = random.choice(seq) + self.assertNotEqual(x, y) + self.assertEqual(x in seq, True) + self.assertEqual(y in seq, True) + for i in range(10): + self.assertEqual(random.choice((1,2,3)) in (1,2,3), True) + self.assertEqual(random.choice([1,2,3]) in [1,2,3], True) + if sys.version_info[0] == 3: + self.assertEqual(random.choice(bytearray(b('123'))) in bytearray(b('123')), True) + self.assertEqual(1, random.choice([1])) + self.assertRaises(IndexError, random.choice, []) + self.assertRaises(TypeError, 
random.choice, 1) + # Test shuffle. Lacks random parameter to specify function. + # Make copies of seq + seq = range(500) + x = list(seq) + y = list(seq) + random.shuffle(x) + random.shuffle(y) + self.assertNotEqual(x, y) + self.assertEqual(len(seq), len(x)) + self.assertEqual(len(seq), len(y)) + for i in range(len(seq)): + self.assertEqual(x[i] in seq, True) + self.assertEqual(y[i] in seq, True) + self.assertEqual(seq[i] in x, True) + self.assertEqual(seq[i] in y, True) + z = [1] + random.shuffle(z) + self.assertEqual(z, [1]) + if sys.version_info[0] == 3: + z = bytearray(b('12')) + random.shuffle(z) + self.assertEqual(b('1') in z, True) + self.assertRaises(TypeError, random.shuffle, b('12')) + self.assertRaises(TypeError, random.shuffle, 1) + self.assertRaises(TypeError, random.shuffle, "11") + self.assertRaises(TypeError, random.shuffle, (1,2)) + # 2to3 wraps a list() around it, alas - but I want to shoot + # myself in the foot here! :D + # if sys.version_info[0] == 3: + # self.assertRaises(TypeError, random.shuffle, range(3)) + # Test sample + x = random.sample(seq, 20) + y = random.sample(seq, 20) + self.assertNotEqual(x, y) + for i in range(20): + self.assertEqual(x[i] in seq, True) + self.assertEqual(y[i] in seq, True) + z = random.sample([1], 1) + self.assertEqual(z, [1]) + z = random.sample((1,2,3), 1) + self.assertEqual(z[0] in (1,2,3), True) + z = random.sample("123", 1) + self.assertEqual(z[0] in "123", True) + z = random.sample(range(3), 1) + self.assertEqual(z[0] in range(3), True) + if sys.version_info[0] == 3: + z = random.sample(b("123"), 1) + self.assertEqual(z[0] in b("123"), True) + z = random.sample(bytearray(b("123")), 1) + self.assertEqual(z[0] in bytearray(b("123")), True) + self.assertRaises(TypeError, random.sample, 1) + +def get_tests(config={}): + return [SimpleTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Signature/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Signature/__init__.py new file mode 100644 index 000000000..83cf0f392 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Signature/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Signature/__init__.py: Self-test for signature modules +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test for signature modules""" + +import unittest +from . 
import test_pkcs1_15, test_pss, test_dss, test_eddsa + + +def get_tests(config={}): + tests = [] + tests += test_pkcs1_15.get_tests(config=config) + tests += test_pss.get_tests(config=config) + tests += test_dss.get_tests(config=config) + tests += test_eddsa.get_tests(config=config) + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Signature/test_dss.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Signature/test_dss.py new file mode 100644 index 000000000..d3f8dfce2 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Signature/test_dss.py @@ -0,0 +1,1369 @@ +# +# SelfTest/Signature/test_dss.py: Self-test for DSS signatures +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import re +import unittest +from binascii import hexlify, unhexlify + +from Crypto.Util.py3compat import tobytes, bord, bchr + +from Crypto.Hash import (SHA1, SHA224, SHA256, SHA384, SHA512, + SHA3_224, SHA3_256, SHA3_384, SHA3_512) +from Crypto.Signature import DSS +from Crypto.PublicKey import DSA, ECC +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors, load_test_vectors_wycheproof +from Crypto.Util.number import bytes_to_long, long_to_bytes + + +def t2b(hexstring): + ws = hexstring.replace(" ", "").replace("\n", "") + return unhexlify(tobytes(ws)) + + +def t2l(hexstring): + ws = hexstring.replace(" ", "").replace("\n", "") + return int(ws, 16) + + +def load_hash_by_name(hash_name): + return __import__("Crypto.Hash." + hash_name, globals(), locals(), ["new"]) + + +class StrRNG: + + def __init__(self, randomness): + length = len(randomness) + self._idx = 0 + # Fix required to get the right K (see how randint() works!) 
+ self._randomness = long_to_bytes(bytes_to_long(randomness) - 1, length) + + def __call__(self, n): + out = self._randomness[self._idx:self._idx + n] + self._idx += n + return out + + +class FIPS_DSA_Tests(unittest.TestCase): + + # 1st 1024 bit key from SigGen.txt + P = 0xa8f9cd201e5e35d892f85f80e4db2599a5676a3b1d4f190330ed3256b26d0e80a0e49a8fffaaad2a24f472d2573241d4d6d6c7480c80b4c67bb4479c15ada7ea8424d2502fa01472e760241713dab025ae1b02e1703a1435f62ddf4ee4c1b664066eb22f2e3bf28bb70a2a76e4fd5ebe2d1229681b5b06439ac9c7e9d8bde283 + Q = 0xf85f0f83ac4df7ea0cdf8f469bfeeaea14156495 + G = 0x2b3152ff6c62f14622b8f48e59f8af46883b38e79b8c74deeae9df131f8b856e3ad6c8455dab87cc0da8ac973417ce4f7878557d6cdf40b35b4a0ca3eb310c6a95d68ce284ad4e25ea28591611ee08b8444bd64b25f3f7c572410ddfb39cc728b9c936f85f419129869929cdb909a6a3a99bbe089216368171bd0ba81de4fe33 + X = 0xc53eae6d45323164c7d07af5715703744a63fc3a + Y = 0x313fd9ebca91574e1c2eebe1517c57e0c21b0209872140c5328761bbb2450b33f1b18b409ce9ab7c4cd8fda3391e8e34868357c199e16a6b2eba06d6749def791d79e95d3a4d09b24c392ad89dbf100995ae19c01062056bb14bce005e8731efde175f95b975089bdcdaea562b32786d96f5a31aedf75364008ad4fffebb970b + + key_pub = DSA.construct((Y, G, P, Q)) + key_priv = DSA.construct((Y, G, P, Q, X)) + + def shortDescription(self): + return "FIPS DSA Tests" + + def test_loopback(self): + hashed_msg = SHA512.new(b"test") + signer = DSS.new(self.key_priv, 'fips-186-3') + signature = signer.sign(hashed_msg) + + verifier = DSS.new(self.key_pub, 'fips-186-3') + verifier.verify(hashed_msg, signature) + + def test_negative_unapproved_hashes(self): + """Verify that unapproved hashes are rejected""" + + from Crypto.Hash import RIPEMD160 + + self.description = "Unapproved hash (RIPEMD160) test" + hash_obj = RIPEMD160.new() + signer = DSS.new(self.key_priv, 'fips-186-3') + self.assertRaises(ValueError, signer.sign, hash_obj) + self.assertRaises(ValueError, signer.verify, hash_obj, b"\x00" * 40) + + def test_negative_unknown_modes_encodings(self): + """Verify that unknown modes/encodings are rejected""" + + self.description = "Unknown mode test" + self.assertRaises(ValueError, DSS.new, self.key_priv, 'fips-186-0') + + self.description = "Unknown encoding test" + self.assertRaises(ValueError, DSS.new, self.key_priv, 'fips-186-3', 'xml') + + def test_asn1_encoding(self): + """Verify ASN.1 encoding""" + + self.description = "ASN.1 encoding test" + hash_obj = SHA1.new() + signer = DSS.new(self.key_priv, 'fips-186-3', 'der') + signature = signer.sign(hash_obj) + + # Verify that output looks like a DER SEQUENCE + self.assertEqual(bord(signature[0]), 48) + signer.verify(hash_obj, signature) + + # Verify that ASN.1 parsing fails as expected + signature = bchr(7) + signature[1:] + self.assertRaises(ValueError, signer.verify, hash_obj, signature) + + def test_sign_verify(self): + """Verify public/private method""" + + self.description = "can_sign() test" + signer = DSS.new(self.key_priv, 'fips-186-3') + self.assertTrue(signer.can_sign()) + + signer = DSS.new(self.key_pub, 'fips-186-3') + self.assertFalse(signer.can_sign()) + + try: + signer.sign(SHA256.new(b'xyz')) + except TypeError as e: + msg = str(e) + else: + msg = "" + self.assertTrue("Private key is needed" in msg) + + +class FIPS_DSA_Tests_KAT(unittest.TestCase): + pass + + +test_vectors_verify = load_test_vectors(("Signature", "DSA"), + "FIPS_186_3_SigVer.rsp", + "Signature Verification 186-3", + {'result': lambda x: x}) or [] + +for idx, tv in enumerate(test_vectors_verify): + + if isinstance(tv, str): + res = re.match(r"\[mod 
= L=([0-9]+), N=([0-9]+), ([a-zA-Z0-9-]+)\]", tv) + assert(res) + hash_name = res.group(3).replace("-", "") + hash_module = load_hash_by_name(hash_name) + continue + + if hasattr(tv, "p"): + modulus = tv.p + generator = tv.g + suborder = tv.q + continue + + hash_obj = hash_module.new(tv.msg) + + comps = [bytes_to_long(x) for x in (tv.y, generator, modulus, suborder)] + key = DSA.construct(comps, False) # type: ignore + verifier = DSS.new(key, 'fips-186-3') + + def positive_test(self, verifier=verifier, hash_obj=hash_obj, signature=tv.r+tv.s): + verifier.verify(hash_obj, signature) + + def negative_test(self, verifier=verifier, hash_obj=hash_obj, signature=tv.r+tv.s): + self.assertRaises(ValueError, verifier.verify, hash_obj, signature) + + if tv.result == 'p': + setattr(FIPS_DSA_Tests_KAT, "test_verify_positive_%d" % idx, positive_test) + else: + setattr(FIPS_DSA_Tests_KAT, "test_verify_negative_%d" % idx, negative_test) + + +test_vectors_sign = load_test_vectors(("Signature", "DSA"), + "FIPS_186_3_SigGen.txt", + "Signature Creation 186-3", + {}) or [] + +for idx, tv in enumerate(test_vectors_sign): + + if isinstance(tv, str): + res = re.match(r"\[mod = L=([0-9]+), N=([0-9]+), ([a-zA-Z0-9-]+)\]", tv) + assert(res) + hash_name = res.group(3).replace("-", "") + hash_module = load_hash_by_name(hash_name) + continue + + if hasattr(tv, "p"): + modulus = tv.p + generator = tv.g + suborder = tv.q + continue + + hash_obj = hash_module.new(tv.msg) + comps_dsa = [bytes_to_long(x) for x in (tv.y, generator, modulus, suborder, tv.x)] + key = DSA.construct(comps_dsa, False) # type: ignore + signer = DSS.new(key, 'fips-186-3', randfunc=StrRNG(tv.k)) + + def new_test(self, signer=signer, hash_obj=hash_obj, signature=tv.r+tv.s): + self.assertEqual(signer.sign(hash_obj), signature) + setattr(FIPS_DSA_Tests_KAT, "test_sign_%d" % idx, new_test) + + +class FIPS_ECDSA_Tests(unittest.TestCase): + + key_priv = ECC.generate(curve="P-256") + key_pub = key_priv.public_key() + + def shortDescription(self): + return "FIPS ECDSA Tests" + + def test_loopback(self): + hashed_msg = SHA512.new(b"test") + signer = DSS.new(self.key_priv, 'fips-186-3') + signature = signer.sign(hashed_msg) + + verifier = DSS.new(self.key_pub, 'fips-186-3') + verifier.verify(hashed_msg, signature) + + def test_negative_unapproved_hashes(self): + """Verify that unapproved hashes are rejected""" + + from Crypto.Hash import SHA1 + + self.description = "Unapproved hash (SHA-1) test" + hash_obj = SHA1.new() + signer = DSS.new(self.key_priv, 'fips-186-3') + self.assertRaises(ValueError, signer.sign, hash_obj) + self.assertRaises(ValueError, signer.verify, hash_obj, b"\x00" * 40) + + def test_negative_eddsa_key(self): + key = ECC.generate(curve="ed25519") + self.assertRaises(ValueError, DSS.new, key, 'fips-186-3') + + def test_sign_verify(self): + """Verify public/private method""" + + self.description = "can_sign() test" + signer = DSS.new(self.key_priv, 'fips-186-3') + self.assertTrue(signer.can_sign()) + + signer = DSS.new(self.key_pub, 'fips-186-3') + self.assertFalse(signer.can_sign()) + self.assertRaises(TypeError, signer.sign, SHA256.new(b'xyz')) + + try: + signer.sign(SHA256.new(b'xyz')) + except TypeError as e: + msg = str(e) + else: + msg = "" + self.assertTrue("Private key is needed" in msg) + + def test_negative_unknown_modes_encodings(self): + """Verify that unknown modes/encodings are rejected""" + + self.description = "Unknown mode test" + self.assertRaises(ValueError, DSS.new, self.key_priv, 'fips-186-0') + + self.description = 
"Unknown encoding test" + self.assertRaises(ValueError, DSS.new, self.key_priv, 'fips-186-3', 'xml') + + def test_asn1_encoding(self): + """Verify ASN.1 encoding""" + + self.description = "ASN.1 encoding test" + hash_obj = SHA256.new() + signer = DSS.new(self.key_priv, 'fips-186-3', 'der') + signature = signer.sign(hash_obj) + + # Verify that output looks like a DER SEQUENCE + self.assertEqual(bord(signature[0]), 48) + signer.verify(hash_obj, signature) + + # Verify that ASN.1 parsing fails as expected + signature = bchr(7) + signature[1:] + self.assertRaises(ValueError, signer.verify, hash_obj, signature) + + +class FIPS_ECDSA_Tests_KAT(unittest.TestCase): + pass + + +test_vectors_verify = load_test_vectors(("Signature", "ECDSA"), + "SigVer.rsp", + "ECDSA Signature Verification 186-3", + {'result': lambda x: x, + 'qx': lambda x: int(x, 16), + 'qy': lambda x: int(x, 16), + }) or [] +test_vectors_verify += load_test_vectors(("Signature", "ECDSA"), + "SigVer_TruncatedSHAs.rsp", + "ECDSA Signature Verification 186-3", + {'result': lambda x: x, + 'qx': lambda x: int(x, 16), + 'qy': lambda x: int(x, 16), + }) or [] + + +for idx, tv in enumerate(test_vectors_verify): + + if isinstance(tv, str): + res = re.match(r"\[(P-[0-9]+),(SHA-[0-9]+)\]", tv) + assert res + curve_name = res.group(1) + hash_name = res.group(2).replace("-", "") + if hash_name in ("SHA512224", "SHA512256"): + truncate = hash_name[-3:] + hash_name = hash_name[:-3] + else: + truncate = None + hash_module = load_hash_by_name(hash_name) + continue + + if truncate is None: + hash_obj = hash_module.new(tv.msg) + else: + hash_obj = hash_module.new(tv.msg, truncate=truncate) + ecc_key = ECC.construct(curve=curve_name, point_x=tv.qx, point_y=tv.qy) + verifier = DSS.new(ecc_key, 'fips-186-3') + + def positive_test(self, verifier=verifier, hash_obj=hash_obj, signature=tv.r+tv.s): + verifier.verify(hash_obj, signature) + + def negative_test(self, verifier=verifier, hash_obj=hash_obj, signature=tv.r+tv.s): + self.assertRaises(ValueError, verifier.verify, hash_obj, signature) + + if tv.result.startswith('p'): + setattr(FIPS_ECDSA_Tests_KAT, "test_verify_positive_%d" % idx, positive_test) + else: + setattr(FIPS_ECDSA_Tests_KAT, "test_verify_negative_%d" % idx, negative_test) + + +test_vectors_sign = load_test_vectors(("Signature", "ECDSA"), + "SigGen.txt", + "ECDSA Signature Verification 186-3", + {'d': lambda x: int(x, 16)}) or [] + +for idx, tv in enumerate(test_vectors_sign): + + if isinstance(tv, str): + res = re.match(r"\[(P-[0-9]+),(SHA-[0-9]+)\]", tv) + assert res + curve_name = res.group(1) + hash_name = res.group(2).replace("-", "") + hash_module = load_hash_by_name(hash_name) + continue + + hash_obj = hash_module.new(tv.msg) + ecc_key = ECC.construct(curve=curve_name, d=tv.d) + signer = DSS.new(ecc_key, 'fips-186-3', randfunc=StrRNG(tv.k)) + + def sign_test(self, signer=signer, hash_obj=hash_obj, signature=tv.r+tv.s): + self.assertEqual(signer.sign(hash_obj), signature) + setattr(FIPS_ECDSA_Tests_KAT, "test_sign_%d" % idx, sign_test) + + +class Det_DSA_Tests(unittest.TestCase): + """Tests from rfc6979""" + + # Each key is (p, q, g, x, y, desc) + keys = [ + ( + """ + 86F5CA03DCFEB225063FF830A0C769B9DD9D6153AD91D7CE27F787C43278B447 + E6533B86B18BED6E8A48B784A14C252C5BE0DBF60B86D6385BD2F12FB763ED88 + 73ABFD3F5BA2E0A8C0A59082EAC056935E529DAF7C610467899C77ADEDFC846C + 881870B7B19B2B58F9BE0521A17002E3BDD6B86685EE90B3D9A1B02B782B1779""", + "996F967F6C8E388D9E28D01E205FBA957A5698B1", + """ + 
07B0F92546150B62514BB771E2A0C0CE387F03BDA6C56B505209FF25FD3C133D + 89BBCD97E904E09114D9A7DEFDEADFC9078EA544D2E401AEECC40BB9FBBF78FD + 87995A10A1C27CB7789B594BA7EFB5C4326A9FE59A070E136DB77175464ADCA4 + 17BE5DCE2F40D10A46A3A3943F26AB7FD9C0398FF8C76EE0A56826A8A88F1DBD""", + "411602CB19A6CCC34494D79D98EF1E7ED5AF25F7", + """ + 5DF5E01DED31D0297E274E1691C192FE5868FEF9E19A84776454B100CF16F653 + 92195A38B90523E2542EE61871C0440CB87C322FC4B4D2EC5E1E7EC766E1BE8D + 4CE935437DC11C3C8FD426338933EBFE739CB3465F4D3668C5E473508253B1E6 + 82F65CBDC4FAE93C2EA212390E54905A86E2223170B44EAA7DA5DD9FFCFB7F3B""", + "DSA1024" + ), + ( + """ + 9DB6FB5951B66BB6FE1E140F1D2CE5502374161FD6538DF1648218642F0B5C48 + C8F7A41AADFA187324B87674FA1822B00F1ECF8136943D7C55757264E5A1A44F + FE012E9936E00C1D3E9310B01C7D179805D3058B2A9F4BB6F9716BFE6117C6B5 + B3CC4D9BE341104AD4A80AD6C94E005F4B993E14F091EB51743BF33050C38DE2 + 35567E1B34C3D6A5C0CEAA1A0F368213C3D19843D0B4B09DCB9FC72D39C8DE41 + F1BF14D4BB4563CA28371621CAD3324B6A2D392145BEBFAC748805236F5CA2FE + 92B871CD8F9C36D3292B5509CA8CAA77A2ADFC7BFD77DDA6F71125A7456FEA15 + 3E433256A2261C6A06ED3693797E7995FAD5AABBCFBE3EDA2741E375404AE25B""", + "F2C3119374CE76C9356990B465374A17F23F9ED35089BD969F61C6DDE9998C1F", + """ + 5C7FF6B06F8F143FE8288433493E4769C4D988ACE5BE25A0E24809670716C613 + D7B0CEE6932F8FAA7C44D2CB24523DA53FBE4F6EC3595892D1AA58C4328A06C4 + 6A15662E7EAA703A1DECF8BBB2D05DBE2EB956C142A338661D10461C0D135472 + 085057F3494309FFA73C611F78B32ADBB5740C361C9F35BE90997DB2014E2EF5 + AA61782F52ABEB8BD6432C4DD097BC5423B285DAFB60DC364E8161F4A2A35ACA + 3A10B1C4D203CC76A470A33AFDCBDD92959859ABD8B56E1725252D78EAC66E71 + BA9AE3F1DD2487199874393CD4D832186800654760E1E34C09E4D155179F9EC0 + DC4473F996BDCE6EED1CABED8B6F116F7AD9CF505DF0F998E34AB27514B0FFE7""", + "69C7548C21D0DFEA6B9A51C9EAD4E27C33D3B3F180316E5BCAB92C933F0E4DBC", + """ + 667098C654426C78D7F8201EAC6C203EF030D43605032C2F1FA937E5237DBD94 + 9F34A0A2564FE126DC8B715C5141802CE0979C8246463C40E6B6BDAA2513FA61 + 1728716C2E4FD53BC95B89E69949D96512E873B9C8F8DFD499CC312882561ADE + CB31F658E934C0C197F2C4D96B05CBAD67381E7B768891E4DA3843D24D94CDFB + 5126E9B8BF21E8358EE0E0A30EF13FD6A664C0DCE3731F7FB49A4845A4FD8254 + 687972A2D382599C9BAC4E0ED7998193078913032558134976410B89D2C171D1 + 23AC35FD977219597AA7D15C1A9A428E59194F75C721EBCBCFAE44696A499AFA + 74E04299F132026601638CB87AB79190D4A0986315DA8EEC6561C938996BEADF""", + "DSA2048" + ), + ] + + # This is a sequence of items: + # message, k, r, s, hash module + signatures = [ + ( + "sample", + "7BDB6B0FF756E1BB5D53583EF979082F9AD5BD5B", + "2E1A0C2562B2912CAAF89186FB0F42001585DA55", + "29EFB6B0AFF2D7A68EB70CA313022253B9A88DF5", + SHA1, + 'DSA1024' + ), + ( + "sample", + "562097C06782D60C3037BA7BE104774344687649", + "4BC3B686AEA70145856814A6F1BB53346F02101E", + "410697B92295D994D21EDD2F4ADA85566F6F94C1", + SHA224, + 'DSA1024' + ), + ( + "sample", + "519BA0546D0C39202A7D34D7DFA5E760B318BCFB", + "81F2F5850BE5BC123C43F71A3033E9384611C545", + "4CDD914B65EB6C66A8AAAD27299BEE6B035F5E89", + SHA256, + 'DSA1024' + ), + ( + "sample", + "95897CD7BBB944AA932DBC579C1C09EB6FCFC595", + "07F2108557EE0E3921BC1774F1CA9B410B4CE65A", + "54DF70456C86FAC10FAB47C1949AB83F2C6F7595", + SHA384, + 'DSA1024' + ), + ( + "sample", + "09ECE7CA27D0F5A4DD4E556C9DF1D21D28104F8B", + "16C3491F9B8C3FBBDD5E7A7B667057F0D8EE8E1B", + "02C36A127A7B89EDBB72E4FFBC71DABC7D4FC69C", + SHA512, + 'DSA1024' + ), + ( + "test", + "5C842DF4F9E344EE09F056838B42C7A17F4A6433", + "42AB2052FD43E123F0607F115052A67DCD9C5C77", + "183916B0230D45B9931491D4C6B0BD2FB4AAF088", 
+ SHA1, + 'DSA1024' + ), + ( + "test", + "4598B8EFC1A53BC8AECD58D1ABBB0C0C71E67297", + "6868E9964E36C1689F6037F91F28D5F2C30610F2", + "49CEC3ACDC83018C5BD2674ECAAD35B8CD22940F", + SHA224, + 'DSA1024' + ), + ( + "test", + "5A67592E8128E03A417B0484410FB72C0B630E1A", + "22518C127299B0F6FDC9872B282B9E70D0790812", + "6837EC18F150D55DE95B5E29BE7AF5D01E4FE160", + SHA256, + 'DSA1024' + ), + ( + "test", + "220156B761F6CA5E6C9F1B9CF9C24BE25F98CD89", + "854CF929B58D73C3CBFDC421E8D5430CD6DB5E66", + "91D0E0F53E22F898D158380676A871A157CDA622", + SHA384, + 'DSA1024' + ), + ( + "test", + "65D2C2EEB175E370F28C75BFCDC028D22C7DBE9C", + "8EA47E475BA8AC6F2D821DA3BD212D11A3DEB9A0", + "7C670C7AD72B6C050C109E1790008097125433E8", + SHA512, + 'DSA1024' + ), + ( + "sample", + "888FA6F7738A41BDC9846466ABDB8174C0338250AE50CE955CA16230F9CBD53E", + "3A1B2DBD7489D6ED7E608FD036C83AF396E290DBD602408E8677DAABD6E7445A", + "D26FCBA19FA3E3058FFC02CA1596CDBB6E0D20CB37B06054F7E36DED0CDBBCCF", + SHA1, + 'DSA2048' + ), + ( + "sample", + "BC372967702082E1AA4FCE892209F71AE4AD25A6DFD869334E6F153BD0C4D806", + "DC9F4DEADA8D8FF588E98FED0AB690FFCE858DC8C79376450EB6B76C24537E2C", + "A65A9C3BC7BABE286B195D5DA68616DA8D47FA0097F36DD19F517327DC848CEC", + SHA224, + 'DSA2048' + ), + ( + "sample", + "8926A27C40484216F052F4427CFD5647338B7B3939BC6573AF4333569D597C52", + "EACE8BDBBE353C432A795D9EC556C6D021F7A03F42C36E9BC87E4AC7932CC809", + "7081E175455F9247B812B74583E9E94F9EA79BD640DC962533B0680793A38D53", + SHA256, + 'DSA2048' + ), + ( + "sample", + "C345D5AB3DA0A5BCB7EC8F8FB7A7E96069E03B206371EF7D83E39068EC564920", + "B2DA945E91858834FD9BF616EBAC151EDBC4B45D27D0DD4A7F6A22739F45C00B", + "19048B63D9FD6BCA1D9BAE3664E1BCB97F7276C306130969F63F38FA8319021B", + SHA384, + 'DSA2048' + ), + ( + "sample", + "5A12994431785485B3F5F067221517791B85A597B7A9436995C89ED0374668FC", + "2016ED092DC5FB669B8EFB3D1F31A91EECB199879BE0CF78F02BA062CB4C942E", + "D0C76F84B5F091E141572A639A4FB8C230807EEA7D55C8A154A224400AFF2351", + SHA512, + 'DSA2048' + ), + ( + "test", + "6EEA486F9D41A037B2C640BC5645694FF8FF4B98D066A25F76BE641CCB24BA4F", + "C18270A93CFC6063F57A4DFA86024F700D980E4CF4E2CB65A504397273D98EA0", + "414F22E5F31A8B6D33295C7539C1C1BA3A6160D7D68D50AC0D3A5BEAC2884FAA", + SHA1, + 'DSA2048' + ), + ( + "test", + "06BD4C05ED74719106223BE33F2D95DA6B3B541DAD7BFBD7AC508213B6DA6670", + "272ABA31572F6CC55E30BF616B7A265312018DD325BE031BE0CC82AA17870EA3", + "E9CC286A52CCE201586722D36D1E917EB96A4EBDB47932F9576AC645B3A60806", + SHA224, + 'DSA2048' + ), + ( + "test", + "1D6CE6DDA1C5D37307839CD03AB0A5CBB18E60D800937D67DFB4479AAC8DEAD7", + "8190012A1969F9957D56FCCAAD223186F423398D58EF5B3CEFD5A4146A4476F0", + "7452A53F7075D417B4B013B278D1BB8BBD21863F5E7B1CEE679CF2188E1AB19E", + SHA256, + 'DSA2048' + ), + ( + "test", + "206E61F73DBE1B2DC8BE736B22B079E9DACD974DB00EEBBC5B64CAD39CF9F91C", + "239E66DDBE8F8C230A3D071D601B6FFBDFB5901F94D444C6AF56F732BEB954BE", + "6BD737513D5E72FE85D1C750E0F73921FE299B945AAD1C802F15C26A43D34961", + SHA384, + 'DSA2048' + ), + ( + "test", + "AFF1651E4CD6036D57AA8B2A05CCF1A9D5A40166340ECBBDC55BE10B568AA0AA", + "89EC4BB1400ECCFF8E7D9AA515CD1DE7803F2DAFF09693EE7FD1353E90A68307", + "C9F0BDABCC0D880BB137A994CC7F3980CE91CC10FAF529FC46565B15CEA854E1", + SHA512, + 'DSA2048' + ) + ] + + def setUp(self): + # Convert DSA key components from hex strings to integers + # Each key is (p, q, g, x, y, desc) + + from collections import namedtuple + + TestKey = namedtuple('TestKey', 'p q g x y') + new_keys = {} + for k in self.keys: + tk = TestKey(*[t2l(y) for y in k[:-1]]) + 
new_keys[k[-1]] = tk + self.keys = new_keys + + # Convert signature encoding + TestSig = namedtuple('TestSig', 'message nonce result module test_key') + new_signatures = [] + for message, nonce, r, s, module, test_key in self.signatures: + tsig = TestSig( + tobytes(message), + t2l(nonce), + t2b(r) + t2b(s), + module, + self.keys[test_key] + ) + new_signatures.append(tsig) + self.signatures = new_signatures + + def test1(self): + q = 0x4000000000000000000020108A2E0CC0D99F8A5EF + x = 0x09A4D6792295A7F730FC3F2B49CBC0F62E862272F + p = 2 * q + 1 + y = pow(2, x, p) + key = DSA.construct([pow(y, 2, p), 2, p, q, x], False) + signer = DSS.new(key, 'deterministic-rfc6979') + + # Test _int2octets + self.assertEqual(hexlify(signer._int2octets(x)), + b'009a4d6792295a7f730fc3f2b49cbc0f62e862272f') + + # Test _bits2octets + h1 = SHA256.new(b"sample").digest() + self.assertEqual(hexlify(signer._bits2octets(h1)), + b'01795edf0d54db760f156d0dac04c0322b3a204224') + + def test2(self): + + for sig in self.signatures: + tk = sig.test_key + key = DSA.construct([tk.y, tk.g, tk.p, tk.q, tk.x], False) + signer = DSS.new(key, 'deterministic-rfc6979') + + hash_obj = sig.module.new(sig.message) + result = signer.sign(hash_obj) + self.assertEqual(sig.result, result) + + +class Det_ECDSA_Tests(unittest.TestCase): + + key_priv_p192 = ECC.construct(curve="P-192", d=0x6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4) + key_pub_p192 = key_priv_p192.public_key() + + key_priv_p224 = ECC.construct(curve="P-224", d=0xF220266E1105BFE3083E03EC7A3A654651F45E37167E88600BF257C1) + key_pub_p224 = key_priv_p224.public_key() + + key_priv_p256 = ECC.construct(curve="P-256", d=0xC9AFA9D845BA75166B5C215767B1D6934E50C3DB36E89B127B8A622B120F6721) + key_pub_p256 = key_priv_p256.public_key() + + key_priv_p384 = ECC.construct(curve="P-384", d=0x6B9D3DAD2E1B8C1C05B19875B6659F4DE23C3B667BF297BA9AA47740787137D896D5724E4C70A825F872C9EA60D2EDF5) + key_pub_p384 = key_priv_p384.public_key() + + key_priv_p521 = ECC.construct(curve="P-521", d=0x0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538) + key_pub_p521 = key_priv_p521.public_key() + + # This is a sequence of items: + # message, k, r, s, hash module + # taken from RFC6979 + signatures_p192_ = ( + ( + "sample", + "37D7CA00D2C7B0E5E412AC03BD44BA837FDD5B28CD3B0021", + "98C6BD12B23EAF5E2A2045132086BE3EB8EBD62ABF6698FF", + "57A22B07DEA9530F8DE9471B1DC6624472E8E2844BC25B64", + SHA1 + ), + ( + "sample", + "4381526B3FC1E7128F202E194505592F01D5FF4C5AF015D8", + "A1F00DAD97AEEC91C95585F36200C65F3C01812AA60378F5", + "E07EC1304C7C6C9DEBBE980B9692668F81D4DE7922A0F97A", + SHA224 + ), + ( + "sample", + "32B1B6D7D42A05CB449065727A84804FB1A3E34D8F261496", + "4B0B8CE98A92866A2820E20AA6B75B56382E0F9BFD5ECB55", + "CCDB006926EA9565CBADC840829D8C384E06DE1F1E381B85", + SHA256 + ), + ( + "sample", + "4730005C4FCB01834C063A7B6760096DBE284B8252EF4311", + "DA63BF0B9ABCF948FBB1E9167F136145F7A20426DCC287D5", + "C3AA2C960972BD7A2003A57E1C4C77F0578F8AE95E31EC5E", + SHA384 + ), + ( + "sample", + "A2AC7AB055E4F20692D49209544C203A7D1F2C0BFBC75DB1", + "4D60C5AB1996BD848343B31C00850205E2EA6922DAC2E4B8", + "3F6E837448F027A1BF4B34E796E32A811CBB4050908D8F67", + SHA512 + ), + ( + "test", + "D9CF9C3D3297D3260773A1DA7418DB5537AB8DD93DE7FA25", + "0F2141A0EBBC44D2E1AF90A50EBCFCE5E197B3B7D4DE036D", + "EB18BC9E1F3D7387500CB99CF5F7C157070A8961E38700B7", + SHA1 + ), + ( + "test", + "F5DC805F76EF851800700CCE82E7B98D8911B7D510059FBE", + 
"6945A1C1D1B2206B8145548F633BB61CEF04891BAF26ED34", + "B7FB7FDFC339C0B9BD61A9F5A8EAF9BE58FC5CBA2CB15293", + SHA224 + ), + ( + "test", + "5C4CE89CF56D9E7C77C8585339B006B97B5F0680B4306C6C", + "3A718BD8B4926C3B52EE6BBE67EF79B18CB6EB62B1AD97AE", + "5662E6848A4A19B1F1AE2F72ACD4B8BBE50F1EAC65D9124F", + SHA256 + ), + ( + "test", + "5AFEFB5D3393261B828DB6C91FBC68C230727B030C975693", + "B234B60B4DB75A733E19280A7A6034BD6B1EE88AF5332367", + "7994090B2D59BB782BE57E74A44C9A1C700413F8ABEFE77A", + SHA384 + ), + ( + "test", + "0758753A5254759C7CFBAD2E2D9B0792EEE44136C9480527", + "FE4F4AE86A58B6507946715934FE2D8FF9D95B6B098FE739", + "74CF5605C98FBA0E1EF34D4B5A1577A7DCF59457CAE52290", + SHA512 + ) + ) + + signatures_p224_ = ( + ( + "sample", + "7EEFADD91110D8DE6C2C470831387C50D3357F7F4D477054B8B426BC", + "22226F9D40A96E19C4A301CE5B74B115303C0F3A4FD30FC257FB57AC", + "66D1CDD83E3AF75605DD6E2FEFF196D30AA7ED7A2EDF7AF475403D69", + SHA1 + ), + ( + "sample", + "C1D1F2F10881088301880506805FEB4825FE09ACB6816C36991AA06D", + "1CDFE6662DDE1E4A1EC4CDEDF6A1F5A2FB7FBD9145C12113E6ABFD3E", + "A6694FD7718A21053F225D3F46197CA699D45006C06F871808F43EBC", + SHA224 + ), + ( + "sample", + "AD3029E0278F80643DE33917CE6908C70A8FF50A411F06E41DEDFCDC", + "61AA3DA010E8E8406C656BC477A7A7189895E7E840CDFE8FF42307BA", + "BC814050DAB5D23770879494F9E0A680DC1AF7161991BDE692B10101", + SHA256 + ), + ( + "sample", + "52B40F5A9D3D13040F494E83D3906C6079F29981035C7BD51E5CAC40", + "0B115E5E36F0F9EC81F1325A5952878D745E19D7BB3EABFABA77E953", + "830F34CCDFE826CCFDC81EB4129772E20E122348A2BBD889A1B1AF1D", + SHA384 + ), + ( + "sample", + "9DB103FFEDEDF9CFDBA05184F925400C1653B8501BAB89CEA0FBEC14", + "074BD1D979D5F32BF958DDC61E4FB4872ADCAFEB2256497CDAC30397", + "A4CECA196C3D5A1FF31027B33185DC8EE43F288B21AB342E5D8EB084", + SHA512 + ), + ( + "test", + "2519178F82C3F0E4F87ED5883A4E114E5B7A6E374043D8EFD329C253", + "DEAA646EC2AF2EA8AD53ED66B2E2DDAA49A12EFD8356561451F3E21C", + "95987796F6CF2062AB8135271DE56AE55366C045F6D9593F53787BD2", + SHA1 + ), + ( + "test", + "DF8B38D40DCA3E077D0AC520BF56B6D565134D9B5F2EAE0D34900524", + "C441CE8E261DED634E4CF84910E4C5D1D22C5CF3B732BB204DBEF019", + "902F42847A63BDC5F6046ADA114953120F99442D76510150F372A3F4", + SHA224 + ), + ( + "test", + "FF86F57924DA248D6E44E8154EB69F0AE2AEBAEE9931D0B5A969F904", + "AD04DDE87B84747A243A631EA47A1BA6D1FAA059149AD2440DE6FBA6", + "178D49B1AE90E3D8B629BE3DB5683915F4E8C99FDF6E666CF37ADCFD", + SHA256 + ), + ( + "test", + "7046742B839478C1B5BD31DB2E862AD868E1A45C863585B5F22BDC2D", + "389B92682E399B26518A95506B52C03BC9379A9DADF3391A21FB0EA4", + "414A718ED3249FF6DBC5B50C27F71F01F070944DA22AB1F78F559AAB", + SHA384 + ), + ( + "test", + "E39C2AA4EA6BE2306C72126D40ED77BF9739BB4D6EF2BBB1DCB6169D", + "049F050477C5ADD858CAC56208394B5A55BAEBBE887FDF765047C17C", + "077EB13E7005929CEFA3CD0403C7CDCC077ADF4E44F3C41B2F60ECFF", + SHA512 + ) + ) + + signatures_p256_ = ( + ( + "sample", + "882905F1227FD620FBF2ABF21244F0BA83D0DC3A9103DBBEE43A1FB858109DB4", + "61340C88C3AAEBEB4F6D667F672CA9759A6CCAA9FA8811313039EE4A35471D32", + "6D7F147DAC089441BB2E2FE8F7A3FA264B9C475098FDCF6E00D7C996E1B8B7EB", + SHA1 + ), + ( + "sample", + "103F90EE9DC52E5E7FB5132B7033C63066D194321491862059967C715985D473", + "53B2FFF5D1752B2C689DF257C04C40A587FABABB3F6FC2702F1343AF7CA9AA3F", + "B9AFB64FDC03DC1A131C7D2386D11E349F070AA432A4ACC918BEA988BF75C74C", + SHA224 + ), + ( + "sample", + "A6E3C57DD01ABE90086538398355DD4C3B17AA873382B0F24D6129493D8AAD60", + "EFD48B2AACB6A8FD1140DD9CD45E81D69D2C877B56AAF991C34D0EA84EAF3716", + 
"F7CB1C942D657C41D436C7A1B6E29F65F3E900DBB9AFF4064DC4AB2F843ACDA8", + SHA256 + ), + ( + "sample", + "09F634B188CEFD98E7EC88B1AA9852D734D0BC272F7D2A47DECC6EBEB375AAD4", + "0EAFEA039B20E9B42309FB1D89E213057CBF973DC0CFC8F129EDDDC800EF7719", + "4861F0491E6998B9455193E34E7B0D284DDD7149A74B95B9261F13ABDE940954", + SHA384 + ), + ( + "sample", + "5FA81C63109BADB88C1F367B47DA606DA28CAD69AA22C4FE6AD7DF73A7173AA5", + "8496A60B5E9B47C825488827E0495B0E3FA109EC4568FD3F8D1097678EB97F00", + "2362AB1ADBE2B8ADF9CB9EDAB740EA6049C028114F2460F96554F61FAE3302FE", + SHA512 + ), + ( + "test", + "8C9520267C55D6B980DF741E56B4ADEE114D84FBFA2E62137954164028632A2E", + "0CBCC86FD6ABD1D99E703E1EC50069EE5C0B4BA4B9AC60E409E8EC5910D81A89", + "01B9D7B73DFAA60D5651EC4591A0136F87653E0FD780C3B1BC872FFDEAE479B1", + SHA1 + ), + ( + "test", + "669F4426F2688B8BE0DB3A6BD1989BDAEFFF84B649EEB84F3DD26080F667FAA7", + "C37EDB6F0AE79D47C3C27E962FA269BB4F441770357E114EE511F662EC34A692", + "C820053A05791E521FCAAD6042D40AEA1D6B1A540138558F47D0719800E18F2D", + SHA224 + ), + ( + "test", + "D16B6AE827F17175E040871A1C7EC3500192C4C92677336EC2537ACAEE0008E0", + "F1ABB023518351CD71D881567B1EA663ED3EFCF6C5132B354F28D3B0B7D38367", + "019F4113742A2B14BD25926B49C649155F267E60D3814B4C0CC84250E46F0083", + SHA256 + ), + ( + "test", + "16AEFFA357260B04B1DD199693960740066C1A8F3E8EDD79070AA914D361B3B8", + "83910E8B48BB0C74244EBDF7F07A1C5413D61472BD941EF3920E623FBCCEBEB6", + "8DDBEC54CF8CD5874883841D712142A56A8D0F218F5003CB0296B6B509619F2C", + SHA384 + ), + ( + "test", + "6915D11632ACA3C40D5D51C08DAF9C555933819548784480E93499000D9F0B7F", + "461D93F31B6540894788FD206C07CFA0CC35F46FA3C91816FFF1040AD1581A04", + "39AF9F15DE0DB8D97E72719C74820D304CE5226E32DEDAE67519E840D1194E55", + SHA512 + ) + ) + + signatures_p384_ = ( + ( + "sample", + "4471EF7518BB2C7C20F62EAE1C387AD0C5E8E470995DB4ACF694466E6AB096630F29E5938D25106C3C340045A2DB01A7", + "EC748D839243D6FBEF4FC5C4859A7DFFD7F3ABDDF72014540C16D73309834FA37B9BA002899F6FDA3A4A9386790D4EB2", + "A3BCFA947BEEF4732BF247AC17F71676CB31A847B9FF0CBC9C9ED4C1A5B3FACF26F49CA031D4857570CCB5CA4424A443", + SHA1 + ), + ( + "sample", + "A4E4D2F0E729EB786B31FC20AD5D849E304450E0AE8E3E341134A5C1AFA03CAB8083EE4E3C45B06A5899EA56C51B5879", + "42356E76B55A6D9B4631C865445DBE54E056D3B3431766D0509244793C3F9366450F76EE3DE43F5A125333A6BE060122", + "9DA0C81787064021E78DF658F2FBB0B042BF304665DB721F077A4298B095E4834C082C03D83028EFBF93A3C23940CA8D", + SHA224 + ), + ( + "sample", + "180AE9F9AEC5438A44BC159A1FCB277C7BE54FA20E7CF404B490650A8ACC414E375572342863C899F9F2EDF9747A9B60", + "21B13D1E013C7FA1392D03C5F99AF8B30C570C6F98D4EA8E354B63A21D3DAA33BDE1E888E63355D92FA2B3C36D8FB2CD", + "F3AA443FB107745BF4BD77CB3891674632068A10CA67E3D45DB2266FA7D1FEEBEFDC63ECCD1AC42EC0CB8668A4FA0AB0", + SHA256 + ), + ( + "sample", + "94ED910D1A099DAD3254E9242AE85ABDE4BA15168EAF0CA87A555FD56D10FBCA2907E3E83BA95368623B8C4686915CF9", + "94EDBB92A5ECB8AAD4736E56C691916B3F88140666CE9FA73D64C4EA95AD133C81A648152E44ACF96E36DD1E80FABE46", + "99EF4AEB15F178CEA1FE40DB2603138F130E740A19624526203B6351D0A3A94FA329C145786E679E7B82C71A38628AC8", + SHA384 + ), + ( + "sample", + "92FC3C7183A883E24216D1141F1A8976C5B0DD797DFA597E3D7B32198BD35331A4E966532593A52980D0E3AAA5E10EC3", + "ED0959D5880AB2D869AE7F6C2915C6D60F96507F9CB3E047C0046861DA4A799CFE30F35CC900056D7C99CD7882433709", + "512C8CCEEE3890A84058CE1E22DBC2198F42323CE8ACA9135329F03C068E5112DC7CC3EF3446DEFCEB01A45C2667FDD5", + SHA512 + ), + ( + "test", + 
"66CC2C8F4D303FC962E5FF6A27BD79F84EC812DDAE58CF5243B64A4AD8094D47EC3727F3A3C186C15054492E30698497", + "4BC35D3A50EF4E30576F58CD96CE6BF638025EE624004A1F7789A8B8E43D0678ACD9D29876DAF46638645F7F404B11C7", + "D5A6326C494ED3FF614703878961C0FDE7B2C278F9A65FD8C4B7186201A2991695BA1C84541327E966FA7B50F7382282", + SHA1 + ), + ( + "test", + "18FA39DB95AA5F561F30FA3591DC59C0FA3653A80DAFFA0B48D1A4C6DFCBFF6E3D33BE4DC5EB8886A8ECD093F2935726", + "E8C9D0B6EA72A0E7837FEA1D14A1A9557F29FAA45D3E7EE888FC5BF954B5E62464A9A817C47FF78B8C11066B24080E72", + "07041D4A7A0379AC7232FF72E6F77B6DDB8F09B16CCE0EC3286B2BD43FA8C6141C53EA5ABEF0D8231077A04540A96B66", + SHA224 + ), + ( + "test", + "0CFAC37587532347DC3389FDC98286BBA8C73807285B184C83E62E26C401C0FAA48DD070BA79921A3457ABFF2D630AD7", + "6D6DEFAC9AB64DABAFE36C6BF510352A4CC27001263638E5B16D9BB51D451559F918EEDAF2293BE5B475CC8F0188636B", + "2D46F3BECBCC523D5F1A1256BF0C9B024D879BA9E838144C8BA6BAEB4B53B47D51AB373F9845C0514EEFB14024787265", + SHA256 + ), + ( + "test", + "015EE46A5BF88773ED9123A5AB0807962D193719503C527B031B4C2D225092ADA71F4A459BC0DA98ADB95837DB8312EA", + "8203B63D3C853E8D77227FB377BCF7B7B772E97892A80F36AB775D509D7A5FEB0542A7F0812998DA8F1DD3CA3CF023DB", + "DDD0760448D42D8A43AF45AF836FCE4DE8BE06B485E9B61B827C2F13173923E06A739F040649A667BF3B828246BAA5A5", + SHA384 + ), + ( + "test", + "3780C4F67CB15518B6ACAE34C9F83568D2E12E47DEAB6C50A4E4EE5319D1E8CE0E2CC8A136036DC4B9C00E6888F66B6C", + "A0D5D090C9980FAF3C2CE57B7AE951D31977DD11C775D314AF55F76C676447D06FB6495CD21B4B6E340FC236584FB277", + "976984E59B4C77B0E8E4460DCA3D9F20E07B9BB1F63BEEFAF576F6B2E8B224634A2092CD3792E0159AD9CEE37659C736", + SHA512 + ), + ) + + signatures_p521_ = ( + ( + "sample", + "0089C071B419E1C2820962321787258469511958E80582E95D8378E0C2CCDB3CB42BEDE42F50E3FA3C71F5A76724281D31D9C89F0F91FC1BE4918DB1C03A5838D0F9", + "00343B6EC45728975EA5CBA6659BBB6062A5FF89EEA58BE3C80B619F322C87910FE092F7D45BB0F8EEE01ED3F20BABEC079D202AE677B243AB40B5431D497C55D75D", + "00E7B0E675A9B24413D448B8CC119D2BF7B2D2DF032741C096634D6D65D0DBE3D5694625FB9E8104D3B842C1B0E2D0B98BEA19341E8676AEF66AE4EBA3D5475D5D16", + SHA1 + ), + ( + "sample", + "0121415EC2CD7726330A61F7F3FA5DE14BE9436019C4DB8CB4041F3B54CF31BE0493EE3F427FB906393D895A19C9523F3A1D54BB8702BD4AA9C99DAB2597B92113F3", + "01776331CFCDF927D666E032E00CF776187BC9FDD8E69D0DABB4109FFE1B5E2A30715F4CC923A4A5E94D2503E9ACFED92857B7F31D7152E0F8C00C15FF3D87E2ED2E", + "0050CB5265417FE2320BBB5A122B8E1A32BD699089851128E360E620A30C7E17BA41A666AF126CE100E5799B153B60528D5300D08489CA9178FB610A2006C254B41F", + SHA224 + ), + ( + "sample", + "00EDF38AFCAAECAB4383358B34D67C9F2216C8382AAEA44A3DAD5FDC9C32575761793FEF24EB0FC276DFC4F6E3EC476752F043CF01415387470BCBD8678ED2C7E1A0", + "01511BB4D675114FE266FC4372B87682BAECC01D3CC62CF2303C92B3526012659D16876E25C7C1E57648F23B73564D67F61C6F14D527D54972810421E7D87589E1A7", + "004A171143A83163D6DF460AAF61522695F207A58B95C0644D87E52AA1A347916E4F7A72930B1BC06DBE22CE3F58264AFD23704CBB63B29B931F7DE6C9D949A7ECFC", + SHA256 + ), + ( + "sample", + "01546A108BC23A15D6F21872F7DED661FA8431DDBD922D0DCDB77CC878C8553FFAD064C95A920A750AC9137E527390D2D92F153E66196966EA554D9ADFCB109C4211", + "01EA842A0E17D2DE4F92C15315C63DDF72685C18195C2BB95E572B9C5136CA4B4B576AD712A52BE9730627D16054BA40CC0B8D3FF035B12AE75168397F5D50C67451", + "01F21A3CEE066E1961025FB048BD5FE2B7924D0CD797BABE0A83B66F1E35EEAF5FDE143FA85DC394A7DEE766523393784484BDF3E00114A1C857CDE1AA203DB65D61", + SHA384 + ), + ( + "sample", + 
"01DAE2EA071F8110DC26882D4D5EAE0621A3256FC8847FB9022E2B7D28E6F10198B1574FDD03A9053C08A1854A168AA5A57470EC97DD5CE090124EF52A2F7ECBFFD3", + "00C328FAFCBD79DD77850370C46325D987CB525569FB63C5D3BC53950E6D4C5F174E25A1EE9017B5D450606ADD152B534931D7D4E8455CC91F9B15BF05EC36E377FA", + "00617CCE7CF5064806C467F678D3B4080D6F1CC50AF26CA209417308281B68AF282623EAA63E5B5C0723D8B8C37FF0777B1A20F8CCB1DCCC43997F1EE0E44DA4A67A", + SHA512 + ), + ( + "test", + "00BB9F2BF4FE1038CCF4DABD7139A56F6FD8BB1386561BD3C6A4FC818B20DF5DDBA80795A947107A1AB9D12DAA615B1ADE4F7A9DC05E8E6311150F47F5C57CE8B222", + "013BAD9F29ABE20DE37EBEB823C252CA0F63361284015A3BF430A46AAA80B87B0693F0694BD88AFE4E661FC33B094CD3B7963BED5A727ED8BD6A3A202ABE009D0367", + "01E9BB81FF7944CA409AD138DBBEE228E1AFCC0C890FC78EC8604639CB0DBDC90F717A99EAD9D272855D00162EE9527567DD6A92CBD629805C0445282BBC916797FF", + SHA1 + ), + ( + "test", + "0040D09FCF3C8A5F62CF4FB223CBBB2B9937F6B0577C27020A99602C25A01136987E452988781484EDBBCF1C47E554E7FC901BC3085E5206D9F619CFF07E73D6F706", + "01C7ED902E123E6815546065A2C4AF977B22AA8EADDB68B2C1110E7EA44D42086BFE4A34B67DDC0E17E96536E358219B23A706C6A6E16BA77B65E1C595D43CAE17FB", + "0177336676304FCB343CE028B38E7B4FBA76C1C1B277DA18CAD2A8478B2A9A9F5BEC0F3BA04F35DB3E4263569EC6AADE8C92746E4C82F8299AE1B8F1739F8FD519A4", + SHA224 + ), + ( + "test", + "001DE74955EFAABC4C4F17F8E84D881D1310B5392D7700275F82F145C61E843841AF09035BF7A6210F5A431A6A9E81C9323354A9E69135D44EBD2FCAA7731B909258", + "000E871C4A14F993C6C7369501900C4BC1E9C7B0B4BA44E04868B30B41D8071042EB28C4C250411D0CE08CD197E4188EA4876F279F90B3D8D74A3C76E6F1E4656AA8", + "00CD52DBAA33B063C3A6CD8058A1FB0A46A4754B034FCC644766CA14DA8CA5CA9FDE00E88C1AD60CCBA759025299079D7A427EC3CC5B619BFBC828E7769BCD694E86", + SHA256 + ), + ( + "test", + "01F1FC4A349A7DA9A9E116BFDD055DC08E78252FF8E23AC276AC88B1770AE0B5DCEB1ED14A4916B769A523CE1E90BA22846AF11DF8B300C38818F713DADD85DE0C88", + "014BEE21A18B6D8B3C93FAB08D43E739707953244FDBE924FA926D76669E7AC8C89DF62ED8975C2D8397A65A49DCC09F6B0AC62272741924D479354D74FF6075578C", + "0133330865C067A0EAF72362A65E2D7BC4E461E8C8995C3B6226A21BD1AA78F0ED94FE536A0DCA35534F0CD1510C41525D163FE9D74D134881E35141ED5E8E95B979", + SHA384 + ), + ( + "test", + "016200813020EC986863BEDFC1B121F605C1215645018AEA1A7B215A564DE9EB1B38A67AA1128B80CE391C4FB71187654AAA3431027BFC7F395766CA988C964DC56D", + "013E99020ABF5CEE7525D16B69B229652AB6BDF2AFFCAEF38773B4B7D08725F10CDB93482FDCC54EDCEE91ECA4166B2A7C6265EF0CE2BD7051B7CEF945BABD47EE6D", + "01FBD0013C674AA79CB39849527916CE301C66EA7CE8B80682786AD60F98F7E78A19CA69EFF5C57400E3B3A0AD66CE0978214D13BAF4E9AC60752F7B155E2DE4DCE3", + SHA512 + ), + ) + + signatures_p192 = [] + for a, b, c, d, e in signatures_p192_: + new_tv = (tobytes(a), unhexlify(b), unhexlify(c), unhexlify(d), e) + signatures_p192.append(new_tv) + + signatures_p224 = [] + for a, b, c, d, e in signatures_p224_: + new_tv = (tobytes(a), unhexlify(b), unhexlify(c), unhexlify(d), e) + signatures_p224.append(new_tv) + + signatures_p256 = [] + for a, b, c, d, e in signatures_p256_: + new_tv = (tobytes(a), unhexlify(b), unhexlify(c), unhexlify(d), e) + signatures_p256.append(new_tv) + + signatures_p384 = [] + for a, b, c, d, e in signatures_p384_: + new_tv = (tobytes(a), unhexlify(b), unhexlify(c), unhexlify(d), e) + signatures_p384.append(new_tv) + + signatures_p521 = [] + for a, b, c, d, e in signatures_p521_: + new_tv = (tobytes(a), unhexlify(b), unhexlify(c), unhexlify(d), e) + signatures_p521.append(new_tv) + + def shortDescription(self): + return "Deterministic ECDSA Tests" + + def 
test_loopback_p192(self): + hashed_msg = SHA512.new(b"test") + signer = DSS.new(self.key_priv_p192, 'deterministic-rfc6979') + signature = signer.sign(hashed_msg) + + verifier = DSS.new(self.key_pub_p192, 'deterministic-rfc6979') + verifier.verify(hashed_msg, signature) + + def test_loopback_p224(self): + hashed_msg = SHA512.new(b"test") + signer = DSS.new(self.key_priv_p224, 'deterministic-rfc6979') + signature = signer.sign(hashed_msg) + + verifier = DSS.new(self.key_pub_p224, 'deterministic-rfc6979') + verifier.verify(hashed_msg, signature) + + def test_loopback_p256(self): + hashed_msg = SHA512.new(b"test") + signer = DSS.new(self.key_priv_p256, 'deterministic-rfc6979') + signature = signer.sign(hashed_msg) + + verifier = DSS.new(self.key_pub_p256, 'deterministic-rfc6979') + verifier.verify(hashed_msg, signature) + + def test_loopback_p384(self): + hashed_msg = SHA512.new(b"test") + signer = DSS.new(self.key_priv_p384, 'deterministic-rfc6979') + signature = signer.sign(hashed_msg) + + verifier = DSS.new(self.key_pub_p384, 'deterministic-rfc6979') + verifier.verify(hashed_msg, signature) + + def test_loopback_p521(self): + hashed_msg = SHA512.new(b"test") + signer = DSS.new(self.key_priv_p521, 'deterministic-rfc6979') + signature = signer.sign(hashed_msg) + + verifier = DSS.new(self.key_pub_p521, 'deterministic-rfc6979') + verifier.verify(hashed_msg, signature) + + def test_data_rfc6979_p192(self): + signer = DSS.new(self.key_priv_p192, 'deterministic-rfc6979') + for message, k, r, s, module in self.signatures_p192: + hash_obj = module.new(message) + result = signer.sign(hash_obj) + self.assertEqual(r + s, result) + + def test_data_rfc6979_p224(self): + signer = DSS.new(self.key_priv_p224, 'deterministic-rfc6979') + for message, k, r, s, module in self.signatures_p224: + hash_obj = module.new(message) + result = signer.sign(hash_obj) + self.assertEqual(r + s, result) + + def test_data_rfc6979_p256(self): + signer = DSS.new(self.key_priv_p256, 'deterministic-rfc6979') + for message, k, r, s, module in self.signatures_p256: + hash_obj = module.new(message) + result = signer.sign(hash_obj) + self.assertEqual(r + s, result) + + def test_data_rfc6979_p384(self): + signer = DSS.new(self.key_priv_p384, 'deterministic-rfc6979') + for message, k, r, s, module in self.signatures_p384: + hash_obj = module.new(message) + result = signer.sign(hash_obj) + self.assertEqual(r + s, result) + + def test_data_rfc6979_p521(self): + signer = DSS.new(self.key_priv_p521, 'deterministic-rfc6979') + for message, k, r, s, module in self.signatures_p521: + hash_obj = module.new(message) + result = signer.sign(hash_obj) + self.assertEqual(r + s, result) + + +def get_hash_module(hash_name): + if hash_name == "SHA-512": + hash_module = SHA512 + elif hash_name == "SHA-512/224": + hash_module = SHA512.new(truncate="224") + elif hash_name == "SHA-512/256": + hash_module = SHA512.new(truncate="256") + elif hash_name == "SHA-384": + hash_module = SHA384 + elif hash_name == "SHA-256": + hash_module = SHA256 + elif hash_name == "SHA-224": + hash_module = SHA224 + elif hash_name == "SHA-1": + hash_module = SHA1 + elif hash_name == "SHA3-224": + hash_module = SHA3_224 + elif hash_name == "SHA3-256": + hash_module = SHA3_256 + elif hash_name == "SHA3-384": + hash_module = SHA3_384 + elif hash_name == "SHA3-512": + hash_module = SHA3_512 + else: + raise ValueError("Unknown hash algorithm: " + hash_name) + return hash_module + + +class TestVectorsDSAWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings, 
slow_tests): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._slow_tests = slow_tests + self._id = "None" + self.tv = [] + + def setUp(self): + + def filter_dsa(group): + return DSA.import_key(group['keyPem']) + + def filter_sha(group): + return get_hash_module(group['sha']) + + def filter_type(group): + sig_type = group['type'] + if sig_type != 'DsaVerify': + raise ValueError("Unknown signature type " + sig_type) + return sig_type + + result = load_test_vectors_wycheproof(("Signature", "wycheproof"), + "dsa_test.json", + "Wycheproof DSA signature", + group_tag={'key': filter_dsa, + 'hash_module': filter_sha, + 'sig_type': filter_type}) + self.tv += result + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_verify(self, tv): + self._id = "Wycheproof DSA Test #" + str(tv.id) + + hashed_msg = tv.hash_module.new(tv.msg) + signer = DSS.new(tv.key, 'fips-186-3', encoding='der') + try: + signature = signer.verify(hashed_msg, tv.sig) + except ValueError as e: + if tv.warning: + return + assert not tv.valid + else: + assert tv.valid + self.warn(tv) + + def runTest(self): + for tv in self.tv: + self.test_verify(tv) + + +class TestVectorsECDSAWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings, slow_tests): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._slow_tests = slow_tests + self._id = "None" + + def add_tests(self, filename): + + def filter_ecc(group): + # These are the only curves we accept to skip + if group['key']['curve'] in ('secp224k1', 'secp256k1', + 'brainpoolP224r1', 'brainpoolP224t1', + 'brainpoolP256r1', 'brainpoolP256t1', + 'brainpoolP320r1', 'brainpoolP320t1', + 'brainpoolP384r1', 'brainpoolP384t1', + 'brainpoolP512r1', 'brainpoolP512t1', + ): + return None + return ECC.import_key(group['keyPem']) + + def filter_sha(group): + return get_hash_module(group['sha']) + + def filter_encoding(group): + encoding_name = group['type'] + if encoding_name == "EcdsaVerify": + return "der" + elif encoding_name == "EcdsaP1363Verify": + return "binary" + else: + raise ValueError("Unknown signature type " + encoding_name) + + result = load_test_vectors_wycheproof(("Signature", "wycheproof"), + filename, + "Wycheproof ECDSA signature (%s)" % filename, + group_tag={'key': filter_ecc, + 'hash_module': filter_sha, + 'encoding': filter_encoding, + }) + self.tv += result + + def setUp(self): + self.tv = [] + self.add_tests("ecdsa_secp224r1_sha224_p1363_test.json") + self.add_tests("ecdsa_secp224r1_sha224_test.json") + if self._slow_tests: + self.add_tests("ecdsa_secp224r1_sha256_p1363_test.json") + self.add_tests("ecdsa_secp224r1_sha256_test.json") + self.add_tests("ecdsa_secp224r1_sha3_224_test.json") + self.add_tests("ecdsa_secp224r1_sha3_256_test.json") + self.add_tests("ecdsa_secp224r1_sha3_512_test.json") + self.add_tests("ecdsa_secp224r1_sha512_p1363_test.json") + self.add_tests("ecdsa_secp224r1_sha512_test.json") + self.add_tests("ecdsa_secp256r1_sha256_p1363_test.json") + self.add_tests("ecdsa_secp256r1_sha256_test.json") + self.add_tests("ecdsa_secp256r1_sha3_256_test.json") + self.add_tests("ecdsa_secp256r1_sha3_512_test.json") + self.add_tests("ecdsa_secp256r1_sha512_p1363_test.json") + self.add_tests("ecdsa_secp256r1_sha512_test.json") + if self._slow_tests: + self.add_tests("ecdsa_secp384r1_sha3_384_test.json") + 
self.add_tests("ecdsa_secp384r1_sha3_512_test.json") + self.add_tests("ecdsa_secp384r1_sha384_p1363_test.json") + self.add_tests("ecdsa_secp384r1_sha384_test.json") + self.add_tests("ecdsa_secp384r1_sha512_p1363_test.json") + self.add_tests("ecdsa_secp384r1_sha512_test.json") + if self._slow_tests: + self.add_tests("ecdsa_secp521r1_sha3_512_test.json") + self.add_tests("ecdsa_secp521r1_sha512_p1363_test.json") + self.add_tests("ecdsa_secp521r1_sha512_test.json") + self.add_tests("ecdsa_test.json") + self.add_tests("ecdsa_webcrypto_test.json") + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_verify(self, tv): + self._id = "Wycheproof ECDSA Test #%d (%s, %s)" % (tv.id, tv.comment, tv.filename) + + # Skip tests with unsupported curves + if tv.key is None: + return + + hashed_msg = tv.hash_module.new(tv.msg) + signer = DSS.new(tv.key, 'fips-186-3', encoding=tv.encoding) + try: + signature = signer.verify(hashed_msg, tv.sig) + except ValueError as e: + if tv.warning: + return + if tv.comment == "k*G has a large x-coordinate": + return + assert not tv.valid + else: + assert tv.valid + self.warn(tv) + + def runTest(self): + for tv in self.tv: + self.test_verify(tv) + + +def get_tests(config={}): + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(FIPS_DSA_Tests) + tests += list_test_cases(FIPS_ECDSA_Tests) + tests += list_test_cases(Det_DSA_Tests) + tests += list_test_cases(Det_ECDSA_Tests) + + slow_tests = config.get('slow_tests') + if slow_tests: + tests += list_test_cases(FIPS_DSA_Tests_KAT) + tests += list_test_cases(FIPS_ECDSA_Tests_KAT) + + tests += [TestVectorsDSAWycheproof(wycheproof_warnings, slow_tests)] + tests += [TestVectorsECDSAWycheproof(wycheproof_warnings, slow_tests)] + + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Signature/test_eddsa.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Signature/test_eddsa.py new file mode 100644 index 000000000..6a9a9b0c2 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Signature/test_eddsa.py @@ -0,0 +1,578 @@ +# +# Copyright (c) 2022, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import unittest +from binascii import unhexlify + +from Crypto.PublicKey import ECC +from Crypto.Signature import eddsa +from Crypto.Hash import SHA512, SHAKE256 +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors_wycheproof +from Crypto.Util.number import bytes_to_long + +rfc8032_tv_str = ( + # 7.1 Ed25519 + ( + "9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60", + "d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a", + "", + None, + "", + "e5564300c360ac729086e2cc806e828a" + "84877f1eb8e5d974d873e06522490155" + "5fb8821590a33bacc61e39701cf9b46b" + "d25bf5f0595bbe24655141438e7a100b" + ), + ( + "4ccd089b28ff96da9db6c346ec114e0f5b8a319f35aba624da8cf6ed4fb8a6fb", + "3d4017c3e843895a92b70aa74d1b7ebc9c982ccf2ec4968cc0cd55f12af4660c", + "72", + None, + "", + "92a009a9f0d4cab8720e820b5f642540" + "a2b27b5416503f8fb3762223ebdb69da" + "085ac1e43e15996e458f3613d0f11d8c" + "387b2eaeb4302aeeb00d291612bb0c00" + ), + ( + "c5aa8df43f9f837bedb7442f31dcb7b166d38535076f094b85ce3a2e0b4458f7", + "fc51cd8e6218a1a38da47ed00230f0580816ed13ba3303ac5deb911548908025", + "af82", + None, + "", + "6291d657deec24024827e69c3abe01a3" + "0ce548a284743a445e3680d7db5ac3ac" + "18ff9b538d16f290ae67f760984dc659" + "4a7c15e9716ed28dc027beceea1ec40a" + ), + ( + "f5e5767cf153319517630f226876b86c8160cc583bc013744c6bf255f5cc0ee5", + "278117fc144c72340f67d0f2316e8386ceffbf2b2428c9c51fef7c597f1d426e", + "08b8b2b733424243760fe426a4b54908" + "632110a66c2f6591eabd3345e3e4eb98" + "fa6e264bf09efe12ee50f8f54e9f77b1" + "e355f6c50544e23fb1433ddf73be84d8" + "79de7c0046dc4996d9e773f4bc9efe57" + "38829adb26c81b37c93a1b270b20329d" + "658675fc6ea534e0810a4432826bf58c" + "941efb65d57a338bbd2e26640f89ffbc" + "1a858efcb8550ee3a5e1998bd177e93a" + "7363c344fe6b199ee5d02e82d522c4fe" + "ba15452f80288a821a579116ec6dad2b" + "3b310da903401aa62100ab5d1a36553e" + "06203b33890cc9b832f79ef80560ccb9" + "a39ce767967ed628c6ad573cb116dbef" + "efd75499da96bd68a8a97b928a8bbc10" + "3b6621fcde2beca1231d206be6cd9ec7" + "aff6f6c94fcd7204ed3455c68c83f4a4" + "1da4af2b74ef5c53f1d8ac70bdcb7ed1" + "85ce81bd84359d44254d95629e9855a9" + "4a7c1958d1f8ada5d0532ed8a5aa3fb2" + "d17ba70eb6248e594e1a2297acbbb39d" + "502f1a8c6eb6f1ce22b3de1a1f40cc24" + "554119a831a9aad6079cad88425de6bd" + "e1a9187ebb6092cf67bf2b13fd65f270" + "88d78b7e883c8759d2c4f5c65adb7553" + "878ad575f9fad878e80a0c9ba63bcbcc" + "2732e69485bbc9c90bfbd62481d9089b" + "eccf80cfe2df16a2cf65bd92dd597b07" + "07e0917af48bbb75fed413d238f5555a" + "7a569d80c3414a8d0859dc65a46128ba" + "b27af87a71314f318c782b23ebfe808b" + "82b0ce26401d2e22f04d83d1255dc51a" + "ddd3b75a2b1ae0784504df543af8969b" + "e3ea7082ff7fc9888c144da2af58429e" + "c96031dbcad3dad9af0dcbaaaf268cb8" + "fcffead94f3c7ca495e056a9b47acdb7" + "51fb73e666c6c655ade8297297d07ad1" + "ba5e43f1bca32301651339e22904cc8c" + "42f58c30c04aafdb038dda0847dd988d" + 
"cda6f3bfd15c4b4c4525004aa06eeff8" + "ca61783aacec57fb3d1f92b0fe2fd1a8" + "5f6724517b65e614ad6808d6f6ee34df" + "f7310fdc82aebfd904b01e1dc54b2927" + "094b2db68d6f903b68401adebf5a7e08" + "d78ff4ef5d63653a65040cf9bfd4aca7" + "984a74d37145986780fc0b16ac451649" + "de6188a7dbdf191f64b5fc5e2ab47b57" + "f7f7276cd419c17a3ca8e1b939ae49e4" + "88acba6b965610b5480109c8b17b80e1" + "b7b750dfc7598d5d5011fd2dcc5600a3" + "2ef5b52a1ecc820e308aa342721aac09" + "43bf6686b64b2579376504ccc493d97e" + "6aed3fb0f9cd71a43dd497f01f17c0e2" + "cb3797aa2a2f256656168e6c496afc5f" + "b93246f6b1116398a346f1a641f3b041" + "e989f7914f90cc2c7fff357876e506b5" + "0d334ba77c225bc307ba537152f3f161" + "0e4eafe595f6d9d90d11faa933a15ef1" + "369546868a7f3a45a96768d40fd9d034" + "12c091c6315cf4fde7cb68606937380d" + "b2eaaa707b4c4185c32eddcdd306705e" + "4dc1ffc872eeee475a64dfac86aba41c" + "0618983f8741c5ef68d3a101e8a3b8ca" + "c60c905c15fc910840b94c00a0b9d0", + None, + "", + "0aab4c900501b3e24d7cdf4663326a3a" + "87df5e4843b2cbdb67cbf6e460fec350" + "aa5371b1508f9f4528ecea23c436d94b" + "5e8fcd4f681e30a6ac00a9704a188a03" + ), + # 7.2 Ed25519ctx + ( + "0305334e381af78f141cb666f6199f57" + "bc3495335a256a95bd2a55bf546663f6", + "dfc9425e4f968f7f0c29f0259cf5f9ae" + "d6851c2bb4ad8bfb860cfee0ab248292", + "f726936d19c800494e3fdaff20b276a8", + None, + "666f6f", + "55a4cc2f70a54e04288c5f4cd1e45a7b" + "b520b36292911876cada7323198dd87a" + "8b36950b95130022907a7fb7c4e9b2d5" + "f6cca685a587b4b21f4b888e4e7edb0d" + ), + ( + "0305334e381af78f141cb666f6199f57" + "bc3495335a256a95bd2a55bf546663f6", + "dfc9425e4f968f7f0c29f0259cf5f9ae" + "d6851c2bb4ad8bfb860cfee0ab248292", + "f726936d19c800494e3fdaff20b276a8", + None, + "626172", + "fc60d5872fc46b3aa69f8b5b4351d580" + "8f92bcc044606db097abab6dbcb1aee3" + "216c48e8b3b66431b5b186d1d28f8ee1" + "5a5ca2df6668346291c2043d4eb3e90d" + ), + ( + "0305334e381af78f141cb666f6199f57" + "bc3495335a256a95bd2a55bf546663f6", + "dfc9425e4f968f7f0c29f0259cf5f9ae" + "d6851c2bb4ad8bfb860cfee0ab248292", + "508e9e6882b979fea900f62adceaca35", + None, + "666f6f", + "8b70c1cc8310e1de20ac53ce28ae6e72" + "07f33c3295e03bb5c0732a1d20dc6490" + "8922a8b052cf99b7c4fe107a5abb5b2c" + "4085ae75890d02df26269d8945f84b0b" + ), + ( + "ab9c2853ce297ddab85c993b3ae14bca" + "d39b2c682beabc27d6d4eb20711d6560", + "0f1d1274943b91415889152e893d80e9" + "3275a1fc0b65fd71b4b0dda10ad7d772", + "f726936d19c800494e3fdaff20b276a8", + None, + "666f6f", + "21655b5f1aa965996b3f97b3c849eafb" + "a922a0a62992f73b3d1b73106a84ad85" + "e9b86a7b6005ea868337ff2d20a7f5fb" + "d4cd10b0be49a68da2b2e0dc0ad8960f" + ), + # 7.3 Ed25519ph + ( + "833fe62409237b9d62ec77587520911e" + "9a759cec1d19755b7da901b96dca3d42", + "ec172b93ad5e563bf4932c70e1245034" + "c35467ef2efd4d64ebf819683467e2bf", + "616263", + SHA512, + "", + "98a70222f0b8121aa9d30f813d683f80" + "9e462b469c7ff87639499bb94e6dae41" + "31f85042463c2a355a2003d062adf5aa" + "a10b8c61e636062aaad11c2a26083406" + ), + # 7.4 Ed448 + ( + "6c82a562cb808d10d632be89c8513ebf6c929f34ddfa8c9f63c9960ef6e348a3" + "528c8a3fcc2f044e39a3fc5b94492f8f032e7549a20098f95b", + "5fd7449b59b461fd2ce787ec616ad46a1da1342485a70e1f8a0ea75d80e96778" + "edf124769b46c7061bd6783df1e50f6cd1fa1abeafe8256180", + "", + None, + "", + "533a37f6bbe457251f023c0d88f976ae2dfb504a843e34d2074fd823d41a591f" + "2b233f034f628281f2fd7a22ddd47d7828c59bd0a21bfd3980ff0d2028d4b18a" + "9df63e006c5d1c2d345b925d8dc00b4104852db99ac5c7cdda8530a113a0f4db" + "b61149f05a7363268c71d95808ff2e652600" + ), + ( + "c4eab05d357007c632f3dbb48489924d552b08fe0c353a0d4a1f00acda2c463a" + 
"fbea67c5e8d2877c5e3bc397a659949ef8021e954e0a12274e", + "43ba28f430cdff456ae531545f7ecd0ac834a55d9358c0372bfa0c6c6798c086" + "6aea01eb00742802b8438ea4cb82169c235160627b4c3a9480", + "03", + None, + "", + "26b8f91727bd62897af15e41eb43c377efb9c610d48f2335cb0bd0087810f435" + "2541b143c4b981b7e18f62de8ccdf633fc1bf037ab7cd779805e0dbcc0aae1cb" + "cee1afb2e027df36bc04dcecbf154336c19f0af7e0a6472905e799f1953d2a0f" + "f3348ab21aa4adafd1d234441cf807c03a00", + ), + ( + "c4eab05d357007c632f3dbb48489924d552b08fe0c353a0d4a1f00acda2c463a" + "fbea67c5e8d2877c5e3bc397a659949ef8021e954e0a12274e", + "43ba28f430cdff456ae531545f7ecd0ac834a55d9358c0372bfa0c6c6798c086" + "6aea01eb00742802b8438ea4cb82169c235160627b4c3a9480", + "03", + None, + "666f6f", + "d4f8f6131770dd46f40867d6fd5d5055de43541f8c5e35abbcd001b32a89f7d2" + "151f7647f11d8ca2ae279fb842d607217fce6e042f6815ea000c85741de5c8da" + "1144a6a1aba7f96de42505d7a7298524fda538fccbbb754f578c1cad10d54d0d" + "5428407e85dcbc98a49155c13764e66c3c00", + ), + ( + "cd23d24f714274e744343237b93290f511f6425f98e64459ff203e8985083ffd" + "f60500553abc0e05cd02184bdb89c4ccd67e187951267eb328", + "dcea9e78f35a1bf3499a831b10b86c90aac01cd84b67a0109b55a36e9328b1e3" + "65fce161d71ce7131a543ea4cb5f7e9f1d8b00696447001400", + "0c3e544074ec63b0265e0c", + None, + "", + "1f0a8888ce25e8d458a21130879b840a9089d999aaba039eaf3e3afa090a09d3" + "89dba82c4ff2ae8ac5cdfb7c55e94d5d961a29fe0109941e00b8dbdeea6d3b05" + "1068df7254c0cdc129cbe62db2dc957dbb47b51fd3f213fb8698f064774250a5" + "028961c9bf8ffd973fe5d5c206492b140e00", + ), + ( + "258cdd4ada32ed9c9ff54e63756ae582fb8fab2ac721f2c8e676a72768513d93" + "9f63dddb55609133f29adf86ec9929dccb52c1c5fd2ff7e21b", + "3ba16da0c6f2cc1f30187740756f5e798d6bc5fc015d7c63cc9510ee3fd44adc" + "24d8e968b6e46e6f94d19b945361726bd75e149ef09817f580", + "64a65f3cdedcdd66811e2915", + None, + "", + "7eeeab7c4e50fb799b418ee5e3197ff6bf15d43a14c34389b59dd1a7b1b85b4a" + "e90438aca634bea45e3a2695f1270f07fdcdf7c62b8efeaf00b45c2c96ba457e" + "b1a8bf075a3db28e5c24f6b923ed4ad747c3c9e03c7079efb87cb110d3a99861" + "e72003cbae6d6b8b827e4e6c143064ff3c00", + ), + ( + "7ef4e84544236752fbb56b8f31a23a10e42814f5f55ca037cdcc11c64c9a3b29" + "49c1bb60700314611732a6c2fea98eebc0266a11a93970100e", + "b3da079b0aa493a5772029f0467baebee5a8112d9d3a22532361da294f7bb381" + "5c5dc59e176b4d9f381ca0938e13c6c07b174be65dfa578e80", + "64a65f3cdedcdd66811e2915e7", + None, + "", + "6a12066f55331b6c22acd5d5bfc5d71228fbda80ae8dec26bdd306743c5027cb" + "4890810c162c027468675ecf645a83176c0d7323a2ccde2d80efe5a1268e8aca" + "1d6fbc194d3f77c44986eb4ab4177919ad8bec33eb47bbb5fc6e28196fd1caf5" + "6b4e7e0ba5519234d047155ac727a1053100", + ), + ( + "d65df341ad13e008567688baedda8e9dcdc17dc024974ea5b4227b6530e339bf" + "f21f99e68ca6968f3cca6dfe0fb9f4fab4fa135d5542ea3f01", + "df9705f58edbab802c7f8363cfe5560ab1c6132c20a9f1dd163483a26f8ac53a" + "39d6808bf4a1dfbd261b099bb03b3fb50906cb28bd8a081f00", + "bd0f6a3747cd561bdddf4640a332461a4a30a12a434cd0bf40d766d9c6d458e5" + "512204a30c17d1f50b5079631f64eb3112182da3005835461113718d1a5ef944", + None, + "", + "554bc2480860b49eab8532d2a533b7d578ef473eeb58c98bb2d0e1ce488a98b1" + "8dfde9b9b90775e67f47d4a1c3482058efc9f40d2ca033a0801b63d45b3b722e" + "f552bad3b4ccb667da350192b61c508cf7b6b5adadc2c8d9a446ef003fb05cba" + "5f30e88e36ec2703b349ca229c2670833900", + ), + ( + "2ec5fe3c17045abdb136a5e6a913e32ab75ae68b53d2fc149b77e504132d3756" + "9b7e766ba74a19bd6162343a21c8590aa9cebca9014c636df5", + "79756f014dcfe2079f5dd9e718be4171e2ef2486a08f25186f6bff43a9936b9b" + 
"fe12402b08ae65798a3d81e22e9ec80e7690862ef3d4ed3a00", + "15777532b0bdd0d1389f636c5f6b9ba734c90af572877e2d272dd078aa1e567c" + "fa80e12928bb542330e8409f3174504107ecd5efac61ae7504dabe2a602ede89" + "e5cca6257a7c77e27a702b3ae39fc769fc54f2395ae6a1178cab4738e543072f" + "c1c177fe71e92e25bf03e4ecb72f47b64d0465aaea4c7fad372536c8ba516a60" + "39c3c2a39f0e4d832be432dfa9a706a6e5c7e19f397964ca4258002f7c0541b5" + "90316dbc5622b6b2a6fe7a4abffd96105eca76ea7b98816af0748c10df048ce0" + "12d901015a51f189f3888145c03650aa23ce894c3bd889e030d565071c59f409" + "a9981b51878fd6fc110624dcbcde0bf7a69ccce38fabdf86f3bef6044819de11", + None, + "", + "c650ddbb0601c19ca11439e1640dd931f43c518ea5bea70d3dcde5f4191fe53f" + "00cf966546b72bcc7d58be2b9badef28743954e3a44a23f880e8d4f1cfce2d7a" + "61452d26da05896f0a50da66a239a8a188b6d825b3305ad77b73fbac0836ecc6" + "0987fd08527c1a8e80d5823e65cafe2a3d00", + ), + ( + "872d093780f5d3730df7c212664b37b8a0f24f56810daa8382cd4fa3f77634ec" + "44dc54f1c2ed9bea86fafb7632d8be199ea165f5ad55dd9ce8", + "a81b2e8a70a5ac94ffdbcc9badfc3feb0801f258578bb114ad44ece1ec0e799d" + "a08effb81c5d685c0c56f64eecaef8cdf11cc38737838cf400", + "6ddf802e1aae4986935f7f981ba3f0351d6273c0a0c22c9c0e8339168e675412" + "a3debfaf435ed651558007db4384b650fcc07e3b586a27a4f7a00ac8a6fec2cd" + "86ae4bf1570c41e6a40c931db27b2faa15a8cedd52cff7362c4e6e23daec0fbc" + "3a79b6806e316efcc7b68119bf46bc76a26067a53f296dafdbdc11c77f7777e9" + "72660cf4b6a9b369a6665f02e0cc9b6edfad136b4fabe723d2813db3136cfde9" + "b6d044322fee2947952e031b73ab5c603349b307bdc27bc6cb8b8bbd7bd32321" + "9b8033a581b59eadebb09b3c4f3d2277d4f0343624acc817804728b25ab79717" + "2b4c5c21a22f9c7839d64300232eb66e53f31c723fa37fe387c7d3e50bdf9813" + "a30e5bb12cf4cd930c40cfb4e1fc622592a49588794494d56d24ea4b40c89fc0" + "596cc9ebb961c8cb10adde976a5d602b1c3f85b9b9a001ed3c6a4d3b1437f520" + "96cd1956d042a597d561a596ecd3d1735a8d570ea0ec27225a2c4aaff26306d1" + "526c1af3ca6d9cf5a2c98f47e1c46db9a33234cfd4d81f2c98538a09ebe76998" + "d0d8fd25997c7d255c6d66ece6fa56f11144950f027795e653008f4bd7ca2dee" + "85d8e90f3dc315130ce2a00375a318c7c3d97be2c8ce5b6db41a6254ff264fa6" + "155baee3b0773c0f497c573f19bb4f4240281f0b1f4f7be857a4e59d416c06b4" + "c50fa09e1810ddc6b1467baeac5a3668d11b6ecaa901440016f389f80acc4db9" + "77025e7f5924388c7e340a732e554440e76570f8dd71b7d640b3450d1fd5f041" + "0a18f9a3494f707c717b79b4bf75c98400b096b21653b5d217cf3565c9597456" + "f70703497a078763829bc01bb1cbc8fa04eadc9a6e3f6699587a9e75c94e5bab" + "0036e0b2e711392cff0047d0d6b05bd2a588bc109718954259f1d86678a579a3" + "120f19cfb2963f177aeb70f2d4844826262e51b80271272068ef5b3856fa8535" + "aa2a88b2d41f2a0e2fda7624c2850272ac4a2f561f8f2f7a318bfd5caf969614" + "9e4ac824ad3460538fdc25421beec2cc6818162d06bbed0c40a387192349db67" + "a118bada6cd5ab0140ee273204f628aad1c135f770279a651e24d8c14d75a605" + "9d76b96a6fd857def5e0b354b27ab937a5815d16b5fae407ff18222c6d1ed263" + "be68c95f32d908bd895cd76207ae726487567f9a67dad79abec316f683b17f2d" + "02bf07e0ac8b5bc6162cf94697b3c27cd1fea49b27f23ba2901871962506520c" + "392da8b6ad0d99f7013fbc06c2c17a569500c8a7696481c1cd33e9b14e40b82e" + "79a5f5db82571ba97bae3ad3e0479515bb0e2b0f3bfcd1fd33034efc6245eddd" + "7ee2086ddae2600d8ca73e214e8c2b0bdb2b047c6a464a562ed77b73d2d841c4" + "b34973551257713b753632efba348169abc90a68f42611a40126d7cb21b58695" + "568186f7e569d2ff0f9e745d0487dd2eb997cafc5abf9dd102e62ff66cba87", + None, + "", + "e301345a41a39a4d72fff8df69c98075a0cc082b802fc9b2b6bc503f926b65bd" + "df7f4c8f1cb49f6396afc8a70abe6d8aef0db478d4c6b2970076c6a0484fe76d" + 
"76b3a97625d79f1ce240e7c576750d295528286f719b413de9ada3e8eb78ed57" + "3603ce30d8bb761785dc30dbc320869e1a00" + ), + # 7.5 Ed448ph + ( + "833fe62409237b9d62ec77587520911e9a759cec1d19755b7da901b96dca3d42" + "ef7822e0d5104127dc05d6dbefde69e3ab2cec7c867c6e2c49", + "259b71c19f83ef77a7abd26524cbdb3161b590a48f7d17de3ee0ba9c52beb743" + "c09428a131d6b1b57303d90d8132c276d5ed3d5d01c0f53880", + "616263", + SHAKE256, + "", + "822f6901f7480f3d5f562c592994d9693602875614483256505600bbc281ae38" + "1f54d6bce2ea911574932f52a4e6cadd78769375ec3ffd1b801a0d9b3f4030cd" + "433964b6457ea39476511214f97469b57dd32dbc560a9a94d00bff07620464a3" + "ad203df7dc7ce360c3cd3696d9d9fab90f00" + ), + ( + "833fe62409237b9d62ec77587520911e9a759cec1d19755b7da901b96dca3d42" + "ef7822e0d5104127dc05d6dbefde69e3ab2cec7c867c6e2c49", + "259b71c19f83ef77a7abd26524cbdb3161b590a48f7d17de3ee0ba9c52beb743" + "c09428a131d6b1b57303d90d8132c276d5ed3d5d01c0f53880", + "616263", + SHAKE256, + "666f6f", + "c32299d46ec8ff02b54540982814dce9a05812f81962b649d528095916a2aa48" + "1065b1580423ef927ecf0af5888f90da0f6a9a85ad5dc3f280d91224ba9911a3" + "653d00e484e2ce232521481c8658df304bb7745a73514cdb9bf3e15784ab7128" + "4f8d0704a608c54a6b62d97beb511d132100", + ), +) + + +rfc8032_tv_bytes = [] +for tv_str in rfc8032_tv_str: + rfc8032_tv_bytes.append([unhexlify(i) if isinstance(i, str) else i for i in tv_str]) + + +class TestEdDSA(unittest.TestCase): + + def test_sign(self): + for sk, _, msg, hashmod, ctx, exp_signature in rfc8032_tv_bytes: + key = eddsa.import_private_key(sk) + signer = eddsa.new(key, 'rfc8032', context=ctx) + if hashmod is None: + # PureEdDSA + signature = signer.sign(msg) + else: + # HashEdDSA + hashobj = hashmod.new(msg) + signature = signer.sign(hashobj) + self.assertEqual(exp_signature, signature) + + def test_verify(self): + for _, pk, msg, hashmod, ctx, exp_signature in rfc8032_tv_bytes: + key = eddsa.import_public_key(pk) + verifier = eddsa.new(key, 'rfc8032', context=ctx) + if hashmod is None: + # PureEdDSA + verifier.verify(msg, exp_signature) + else: + # HashEdDSA + hashobj = hashmod.new(msg) + verifier.verify(hashobj, exp_signature) + + def test_negative(self): + key = ECC.generate(curve="ed25519") + self.assertRaises(ValueError, eddsa.new, key, 'rfc9999') + + nist_key = ECC.generate(curve="p256") + self.assertRaises(ValueError, eddsa.new, nist_key, 'rfc8032') + + +class TestExport_Ed25519(unittest.TestCase): + + def test_raw(self): + key = ECC.generate(curve="Ed25519") + x, y = key.pointQ.xy + raw = bytearray(key._export_eddsa()) + sign_x = raw[31] >> 7 + raw[31] &= 0x7F + yt = bytes_to_long(raw[::-1]) + self.assertEqual(y, yt) + self.assertEqual(x & 1, sign_x) + + key = ECC.construct(point_x=0, point_y=1, curve="Ed25519") + out = key._export_eddsa() + self.assertEqual(b'\x01' + b'\x00' * 31, out) + + +class TestExport_Ed448(unittest.TestCase): + + def test_raw(self): + key = ECC.generate(curve="Ed448") + x, y = key.pointQ.xy + raw = bytearray(key._export_eddsa()) + sign_x = raw[56] >> 7 + raw[56] &= 0x7F + yt = bytes_to_long(raw[::-1]) + self.assertEqual(y, yt) + self.assertEqual(x & 1, sign_x) + + key = ECC.construct(point_x=0, point_y=1, curve="Ed448") + out = key._export_eddsa() + self.assertEqual(b'\x01' + b'\x00' * 56, out) + + +class TestImport_Ed25519(unittest.TestCase): + + def test_raw(self): + Px = 24407857220263921307776619664228778204996144802740950419837658238229122415920 + Py = 56480760040633817885061096979765646085062883740629155052073094891081309750690 + encoded = b'\xa2\x05\xd6\x00\xe1 
\xe1\xc0\xff\x96\xee?V\x8e\xba/\xd3\x89\x06\xd7\xc4c\xe8$\xc2d\xd7a1\xfa\xde|' + key = eddsa.import_public_key(encoded) + self.assertEqual(Py, key.pointQ.y) + self.assertEqual(Px, key.pointQ.x) + + encoded = b'\x01' + b'\x00' * 31 + key = eddsa.import_public_key(encoded) + self.assertEqual(1, key.pointQ.y) + self.assertEqual(0, key.pointQ.x) + + +class TestImport_Ed448(unittest.TestCase): + + def test_raw(self): + Px = 0x153f42025aba3b0daecaa5cd79458b3146c7c9378c16c17b4a59bc3561113d90c169045bc12966c3f93e140c2ca0a3acc33d9205b9daf9b1 + Py = 0x38f5c0015d3dedd576c232810dd90373b5b1d631a12894c043b7be529cbae03ede177d8fa490b56131dbcb2465d2aba777ef839fc1719b25 + encoded = unhexlify("259b71c19f83ef77a7abd26524cbdb31" + "61b590a48f7d17de3ee0ba9c52beb743" + "c09428a131d6b1b57303d90d8132c276" + "d5ed3d5d01c0f53880") + key = eddsa.import_public_key(encoded) + self.assertEqual(Py, key.pointQ.y) + self.assertEqual(Px, key.pointQ.x) + + encoded = b'\x01' + b'\x00' * 56 + key = eddsa.import_public_key(encoded) + self.assertEqual(1, key.pointQ.y) + self.assertEqual(0, key.pointQ.x) + + +class TestVectorsEdDSAWycheproof(unittest.TestCase): + + def add_tests(self, filename): + + def pk(group): + elem = group['key']['pk'] + return unhexlify(elem) + + def sk(group): + elem = group['key']['sk'] + return unhexlify(elem) + + result = load_test_vectors_wycheproof(("Signature", "wycheproof"), + filename, + "Wycheproof ECDSA signature (%s)" + % filename, + group_tag={'pk': pk, 'sk': sk}) + self.tv += result + + def setUp(self): + self.tv = [] + self.add_tests("eddsa_test.json") + self.add_tests("ed448_test.json") + + def test_sign(self, tv): + if not tv.valid: + return + + self._id = "Wycheproof EdDSA Sign Test #%d (%s, %s)" % (tv.id, tv.comment, tv.filename) + key = eddsa.import_private_key(tv.sk) + signer = eddsa.new(key, 'rfc8032') + signature = signer.sign(tv.msg) + self.assertEqual(signature, tv.sig) + + def test_verify(self, tv): + self._id = "Wycheproof EdDSA Verify Test #%d (%s, %s)" % (tv.id, tv.comment, tv.filename) + key = eddsa.import_public_key(tv.pk) + verifier = eddsa.new(key, 'rfc8032') + try: + verifier.verify(tv.msg, tv.sig) + except ValueError: + assert not tv.valid + else: + assert tv.valid + + def runTest(self): + for tv in self.tv: + self.test_sign(tv) + self.test_verify(tv) + + +def get_tests(config={}): + + tests = [] + tests += list_test_cases(TestExport_Ed25519) + tests += list_test_cases(TestExport_Ed448) + tests += list_test_cases(TestImport_Ed25519) + tests += list_test_cases(TestImport_Ed448) + tests += list_test_cases(TestEdDSA) + tests += [TestVectorsEdDSAWycheproof()] + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py new file mode 100644 index 000000000..8e2c6ee61 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py @@ -0,0 +1,348 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. 
Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import json +import unittest +from binascii import unhexlify + +from Crypto.Util.py3compat import bchr +from Crypto.Util.number import bytes_to_long +from Crypto.Util.strxor import strxor +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors, load_test_vectors_wycheproof + +from Crypto.Hash import (SHA1, SHA224, SHA256, SHA384, SHA512, SHA3_384, + SHA3_224, SHA3_256, SHA3_512) +from Crypto.PublicKey import RSA +from Crypto.Signature import pkcs1_15 +from Crypto.Signature import PKCS1_v1_5 + +from Crypto.Util._file_system import pycryptodome_filename +from Crypto.Util.strxor import strxor + + +def load_hash_by_name(hash_name): + return __import__("Crypto.Hash." 
+ hash_name, globals(), locals(), ["new"]) + + +class FIPS_PKCS1_Verify_Tests(unittest.TestCase): + + def shortDescription(self): + return "FIPS PKCS1 Tests (Verify)" + + def test_can_sign(self): + test_public_key = RSA.generate(1024).public_key() + verifier = pkcs1_15.new(test_public_key) + self.assertEqual(verifier.can_sign(), False) + + +class FIPS_PKCS1_Verify_Tests_KAT(unittest.TestCase): + pass + + +test_vectors_verify = load_test_vectors(("Signature", "PKCS1-v1.5"), + "SigVer15_186-3.rsp", + "Signature Verification 186-3", + {'shaalg': lambda x: x, + 'd': lambda x: int(x), + 'result': lambda x: x}) or [] + + +for count, tv in enumerate(test_vectors_verify): + if isinstance(tv, str): + continue + if hasattr(tv, "n"): + modulus = tv.n + continue + + hash_module = load_hash_by_name(tv.shaalg.upper()) + hash_obj = hash_module.new(tv.msg) + public_key = RSA.construct([bytes_to_long(x) for x in (modulus, tv.e)]) # type: ignore + verifier = pkcs1_15.new(public_key) + + def positive_test(self, hash_obj=hash_obj, verifier=verifier, signature=tv.s): + verifier.verify(hash_obj, signature) + + def negative_test(self, hash_obj=hash_obj, verifier=verifier, signature=tv.s): + self.assertRaises(ValueError, verifier.verify, hash_obj, signature) + + if tv.result == 'f': + setattr(FIPS_PKCS1_Verify_Tests_KAT, "test_negative_%d" % count, negative_test) + else: + setattr(FIPS_PKCS1_Verify_Tests_KAT, "test_positive_%d" % count, positive_test) + + +class FIPS_PKCS1_Sign_Tests(unittest.TestCase): + + def shortDescription(self): + return "FIPS PKCS1 Tests (Sign)" + + def test_can_sign(self): + test_private_key = RSA.generate(1024) + signer = pkcs1_15.new(test_private_key) + self.assertEqual(signer.can_sign(), True) + + +class FIPS_PKCS1_Sign_Tests_KAT(unittest.TestCase): + pass + + +test_vectors_sign = load_test_vectors(("Signature", "PKCS1-v1.5"), + "SigGen15_186-2.txt", + "Signature Generation 186-2", + {'shaalg': lambda x: x}) or [] + +test_vectors_sign += load_test_vectors(("Signature", "PKCS1-v1.5"), + "SigGen15_186-3.txt", + "Signature Generation 186-3", + {'shaalg': lambda x: x}) or [] + +for count, tv in enumerate(test_vectors_sign): + if isinstance(tv, str): + continue + if hasattr(tv, "n"): + modulus = tv.n + continue + if hasattr(tv, "e"): + private_key = RSA.construct([bytes_to_long(x) for x in (modulus, tv.e, tv.d)]) # type: ignore + signer = pkcs1_15.new(private_key) + continue + + hash_module = load_hash_by_name(tv.shaalg.upper()) + hash_obj = hash_module.new(tv.msg) + + def new_test(self, hash_obj=hash_obj, signer=signer, result=tv.s): + signature = signer.sign(hash_obj) + self.assertEqual(signature, result) + + setattr(FIPS_PKCS1_Sign_Tests_KAT, "test_%d" % count, new_test) + + +class PKCS1_15_NoParams(unittest.TestCase): + """Verify that PKCS#1 v1.5 signatures pass even without NULL parameters in + the algorithm identifier (PyCrypto/LP bug #1119552).""" + + rsakey = """-----BEGIN RSA PRIVATE KEY----- + MIIBOwIBAAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+TLr7UkvEtFrRhDDKMtuII + q19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQJACUSDEp8RTe32ftq8IwG8 + Wojl5mAd1wFiIOrZ/Uv8b963WJOJiuQcVN29vxU5+My9GPZ7RA3hrDBEAoHUDPrI + OQIhAPIPLz4dphiD9imAkivY31Rc5AfHJiQRA7XixTcjEkojAiEAyh/pJHks/Mlr + +rdPNEpotBjfV4M4BkgGAA/ipcmaAjcCIQCHvhwwKVBLzzTscT2HeUdEeBMoiXXK + JACAr3sJQJGxIQIgarRp+m1WSKV1MciwMaTOnbU7wxFs9DP1pva76lYBzgUCIQC9 + n0CnZCJ6IZYqSt0H5N7+Q+2Ro64nuwV/OSQfM6sBwQ== + -----END RSA PRIVATE KEY-----""" + + msg = b"This is a test\x0a" + + # PKCS1 v1.5 signature of the message computed using SHA-1. 
+ # The digestAlgorithm SEQUENCE does NOT contain the NULL parameter. + sig_str = "a287a13517f716e72fb14eea8e33a8db4a4643314607e7ca3e3e28"\ + "1893db74013dda8b855fd99f6fecedcb25fcb7a434f35cd0a101f8"\ + "b19348e0bd7b6f152dfc" + signature = unhexlify(sig_str) + + def runTest(self): + verifier = pkcs1_15.new(RSA.importKey(self.rsakey)) + hashed = SHA1.new(self.msg) + verifier.verify(hashed, self.signature) + + +class PKCS1_Legacy_Module_Tests(unittest.TestCase): + """Verify that the legacy module Crypto.Signature.PKCS1_v1_5 + behaves as expected. The only difference is that the verify() + method returns True/False and does not raise exceptions.""" + + def shortDescription(self): + return "Test legacy Crypto.Signature.PKCS1_v1_5" + + def runTest(self): + key = RSA.importKey(PKCS1_15_NoParams.rsakey) + hashed = SHA1.new(b"Test") + good_signature = PKCS1_v1_5.new(key).sign(hashed) + verifier = PKCS1_v1_5.new(key.public_key()) + + self.assertEqual(verifier.verify(hashed, good_signature), True) + + # Flip a few bits in the signature + bad_signature = strxor(good_signature, bchr(1) * len(good_signature)) + self.assertEqual(verifier.verify(hashed, bad_signature), False) + + +class PKCS1_All_Hashes_Tests(unittest.TestCase): + + def shortDescription(self): + return "Test PKCS#1v1.5 signature in combination with all hashes" + + def runTest(self): + + key = RSA.generate(1024) + signer = pkcs1_15.new(key) + hash_names = ("MD2", "MD4", "MD5", "RIPEMD160", "SHA1", + "SHA224", "SHA256", "SHA384", "SHA512", + "SHA3_224", "SHA3_256", "SHA3_384", "SHA3_512") + + for name in hash_names: + hashed = load_hash_by_name(name).new(b"Test") + signer.sign(hashed) + + from Crypto.Hash import BLAKE2b, BLAKE2s + for hash_size in (20, 32, 48, 64): + hashed_b = BLAKE2b.new(digest_bytes=hash_size, data=b"Test") + signer.sign(hashed_b) + for hash_size in (16, 20, 28, 32): + hashed_s = BLAKE2s.new(digest_bytes=hash_size, data=b"Test") + signer.sign(hashed_s) + + +class TestVectorsWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._id = "None" + + def setUp(self): + self.tv = [] + self.add_tests("rsa_sig_gen_misc_test.json") + self.add_tests("rsa_signature_2048_sha224_test.json") + self.add_tests("rsa_signature_2048_sha256_test.json") + self.add_tests("rsa_signature_2048_sha384_test.json") + self.add_tests("rsa_signature_2048_sha3_224_test.json") + self.add_tests("rsa_signature_2048_sha3_256_test.json") + self.add_tests("rsa_signature_2048_sha3_384_test.json") + self.add_tests("rsa_signature_2048_sha3_512_test.json") + self.add_tests("rsa_signature_2048_sha512_test.json") + self.add_tests("rsa_signature_2048_sha512_224_test.json") + self.add_tests("rsa_signature_2048_sha512_256_test.json") + self.add_tests("rsa_signature_3072_sha256_test.json") + self.add_tests("rsa_signature_3072_sha384_test.json") + self.add_tests("rsa_signature_3072_sha3_256_test.json") + self.add_tests("rsa_signature_3072_sha3_384_test.json") + self.add_tests("rsa_signature_3072_sha3_512_test.json") + self.add_tests("rsa_signature_3072_sha512_test.json") + self.add_tests("rsa_signature_3072_sha512_256_test.json") + self.add_tests("rsa_signature_4096_sha384_test.json") + self.add_tests("rsa_signature_4096_sha512_test.json") + self.add_tests("rsa_signature_4096_sha512_256_test.json") + self.add_tests("rsa_signature_test.json") + + def add_tests(self, filename): + + def filter_rsa(group): + return RSA.import_key(group['keyPem']) + + def 
filter_sha(group): + hash_name = group['sha'] + if hash_name == "SHA-512": + return SHA512 + elif hash_name == "SHA-512/224": + return SHA512.new(truncate="224") + elif hash_name == "SHA-512/256": + return SHA512.new(truncate="256") + elif hash_name == "SHA3-512": + return SHA3_512 + elif hash_name == "SHA-384": + return SHA384 + elif hash_name == "SHA3-384": + return SHA3_384 + elif hash_name == "SHA-256": + return SHA256 + elif hash_name == "SHA3-256": + return SHA3_256 + elif hash_name == "SHA-224": + return SHA224 + elif hash_name == "SHA3-224": + return SHA3_224 + elif hash_name == "SHA-1": + return SHA1 + else: + raise ValueError("Unknown hash algorithm: " + hash_name) + + def filter_type(group): + type_name = group['type'] + if type_name not in ("RsassaPkcs1Verify", "RsassaPkcs1Generate"): + raise ValueError("Unknown type name " + type_name) + + result = load_test_vectors_wycheproof(("Signature", "wycheproof"), + filename, + "Wycheproof PKCS#1v1.5 signature (%s)" % filename, + group_tag={'rsa_key': filter_rsa, + 'hash_mod': filter_sha, + 'type': filter_type}) + return result + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_verify(self, tv): + self._id = "Wycheproof RSA PKCS$#1 Test #" + str(tv.id) + + hashed_msg = tv.hash_module.new(tv.msg) + signer = pkcs1_15.new(tv.key) + try: + signature = signer.verify(hashed_msg, tv.sig) + except ValueError as e: + if tv.warning: + return + assert not tv.valid + else: + assert tv.valid + self.warn(tv) + + def runTest(self): + for tv in self.tv: + self.test_verify(tv) + + +def get_tests(config={}): + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(FIPS_PKCS1_Verify_Tests) + tests += list_test_cases(FIPS_PKCS1_Sign_Tests) + tests += list_test_cases(PKCS1_15_NoParams) + tests += list_test_cases(PKCS1_Legacy_Module_Tests) + tests += list_test_cases(PKCS1_All_Hashes_Tests) + tests += [ TestVectorsWycheproof(wycheproof_warnings) ] + + if config.get('slow_tests'): + tests += list_test_cases(FIPS_PKCS1_Verify_Tests_KAT) + tests += list_test_cases(FIPS_PKCS1_Sign_Tests_KAT) + + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Signature/test_pss.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Signature/test_pss.py new file mode 100644 index 000000000..535474bec --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Signature/test_pss.py @@ -0,0 +1,377 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import unittest + +from Crypto.Util.py3compat import b, bchr +from Crypto.Util.number import bytes_to_long +from Crypto.Util.strxor import strxor +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.SelfTest.loader import load_test_vectors, load_test_vectors_wycheproof + +from Crypto.Hash import SHA1, SHA224, SHA256, SHA384, SHA512 +from Crypto.PublicKey import RSA +from Crypto.Signature import pss +from Crypto.Signature import PKCS1_PSS + +from Crypto.Signature.pss import MGF1 + + +def load_hash_by_name(hash_name): + return __import__("Crypto.Hash." + hash_name, globals(), locals(), ["new"]) + + +class PRNG(object): + + def __init__(self, stream): + self.stream = stream + self.idx = 0 + + def __call__(self, rnd_size): + result = self.stream[self.idx:self.idx + rnd_size] + self.idx += rnd_size + return result + + +class PSS_Tests(unittest.TestCase): + + rsa_key = b'-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEAsvI34FgiTK8+txBvmooNGpNwk23YTU51dwNZi5yha3W4lA/Q\nvcZrDalkmD7ekWQwnduxVKa6pRSI13KBgeUOIqJoGXSWhntEtY3FEwvWOHW5AE7Q\njUzTzCiYT6TVaCcpa/7YLai+p6ai2g5f5Zfh4jSawa9uYeuggFygQq4IVW796MgV\nyqxYMM/arEj+/sKz3Viua9Rp9fFosertCYCX4DUTgW0mX9bwEnEOgjSI3pLOPXz1\n8vx+DRZS5wMCmwCUa0sKonLn3cAUPq+sGix7+eo7T0Z12MU8ud7IYVX/75r3cXiF\nPaYE2q8Le0kgOApIXbb+x74x0rNgyIh1yGygkwIDAQABAoIBABz4t1A0pLT6qHI2\nEIOaNz3mwhK0dZEqkz0GB1Dhtoax5ATgvKCFB98J3lYB08IBURe1snOsnMpOVUtg\naBRSM+QqnCUG6bnzKjAkuFP5liDE+oNQv1YpKp9CsUovuzdmI8Au3ewihl+ZTIN2\nUVNYMEOR1b5m+z2SSwWNOYsiJwpBrT7zkpdlDyjat7FiiPhMMIMXjhQFVxURMIcB\njUBtPzGvV/PG90cVDWi1wRGeeP1dDqti/jsnvykQ15KW1MqGrpeNKRmDdTy/Ucl1\nWIoYklKw3U456lgZ/rDTDB818+Tlnk35z4yF7d5ANPM8CKfqOPcnO1BCKVFzf4eq\n54wvUtkCgYEA1Zv2lp06l7rXMsvNtyYQjbFChezRDRnPwZmN4NCdRtTgGG1G0Ryd\nYz6WWoPGqZp0b4LAaaHd3W2GTcpXF8WXMKfMX1W+tMAxMozfsXRKMcHoypwuS5wT\nfJRXJCG4pvd57AB0iVUEJW2we+uGKU5Zxcx//id2nXGCpoRyViIplQsCgYEA1nVC\neHupHChht0Fh4N09cGqZHZzuwXjOUMzR3Vsfz+4WzVS3NvIgN4g5YgmQFOeKwo5y\niRq5yvubcNdFvf85eHWClg0zPAyxJCVUWigCrrOanGEhJo6re4idJvNVzu4Ucg0v\n6B3SJ1HsCda+ZSNz24bSyqRep8A+RoAaoVSFx5kCgYEAn3RvXPs9s+obnqWYiPF3\nRe5etE6Vt2vfNKwFxx6zaR6bsmBQjuUHcABWiHb6I71S0bMPI0tbrWGG8ibrYKl1\nNTLtUvVVCOS3VP7oNTWT9RTFTAnOXU7DFSo+6o/poWn3r36ff6zhDXeWWMr2OXtt\ndEQ1/2lCGEGVv+v61eVmmQUCgYABFHITPTwqwiFL1O5zPWnzyPWgaovhOYSAb6eW\n38CXQXGn8wdBJZL39J2lWrr4//l45VK6UgIhfYbY2JynSkO10ZGow8RARygVMILu\nOUlaK9lZdDvAf/NpGdUAvzTtZ9F+iYZ2OsA2JnlzyzsGM1l//3vMPWukmJk3ral0\nqoJJ8QKBgGRG3eVHnIegBbFVuMDp2NTcfuSuDVUQ1fGAwtPiFa8u81IodJnMk2pq\niXu2+0ytNA/M+SVrAnE2AgIzcaJbtr0p2srkuVM7KMWnG1vWFNjtXN8fAhf/joOv\nD+NmPL/N4uE57e40tbiU/H7KdyZaDt+5QiTmdhuyAe6CBjKsF2jy\n-----END RSA PRIVATE KEY-----' + msg = b'AAA' + tag = 
b'\x00[c5\xd8\xb0\x8b!D\x81\x83\x07\xc0\xdd\xb9\xb4\xb2`\x92\xe7\x02\xf1\xe1P\xea\xc3\xf0\xe3>\xddX5\xdd\x8e\xc5\x89\xef\xf3\xc2\xdc\xfeP\x02\x7f\x12+\xc9\xaf\xbb\xec\xfe\xb0\xa5\xb9\x08\x11P\x8fL\xee5\x9b\xb0k{=_\xd2\x14\xfb\x01R\xb7\xfe\x14}b\x03\x8d5Y\x89~}\xfc\xf2l\xd01-\xbd\xeb\x11\xcdV\x11\xe9l\x19k/o5\xa2\x0f\x15\xe7Q$\t=\xec\x1dAB\x19\xa5P\x9a\xaf\xa3G\x86"\xd6~\xf0j\xfcqkbs\x13\x84b\xe4\xbdm(\xed`\xa4F\xfb\x8f.\xe1\x8c)/_\x9eS\x98\xa4v\xb8\xdc\xfe\xf7/D\x18\x19\xb3T\x97:\xe2\x96s\xe8<\xa2\xb4\xb9\xf8/' + + def test_positive_1(self): + key = RSA.import_key(self.rsa_key) + h = SHA256.new(self.msg) + verifier = pss.new(key) + verifier.verify(h, self.tag) + + def test_negative_1(self): + key = RSA.import_key(self.rsa_key) + h = SHA256.new(self.msg + b'A') + verifier = pss.new(key) + tag = bytearray(self.tag) + self.assertRaises(ValueError, verifier.verify, h, tag) + + def test_negative_2(self): + key = RSA.import_key(self.rsa_key) + h = SHA256.new(self.msg) + verifier = pss.new(key, salt_bytes=1000) + tag = bytearray(self.tag) + self.assertRaises(ValueError, verifier.verify, h, tag) + + +class FIPS_PKCS1_Verify_Tests(unittest.TestCase): + + def shortDescription(self): + return "FIPS PKCS1 Tests (Verify)" + + def verify_positive(self, hashmod, message, public_key, salt, signature): + prng = PRNG(salt) + hashed = hashmod.new(message) + verifier = pss.new(public_key, salt_bytes=len(salt), rand_func=prng) + verifier.verify(hashed, signature) + + def verify_negative(self, hashmod, message, public_key, salt, signature): + prng = PRNG(salt) + hashed = hashmod.new(message) + verifier = pss.new(public_key, salt_bytes=len(salt), rand_func=prng) + self.assertRaises(ValueError, verifier.verify, hashed, signature) + + def test_can_sign(self): + test_public_key = RSA.generate(1024).public_key() + verifier = pss.new(test_public_key) + self.assertEqual(verifier.can_sign(), False) + + +class FIPS_PKCS1_Verify_Tests_KAT(unittest.TestCase): + pass + + +test_vectors_verify = load_test_vectors(("Signature", "PKCS1-PSS"), + "SigVerPSS_186-3.rsp", + "Signature Verification 186-3", + {'shaalg': lambda x: x, + 'result': lambda x: x}) or [] + + +for count, tv in enumerate(test_vectors_verify): + if isinstance(tv, str): + continue + if hasattr(tv, "n"): + modulus = tv.n + continue + if hasattr(tv, "p"): + continue + + hash_module = load_hash_by_name(tv.shaalg.upper()) + hash_obj = hash_module.new(tv.msg) + public_key = RSA.construct([bytes_to_long(x) for x in (modulus, tv.e)]) # type: ignore + if tv.saltval != b("\x00"): + prng = PRNG(tv.saltval) + verifier = pss.new(public_key, salt_bytes=len(tv.saltval), rand_func=prng) + else: + verifier = pss.new(public_key, salt_bytes=0) + + def positive_test(self, hash_obj=hash_obj, verifier=verifier, signature=tv.s): + verifier.verify(hash_obj, signature) + + def negative_test(self, hash_obj=hash_obj, verifier=verifier, signature=tv.s): + self.assertRaises(ValueError, verifier.verify, hash_obj, signature) + + if tv.result == 'p': + setattr(FIPS_PKCS1_Verify_Tests_KAT, "test_positive_%d" % count, positive_test) + else: + setattr(FIPS_PKCS1_Verify_Tests_KAT, "test_negative_%d" % count, negative_test) + + +class FIPS_PKCS1_Sign_Tests(unittest.TestCase): + + def shortDescription(self): + return "FIPS PKCS1 Tests (Sign)" + + def test_can_sign(self): + test_private_key = RSA.generate(1024) + signer = pss.new(test_private_key) + self.assertEqual(signer.can_sign(), True) + + +class FIPS_PKCS1_Sign_Tests_KAT(unittest.TestCase): + pass + + +test_vectors_sign = 
load_test_vectors(("Signature", "PKCS1-PSS"), + "SigGenPSS_186-2.txt", + "Signature Generation 186-2", + {'shaalg': lambda x: x}) or [] + +test_vectors_sign += load_test_vectors(("Signature", "PKCS1-PSS"), + "SigGenPSS_186-3.txt", + "Signature Generation 186-3", + {'shaalg': lambda x: x}) or [] + +for count, tv in enumerate(test_vectors_sign): + if isinstance(tv, str): + continue + if hasattr(tv, "n"): + modulus = tv.n + continue + if hasattr(tv, "e"): + private_key = RSA.construct([bytes_to_long(x) for x in (modulus, tv.e, tv.d)]) # type: ignore + continue + + hash_module = load_hash_by_name(tv.shaalg.upper()) + hash_obj = hash_module.new(tv.msg) + if tv.saltval != b("\x00"): + prng = PRNG(tv.saltval) + signer = pss.new(private_key, salt_bytes=len(tv.saltval), rand_func=prng) + else: + signer = pss.new(private_key, salt_bytes=0) + + def new_test(self, hash_obj=hash_obj, signer=signer, result=tv.s): + signature = signer.sign(hash_obj) + self.assertEqual(signature, result) + + setattr(FIPS_PKCS1_Sign_Tests_KAT, "test_%d" % count, new_test) + + +class PKCS1_Legacy_Module_Tests(unittest.TestCase): + """Verify that the legacy module Crypto.Signature.PKCS1_PSS + behaves as expected. The only difference is that the verify() + method returns True/False and does not raise exceptions.""" + + def shortDescription(self): + return "Test legacy Crypto.Signature.PKCS1_PSS" + + def runTest(self): + key = RSA.generate(1024) + hashed = SHA1.new(b("Test")) + good_signature = PKCS1_PSS.new(key).sign(hashed) + verifier = PKCS1_PSS.new(key.public_key()) + + self.assertEqual(verifier.verify(hashed, good_signature), True) + + # Flip a few bits in the signature + bad_signature = strxor(good_signature, bchr(1) * len(good_signature)) + self.assertEqual(verifier.verify(hashed, bad_signature), False) + + +class PKCS1_All_Hashes_Tests(unittest.TestCase): + + def shortDescription(self): + return "Test PKCS#1 PSS signature in combination with all hashes" + + def runTest(self): + + key = RSA.generate(1280) + signer = pss.new(key) + hash_names = ("MD2", "MD4", "MD5", "RIPEMD160", "SHA1", + "SHA224", "SHA256", "SHA384", "SHA512", + "SHA3_224", "SHA3_256", "SHA3_384", "SHA3_512") + + for name in hash_names: + hashed = load_hash_by_name(name).new(b("Test")) + signer.sign(hashed) + + from Crypto.Hash import BLAKE2b, BLAKE2s + for hash_size in (20, 32, 48, 64): + hashed_b = BLAKE2b.new(digest_bytes=hash_size, data=b("Test")) + signer.sign(hashed_b) + for hash_size in (16, 20, 28, 32): + hashed_s = BLAKE2s.new(digest_bytes=hash_size, data=b("Test")) + signer.sign(hashed_s) + + +def get_hash_module(hash_name): + if hash_name == "SHA-512": + hash_module = SHA512 + elif hash_name == "SHA-512/224": + hash_module = SHA512.new(truncate="224") + elif hash_name == "SHA-512/256": + hash_module = SHA512.new(truncate="256") + elif hash_name == "SHA-384": + hash_module = SHA384 + elif hash_name == "SHA-256": + hash_module = SHA256 + elif hash_name == "SHA-224": + hash_module = SHA224 + elif hash_name == "SHA-1": + hash_module = SHA1 + else: + raise ValueError("Unknown hash algorithm: " + hash_name) + return hash_module + + +class TestVectorsPSSWycheproof(unittest.TestCase): + + def __init__(self, wycheproof_warnings): + unittest.TestCase.__init__(self) + self._wycheproof_warnings = wycheproof_warnings + self._id = "None" + + def add_tests(self, filename): + + def filter_rsa(group): + return RSA.import_key(group['keyPem']) + + def filter_sha(group): + return get_hash_module(group['sha']) + + def filter_type(group): + type_name = 
group['type'] + if type_name not in ("RsassaPssVerify", ): + raise ValueError("Unknown type name " + type_name) + + def filter_slen(group): + return group['sLen'] + + def filter_mgf(group): + mgf = group['mgf'] + if mgf not in ("MGF1", ): + raise ValueError("Unknown MGF " + mgf) + mgf1_hash = get_hash_module(group['mgfSha']) + + def mgf(x, y, mh=mgf1_hash): + return MGF1(x, y, mh) + + return mgf + + result = load_test_vectors_wycheproof(("Signature", "wycheproof"), + filename, + "Wycheproof PSS signature (%s)" % filename, + group_tag={'key': filter_rsa, + 'hash_module': filter_sha, + 'sLen': filter_slen, + 'mgf': filter_mgf, + 'type': filter_type}) + return result + + def setUp(self): + self.tv = [] + self.add_tests("rsa_pss_2048_sha1_mgf1_20_test.json") + self.add_tests("rsa_pss_2048_sha256_mgf1_0_test.json") + self.add_tests("rsa_pss_2048_sha256_mgf1_32_test.json") + self.add_tests("rsa_pss_2048_sha512_256_mgf1_28_test.json") + self.add_tests("rsa_pss_2048_sha512_256_mgf1_32_test.json") + self.add_tests("rsa_pss_3072_sha256_mgf1_32_test.json") + self.add_tests("rsa_pss_4096_sha256_mgf1_32_test.json") + self.add_tests("rsa_pss_4096_sha512_mgf1_32_test.json") + self.add_tests("rsa_pss_misc_test.json") + + def shortDescription(self): + return self._id + + def warn(self, tv): + if tv.warning and self._wycheproof_warnings: + import warnings + warnings.warn("Wycheproof warning: %s (%s)" % (self._id, tv.comment)) + + def test_verify(self, tv): + self._id = "Wycheproof RSA PSS Test #%d (%s)" % (tv.id, tv.comment) + + hashed_msg = tv.hash_module.new(tv.msg) + signer = pss.new(tv.key, mask_func=tv.mgf, salt_bytes=tv.sLen) + try: + signature = signer.verify(hashed_msg, tv.sig) + except ValueError as e: + if tv.warning: + return + assert not tv.valid + else: + assert tv.valid + self.warn(tv) + + def runTest(self): + for tv in self.tv: + self.test_verify(tv) + + +def get_tests(config={}): + wycheproof_warnings = config.get('wycheproof_warnings') + + tests = [] + tests += list_test_cases(PSS_Tests) + tests += list_test_cases(FIPS_PKCS1_Verify_Tests) + tests += list_test_cases(FIPS_PKCS1_Sign_Tests) + tests += list_test_cases(PKCS1_Legacy_Module_Tests) + tests += list_test_cases(PKCS1_All_Hashes_Tests) + + if config.get('slow_tests'): + tests += list_test_cases(FIPS_PKCS1_Verify_Tests_KAT) + tests += list_test_cases(FIPS_PKCS1_Sign_Tests_KAT) + + tests += [TestVectorsPSSWycheproof(wycheproof_warnings)] + + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/__init__.py new file mode 100644 index 000000000..ee993db69 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/__init__.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/__init__.py: Self-test for utility modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test for utility modules""" + +__revision__ = "$Id$" + +import os + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Util import test_number; tests += test_number.get_tests(config=config) + from Crypto.SelfTest.Util import test_Counter; tests += test_Counter.get_tests(config=config) + from Crypto.SelfTest.Util import test_Padding; tests += test_Padding.get_tests(config=config) + from Crypto.SelfTest.Util import test_strxor; tests += test_strxor.get_tests(config=config) + from Crypto.SelfTest.Util import test_asn1; tests += test_asn1.get_tests(config=config) + from Crypto.SelfTest.Util import test_rfc1751; tests += test_rfc1751.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_Counter.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_Counter.py new file mode 100644 index 000000000..8837a32c8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_Counter.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_Counter: Self-test for the Crypto.Util.Counter module +# +# Written in 2009 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-tests for Crypto.Util.Counter""" + +from Crypto.Util.py3compat import * + +import unittest + +class CounterTests(unittest.TestCase): + def setUp(self): + global Counter + from Crypto.Util import Counter + + def test_BE(self): + """Big endian""" + c = Counter.new(128) + c = Counter.new(128, little_endian=False) + + def test_LE(self): + """Little endian""" + c = Counter.new(128, little_endian=True) + + def test_nbits(self): + c = Counter.new(nbits=128) + self.assertRaises(ValueError, Counter.new, 129) + + def test_prefix(self): + c = Counter.new(128, prefix=b("xx")) + + def test_suffix(self): + c = Counter.new(128, suffix=b("xx")) + + def test_iv(self): + c = Counter.new(128, initial_value=2) + self.assertRaises(ValueError, Counter.new, 16, initial_value=0x1FFFF) + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + return list_test_cases(CounterTests) + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_Padding.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_Padding.py new file mode 100644 index 000000000..12e2ec614 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_Padding.py @@ -0,0 +1,154 @@ +# +# SelfTest/Util/test_Padding.py: Self-test for padding functions +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import unittest +from binascii import unhexlify as uh + +from Crypto.Util.py3compat import * +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Util.Padding import pad, unpad + +class PKCS7_Tests(unittest.TestCase): + + def test1(self): + padded = pad(b(""), 4) + self.assertTrue(padded == uh(b("04040404"))) + padded = pad(b(""), 4, 'pkcs7') + self.assertTrue(padded == uh(b("04040404"))) + back = unpad(padded, 4) + self.assertTrue(back == b("")) + + def test2(self): + padded = pad(uh(b("12345678")), 4) + self.assertTrue(padded == uh(b("1234567804040404"))) + back = unpad(padded, 4) + self.assertTrue(back == uh(b("12345678"))) + + def test3(self): + padded = pad(uh(b("123456")), 4) + self.assertTrue(padded == uh(b("12345601"))) + back = unpad(padded, 4) + self.assertTrue(back == uh(b("123456"))) + + def test4(self): + padded = pad(uh(b("1234567890")), 4) + self.assertTrue(padded == uh(b("1234567890030303"))) + back = unpad(padded, 4) + self.assertTrue(back == uh(b("1234567890"))) + + def testn1(self): + self.assertRaises(ValueError, pad, uh(b("12")), 4, 'pkcs8') + + def testn2(self): + self.assertRaises(ValueError, unpad, b("\0\0\0"), 4) + self.assertRaises(ValueError, unpad, b(""), 4) + + def testn3(self): + self.assertRaises(ValueError, unpad, b("123456\x02"), 4) + self.assertRaises(ValueError, unpad, b("123456\x00"), 4) + self.assertRaises(ValueError, unpad, b("123456\x05\x05\x05\x05\x05"), 4) + +class X923_Tests(unittest.TestCase): + + def test1(self): + padded = pad(b(""), 4, 'x923') + self.assertTrue(padded == uh(b("00000004"))) + back = unpad(padded, 4, 'x923') + self.assertTrue(back == b("")) + + def test2(self): + padded = pad(uh(b("12345678")), 4, 'x923') + self.assertTrue(padded == uh(b("1234567800000004"))) + back = unpad(padded, 4, 'x923') + self.assertTrue(back == uh(b("12345678"))) + + def test3(self): + padded = pad(uh(b("123456")), 4, 'x923') + self.assertTrue(padded == uh(b("12345601"))) + back = unpad(padded, 4, 'x923') + self.assertTrue(back == uh(b("123456"))) + + def test4(self): + padded = pad(uh(b("1234567890")), 4, 'x923') + self.assertTrue(padded == uh(b("1234567890000003"))) + back = unpad(padded, 4, 'x923') + self.assertTrue(back == uh(b("1234567890"))) + + def testn1(self): + self.assertRaises(ValueError, unpad, b("123456\x02"), 4, 'x923') + self.assertRaises(ValueError, unpad, b("123456\x00"), 4, 'x923') + self.assertRaises(ValueError, unpad, b("123456\x00\x00\x00\x00\x05"), 4, 'x923') + self.assertRaises(ValueError, unpad, b(""), 4, 'x923') + +class ISO7816_Tests(unittest.TestCase): + + def test1(self): + padded = pad(b(""), 4, 'iso7816') + self.assertTrue(padded == uh(b("80000000"))) + back = unpad(padded, 4, 'iso7816') + self.assertTrue(back == b("")) + + def test2(self): + padded = pad(uh(b("12345678")), 4, 'iso7816') + self.assertTrue(padded == uh(b("1234567880000000"))) + back = unpad(padded, 4, 'iso7816') + self.assertTrue(back == uh(b("12345678"))) + + def test3(self): + padded = pad(uh(b("123456")), 4, 'iso7816') + self.assertTrue(padded == uh(b("12345680"))) + #import pdb; pdb.set_trace() + back = unpad(padded, 4, 'iso7816') + self.assertTrue(back == uh(b("123456"))) + + def test4(self): + padded = pad(uh(b("1234567890")), 4, 'iso7816') + self.assertTrue(padded == uh(b("1234567890800000"))) + back = unpad(padded, 4, 'iso7816') + self.assertTrue(back == uh(b("1234567890"))) + + def testn1(self): + self.assertRaises(ValueError, unpad, b("123456\x81"), 4, 'iso7816') 
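+        # ISO 7816-4 padding always ends with a 0x80 marker byte, optionally
+        # followed by zero bytes: the input above has no 0x80 among its
+        # trailing bytes and the empty input below carries no padding at
+        # all, so unpad must reject both.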
+ self.assertRaises(ValueError, unpad, b(""), 4, 'iso7816') + +def get_tests(config={}): + tests = [] + tests += list_test_cases(PKCS7_Tests) + tests += list_test_cases(X923_Tests) + tests += list_test_cases(ISO7816_Tests) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_asn1.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_asn1.py new file mode 100644 index 000000000..e441ba0a3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_asn1.py @@ -0,0 +1,851 @@ +# +# SelfTest/Util/test_asn.py: Self-test for the Crypto.Util.asn1 module +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +"""Self-tests for Crypto.Util.asn1""" + +import unittest + +from Crypto.Util.py3compat import * +from Crypto.Util.asn1 import (DerObject, DerSetOf, DerInteger, + DerBitString, + DerObjectId, DerNull, DerOctetString, + DerSequence, DerBoolean) + +class DerObjectTests(unittest.TestCase): + + def testObjInit1(self): + # Fail with invalid tag format (must be 1 byte) + self.assertRaises(ValueError, DerObject, b('\x00\x99')) + # Fail with invalid implicit tag (must be <0x1F) + self.assertRaises(ValueError, DerObject, 0x1F) + + # ------ + + def testObjEncode1(self): + # No payload + der = DerObject(b('\x02')) + self.assertEqual(der.encode(), b('\x02\x00')) + # Small payload (primitive) + der.payload = b('\x45') + self.assertEqual(der.encode(), b('\x02\x01\x45')) + # Invariant + self.assertEqual(der.encode(), b('\x02\x01\x45')) + # Initialize with numerical tag + der = DerObject(0x04) + der.payload = b('\x45') + self.assertEqual(der.encode(), b('\x04\x01\x45')) + # Initialize with constructed type + der = DerObject(b('\x10'), constructed=True) + self.assertEqual(der.encode(), b('\x30\x00')) + + def testObjEncode2(self): + # Initialize with payload + der = DerObject(0x03, b('\x12\x12')) + self.assertEqual(der.encode(), b('\x03\x02\x12\x12')) + + def testObjEncode3(self): + # Long payload + der = DerObject(b('\x10')) + der.payload = b("0")*128 + self.assertEqual(der.encode(), b('\x10\x81\x80' + "0"*128)) + + def testObjEncode4(self): + # Implicit tags (constructed) + der = DerObject(0x10, implicit=1, constructed=True) + der.payload = b('ppll') + self.assertEqual(der.encode(), b('\xa1\x04ppll')) + # Implicit tags (primitive) + der = DerObject(0x02, implicit=0x1E, constructed=False) + der.payload = b('ppll') + self.assertEqual(der.encode(), b('\x9E\x04ppll')) + + def testObjEncode5(self): + # Encode type with explicit tag + der = DerObject(0x10, explicit=5) + der.payload = b("xxll") + self.assertEqual(der.encode(), b("\xa5\x06\x10\x04xxll")) + + # ----- + + def testObjDecode1(self): + # Decode short payload + der = DerObject(0x02) + der.decode(b('\x02\x02\x01\x02')) + self.assertEqual(der.payload, b("\x01\x02")) + self.assertEqual(der._tag_octet, 0x02) + + def testObjDecode2(self): + # Decode long payload + der = DerObject(0x02) + der.decode(b('\x02\x81\x80' + "1"*128)) + self.assertEqual(der.payload, b("1")*128) + self.assertEqual(der._tag_octet, 0x02) + + def testObjDecode3(self): + # Decode payload with too much data gives error + der = DerObject(0x02) + self.assertRaises(ValueError, der.decode, b('\x02\x02\x01\x02\xFF')) + # Decode payload with too little data gives error + der = DerObject(0x02) + self.assertRaises(ValueError, der.decode, b('\x02\x02\x01')) + + def testObjDecode4(self): + # Decode implicit tag (primitive) + der = DerObject(0x02, constructed=False, implicit=0xF) + self.assertRaises(ValueError, der.decode, b('\x02\x02\x01\x02')) + der.decode(b('\x8F\x01\x00')) + self.assertEqual(der.payload, b('\x00')) + # Decode implicit tag (constructed) + der = DerObject(0x02, constructed=True, implicit=0xF) + self.assertRaises(ValueError, der.decode, b('\x02\x02\x01\x02')) + der.decode(b('\xAF\x01\x00')) + self.assertEqual(der.payload, b('\x00')) + + def testObjDecode5(self): + # Decode payload with unexpected tag gives error + der = DerObject(0x02) + self.assertRaises(ValueError, der.decode, b('\x03\x02\x01\x02')) + + def testObjDecode6(self): + # Arbitrary DER object + der = DerObject() + der.decode(b('\x65\x01\x88')) + 
self.assertEqual(der._tag_octet, 0x65) + self.assertEqual(der.payload, b('\x88')) + + def testObjDecode7(self): + # Decode explicit tag + der = DerObject(0x10, explicit=5) + der.decode(b("\xa5\x06\x10\x04xxll")) + self.assertEqual(der._inner_tag_octet, 0x10) + self.assertEqual(der.payload, b('xxll')) + + # Explicit tag may be 0 + der = DerObject(0x10, explicit=0) + der.decode(b("\xa0\x06\x10\x04xxll")) + self.assertEqual(der._inner_tag_octet, 0x10) + self.assertEqual(der.payload, b('xxll')) + + def testObjDecode8(self): + # Verify that decode returns the object + der = DerObject(0x02) + self.assertEqual(der, der.decode(b('\x02\x02\x01\x02'))) + +class DerIntegerTests(unittest.TestCase): + + def testInit1(self): + der = DerInteger(1) + self.assertEqual(der.encode(), b('\x02\x01\x01')) + + def testEncode1(self): + # Single-byte integers + # Value 0 + der = DerInteger(0) + self.assertEqual(der.encode(), b('\x02\x01\x00')) + # Value 1 + der = DerInteger(1) + self.assertEqual(der.encode(), b('\x02\x01\x01')) + # Value 127 + der = DerInteger(127) + self.assertEqual(der.encode(), b('\x02\x01\x7F')) + + def testEncode2(self): + # Multi-byte integers + # Value 128 + der = DerInteger(128) + self.assertEqual(der.encode(), b('\x02\x02\x00\x80')) + # Value 0x180 + der = DerInteger(0x180) + self.assertEqual(der.encode(), b('\x02\x02\x01\x80')) + # One very long integer + der = DerInteger(2**2048) + self.assertEqual(der.encode(), + b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) + + def testEncode3(self): + # Negative integers + # Value -1 + der = DerInteger(-1) + self.assertEqual(der.encode(), b('\x02\x01\xFF')) + # Value -128 + der = DerInteger(-128) + self.assertEqual(der.encode(), b('\x02\x01\x80')) + # Value + der = DerInteger(-87873) + self.assertEqual(der.encode(), b('\x02\x03\xFE\xA8\xBF')) + + def testEncode4(self): + # Explicit encoding + number = DerInteger(0x34, explicit=3) + self.assertEqual(number.encode(), b('\xa3\x03\x02\x01\x34')) + + # ----- + + def testDecode1(self): + # Single-byte integer + der = DerInteger() + # Value 0 + der.decode(b('\x02\x01\x00')) + self.assertEqual(der.value, 0) + # Value 1 + der.decode(b('\x02\x01\x01')) + self.assertEqual(der.value, 1) + # Value 127 + der.decode(b('\x02\x01\x7F')) + self.assertEqual(der.value, 127) + + def testDecode2(self): + # Multi-byte integer + der = DerInteger() + # Value 0x180L + 
der.decode(b('\x02\x02\x01\x80')) + self.assertEqual(der.value,0x180) + # One very long integer + der.decode( + b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) + self.assertEqual(der.value,2**2048) + + def testDecode3(self): + # Negative integer + der = DerInteger() + # Value -1 + der.decode(b('\x02\x01\xFF')) + self.assertEqual(der.value, -1) + # Value -32768 + der.decode(b('\x02\x02\x80\x00')) + self.assertEqual(der.value, -32768) + + def testDecode5(self): + # We still accept BER integer format + der = DerInteger() + # Redundant leading zeroes + der.decode(b('\x02\x02\x00\x01')) + self.assertEqual(der.value, 1) + # Redundant leading 0xFF + der.decode(b('\x02\x02\xFF\xFF')) + self.assertEqual(der.value, -1) + # Empty payload + der.decode(b('\x02\x00')) + self.assertEqual(der.value, 0) + + def testDecode6(self): + # Explicit encoding + number = DerInteger(explicit=3) + number.decode(b('\xa3\x03\x02\x01\x34')) + self.assertEqual(number.value, 0x34) + + def testDecode7(self): + # Verify decode returns the DerInteger + der = DerInteger() + self.assertEqual(der, der.decode(b('\x02\x01\x7F'))) + + ### + + def testStrict1(self): + number = DerInteger() + + number.decode(b'\x02\x02\x00\x01') + number.decode(b'\x02\x02\x00\x7F') + self.assertRaises(ValueError, number.decode, b'\x02\x02\x00\x01', strict=True) + self.assertRaises(ValueError, number.decode, b'\x02\x02\x00\x7F', strict=True) + + ### + + def testErrDecode1(self): + # Wide length field + der = DerInteger() + self.assertRaises(ValueError, der.decode, b('\x02\x81\x01\x01')) + + +class DerSequenceTests(unittest.TestCase): + + def testInit1(self): + der = DerSequence([1, DerInteger(2), b('0\x00')]) + self.assertEqual(der.encode(), b('0\x08\x02\x01\x01\x02\x01\x020\x00')) + + def testEncode1(self): + # Empty sequence + der = DerSequence() + self.assertEqual(der.encode(), b('0\x00')) + self.assertFalse(der.hasOnlyInts()) + # One single-byte integer (zero) + der.append(0) + self.assertEqual(der.encode(), b('0\x03\x02\x01\x00')) + self.assertEqual(der.hasInts(),1) + self.assertEqual(der.hasInts(False),1) + self.assertTrue(der.hasOnlyInts()) + self.assertTrue(der.hasOnlyInts(False)) + # Invariant + self.assertEqual(der.encode(), b('0\x03\x02\x01\x00')) + + def testEncode2(self): + # Indexing + der = DerSequence() + der.append(0) + der[0] = 1 + self.assertEqual(len(der),1) + self.assertEqual(der[0],1) + 
self.assertEqual(der[-1],1) + self.assertEqual(der.encode(), b('0\x03\x02\x01\x01')) + # + der[:] = [1] + self.assertEqual(len(der),1) + self.assertEqual(der[0],1) + self.assertEqual(der.encode(), b('0\x03\x02\x01\x01')) + + def testEncode3(self): + # One multi-byte integer (non-zero) + der = DerSequence() + der.append(0x180) + self.assertEqual(der.encode(), b('0\x04\x02\x02\x01\x80')) + + def testEncode4(self): + # One very long integer + der = DerSequence() + der.append(2**2048) + self.assertEqual(der.encode(), b('0\x82\x01\x05')+ + b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) + + def testEncode5(self): + der = DerSequence() + der += 1 + der += b('\x30\x00') + self.assertEqual(der.encode(), b('\x30\x05\x02\x01\x01\x30\x00')) + + def testEncode6(self): + # Two positive integers + der = DerSequence() + der.append(0x180) + der.append(0xFF) + self.assertEqual(der.encode(), b('0\x08\x02\x02\x01\x80\x02\x02\x00\xff')) + self.assertTrue(der.hasOnlyInts()) + self.assertTrue(der.hasOnlyInts(False)) + # Two mixed integers + der = DerSequence() + der.append(2) + der.append(-2) + self.assertEqual(der.encode(), b('0\x06\x02\x01\x02\x02\x01\xFE')) + self.assertEqual(der.hasInts(), 1) + self.assertEqual(der.hasInts(False), 2) + self.assertFalse(der.hasOnlyInts()) + self.assertTrue(der.hasOnlyInts(False)) + # + der.append(0x01) + der[1:] = [9,8] + self.assertEqual(len(der),3) + self.assertEqual(der[1:],[9,8]) + self.assertEqual(der[1:-1],[9]) + self.assertEqual(der.encode(), b('0\x09\x02\x01\x02\x02\x01\x09\x02\x01\x08')) + + def testEncode7(self): + # One integer and another type (already encoded) + der = DerSequence() + der.append(0x180) + der.append(b('0\x03\x02\x01\x05')) + self.assertEqual(der.encode(), b('0\x09\x02\x02\x01\x800\x03\x02\x01\x05')) + self.assertFalse(der.hasOnlyInts()) + + def testEncode8(self): + # One integer and another type (yet to encode) + der = DerSequence() + der.append(0x180) + der.append(DerSequence([5])) + self.assertEqual(der.encode(), b('0\x09\x02\x02\x01\x800\x03\x02\x01\x05')) + self.assertFalse(der.hasOnlyInts()) + + #### + + def testDecode1(self): + # Empty sequence + der = DerSequence() + der.decode(b('0\x00')) + self.assertEqual(len(der),0) + # One single-byte integer (zero) + der.decode(b('0\x03\x02\x01\x00')) + self.assertEqual(len(der),1) + self.assertEqual(der[0],0) + # Invariant + 
der.decode(b('0\x03\x02\x01\x00')) + self.assertEqual(len(der),1) + self.assertEqual(der[0],0) + + def testDecode2(self): + # One single-byte integer (non-zero) + der = DerSequence() + der.decode(b('0\x03\x02\x01\x7f')) + self.assertEqual(len(der),1) + self.assertEqual(der[0],127) + + def testDecode4(self): + # One very long integer + der = DerSequence() + der.decode(b('0\x82\x01\x05')+ + b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) + self.assertEqual(len(der),1) + self.assertEqual(der[0],2**2048) + + def testDecode6(self): + # Two integers + der = DerSequence() + der.decode(b('0\x08\x02\x02\x01\x80\x02\x02\x00\xff')) + self.assertEqual(len(der),2) + self.assertEqual(der[0],0x180) + self.assertEqual(der[1],0xFF) + + def testDecode7(self): + # One integer and 2 other types + der = DerSequence() + der.decode(b('0\x0A\x02\x02\x01\x80\x24\x02\xb6\x63\x12\x00')) + self.assertEqual(len(der),3) + self.assertEqual(der[0],0x180) + self.assertEqual(der[1],b('\x24\x02\xb6\x63')) + self.assertEqual(der[2],b('\x12\x00')) + + def testDecode8(self): + # Only 2 other types + der = DerSequence() + der.decode(b('0\x06\x24\x02\xb6\x63\x12\x00')) + self.assertEqual(len(der),2) + self.assertEqual(der[0],b('\x24\x02\xb6\x63')) + self.assertEqual(der[1],b('\x12\x00')) + self.assertEqual(der.hasInts(), 0) + self.assertEqual(der.hasInts(False), 0) + self.assertFalse(der.hasOnlyInts()) + self.assertFalse(der.hasOnlyInts(False)) + + def testDecode9(self): + # Verify that decode returns itself + der = DerSequence() + self.assertEqual(der, der.decode(b('0\x06\x24\x02\xb6\x63\x12\x00'))) + + ### + + def testErrDecode1(self): + # Not a sequence + der = DerSequence() + self.assertRaises(ValueError, der.decode, b('')) + self.assertRaises(ValueError, der.decode, b('\x00')) + self.assertRaises(ValueError, der.decode, b('\x30')) + + def testErrDecode2(self): + der = DerSequence() + # Too much data + self.assertRaises(ValueError, der.decode, b('\x30\x00\x00')) + + def testErrDecode3(self): + # Wrong length format + der = DerSequence() + # Missing length in sub-item + self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x01\x01\x00')) + # Valid BER, but invalid DER length + self.assertRaises(ValueError, der.decode, b('\x30\x81\x03\x02\x01\x01')) + self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x81\x01\x01')) + + def test_expected_nr_elements(self): + der_bin = 
DerSequence([1, 2, 3]).encode() + + DerSequence().decode(der_bin, nr_elements=3) + DerSequence().decode(der_bin, nr_elements=(2,3)) + self.assertRaises(ValueError, DerSequence().decode, der_bin, nr_elements=1) + self.assertRaises(ValueError, DerSequence().decode, der_bin, nr_elements=(4,5)) + + def test_expected_only_integers(self): + + der_bin1 = DerSequence([1, 2, 3]).encode() + der_bin2 = DerSequence([1, 2, DerSequence([3, 4])]).encode() + + DerSequence().decode(der_bin1, only_ints_expected=True) + DerSequence().decode(der_bin1, only_ints_expected=False) + DerSequence().decode(der_bin2, only_ints_expected=False) + self.assertRaises(ValueError, DerSequence().decode, der_bin2, only_ints_expected=True) + + +class DerOctetStringTests(unittest.TestCase): + + def testInit1(self): + der = DerOctetString(b('\xFF')) + self.assertEqual(der.encode(), b('\x04\x01\xFF')) + + def testEncode1(self): + # Empty sequence + der = DerOctetString() + self.assertEqual(der.encode(), b('\x04\x00')) + # Small payload + der.payload = b('\x01\x02') + self.assertEqual(der.encode(), b('\x04\x02\x01\x02')) + + #### + + def testDecode1(self): + # Empty sequence + der = DerOctetString() + der.decode(b('\x04\x00')) + self.assertEqual(der.payload, b('')) + # Small payload + der.decode(b('\x04\x02\x01\x02')) + self.assertEqual(der.payload, b('\x01\x02')) + + def testDecode2(self): + # Verify that decode returns the object + der = DerOctetString() + self.assertEqual(der, der.decode(b('\x04\x00'))) + + def testErrDecode1(self): + # No leftovers allowed + der = DerOctetString() + self.assertRaises(ValueError, der.decode, b('\x04\x01\x01\xff')) + +class DerNullTests(unittest.TestCase): + + def testEncode1(self): + der = DerNull() + self.assertEqual(der.encode(), b('\x05\x00')) + + #### + + def testDecode1(self): + # Empty sequence + der = DerNull() + self.assertEqual(der, der.decode(b('\x05\x00'))) + +class DerObjectIdTests(unittest.TestCase): + + def testInit1(self): + der = DerObjectId("1.1") + self.assertEqual(der.encode(), b'\x06\x01)') + + def testEncode1(self): + der = DerObjectId('1.2.840.113549.1.1.1') + self.assertEqual(der.encode(), b'\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01') + + der = DerObjectId() + der.value = '1.2.840.113549.1.1.1' + self.assertEqual(der.encode(), b'\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01') + + der = DerObjectId('2.999.1234') + self.assertEqual(der.encode(), b'\x06\x04\x88\x37\x89\x52') + + def testEncode2(self): + der = DerObjectId('3.4') + self.assertRaises(ValueError, der.encode) + + der = DerObjectId('1.40') + self.assertRaises(ValueError, der.encode) + + #### + + def testDecode1(self): + # Empty sequence + der = DerObjectId() + der.decode(b'\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01') + self.assertEqual(der.value, '1.2.840.113549.1.1.1') + + def testDecode2(self): + # Verify that decode returns the object + der = DerObjectId() + self.assertEqual(der, + der.decode(b'\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01')) + + def testDecode3(self): + der = DerObjectId() + der.decode(b'\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x00\x01') + self.assertEqual(der.value, '1.2.840.113549.1.0.1') + + def testDecode4(self): + der = DerObjectId() + der.decode(b'\x06\x04\x88\x37\x89\x52') + self.assertEqual(der.value, '2.999.1234') + + +class DerBitStringTests(unittest.TestCase): + + def testInit1(self): + der = DerBitString(b("\xFF")) + self.assertEqual(der.encode(), b('\x03\x02\x00\xFF')) + + def testInit2(self): + der = DerBitString(DerInteger(1)) + self.assertEqual(der.encode(), 
b('\x03\x04\x00\x02\x01\x01')) + + def testEncode1(self): + # Empty sequence + der = DerBitString() + self.assertEqual(der.encode(), b('\x03\x01\x00')) + # Small payload + der = DerBitString(b('\x01\x02')) + self.assertEqual(der.encode(), b('\x03\x03\x00\x01\x02')) + # Small payload + der = DerBitString() + der.value = b('\x01\x02') + self.assertEqual(der.encode(), b('\x03\x03\x00\x01\x02')) + + #### + + def testDecode1(self): + # Empty sequence + der = DerBitString() + der.decode(b('\x03\x00')) + self.assertEqual(der.value, b('')) + # Small payload + der.decode(b('\x03\x03\x00\x01\x02')) + self.assertEqual(der.value, b('\x01\x02')) + + def testDecode2(self): + # Verify that decode returns the object + der = DerBitString() + self.assertEqual(der, der.decode(b('\x03\x00'))) + + +class DerSetOfTests(unittest.TestCase): + + def testInit1(self): + der = DerSetOf([DerInteger(1), DerInteger(2)]) + self.assertEqual(der.encode(), b('1\x06\x02\x01\x01\x02\x01\x02')) + + def testEncode1(self): + # Empty set + der = DerSetOf() + self.assertEqual(der.encode(), b('1\x00')) + # One single-byte integer (zero) + der.add(0) + self.assertEqual(der.encode(), b('1\x03\x02\x01\x00')) + # Invariant + self.assertEqual(der.encode(), b('1\x03\x02\x01\x00')) + + def testEncode2(self): + # Two integers + der = DerSetOf() + der.add(0x180) + der.add(0xFF) + self.assertEqual(der.encode(), b('1\x08\x02\x02\x00\xff\x02\x02\x01\x80')) + # Initialize with integers + der = DerSetOf([0x180, 0xFF]) + self.assertEqual(der.encode(), b('1\x08\x02\x02\x00\xff\x02\x02\x01\x80')) + + def testEncode3(self): + # One integer and another type (no matter what it is) + der = DerSetOf() + der.add(0x180) + self.assertRaises(ValueError, der.add, b('\x00\x02\x00\x00')) + + def testEncode4(self): + # Only non integers + der = DerSetOf() + der.add(b('\x01\x00')) + der.add(b('\x01\x01\x01')) + self.assertEqual(der.encode(), b('1\x05\x01\x00\x01\x01\x01')) + + #### + + def testDecode1(self): + # Empty sequence + der = DerSetOf() + der.decode(b('1\x00')) + self.assertEqual(len(der),0) + # One single-byte integer (zero) + der.decode(b('1\x03\x02\x01\x00')) + self.assertEqual(len(der),1) + self.assertEqual(list(der),[0]) + + def testDecode2(self): + # Two integers + der = DerSetOf() + der.decode(b('1\x08\x02\x02\x01\x80\x02\x02\x00\xff')) + self.assertEqual(len(der),2) + l = list(der) + self.assertTrue(0x180 in l) + self.assertTrue(0xFF in l) + + def testDecode3(self): + # One integer and 2 other types + der = DerSetOf() + #import pdb; pdb.set_trace() + self.assertRaises(ValueError, der.decode, + b('0\x0A\x02\x02\x01\x80\x24\x02\xb6\x63\x12\x00')) + + def testDecode4(self): + # Verify that decode returns the object + der = DerSetOf() + self.assertEqual(der, + der.decode(b('1\x08\x02\x02\x01\x80\x02\x02\x00\xff'))) + + ### + + def testErrDecode1(self): + # No leftovers allowed + der = DerSetOf() + self.assertRaises(ValueError, der.decode, + b('1\x08\x02\x02\x01\x80\x02\x02\x00\xff\xAA')) + + +class DerBooleanTests(unittest.TestCase): + + def testEncode1(self): + der = DerBoolean(False) + self.assertEqual(der.encode(), b'\x01\x01\x00') + + def testEncode2(self): + der = DerBoolean(True) + self.assertEqual(der.encode(), b'\x01\x01\xFF') + + def testEncode3(self): + der = DerBoolean(False, implicit=0x12) + self.assertEqual(der.encode(), b'\x92\x01\x00') + + def testEncode4(self): + der = DerBoolean(False, explicit=0x05) + self.assertEqual(der.encode(), b'\xA5\x03\x01\x01\x00') + #### + + def testDecode1(self): + der = DerBoolean() + 
der.decode(b'\x01\x01\x00') + self.assertEqual(der.value, False) + + def testDecode2(self): + der = DerBoolean() + der.decode(b'\x01\x01\xFF') + self.assertEqual(der.value, True) + + def testDecode3(self): + der = DerBoolean(implicit=0x12) + der.decode(b'\x92\x01\x00') + self.assertEqual(der.value, False) + + def testDecode4(self): + der = DerBoolean(explicit=0x05) + der.decode(b'\xA5\x03\x01\x01\x00') + self.assertEqual(der.value, False) + + def testErrorDecode1(self): + der = DerBoolean() + # Wrong tag + self.assertRaises(ValueError, der.decode, b'\x02\x01\x00') + + def testErrorDecode2(self): + der = DerBoolean() + # Payload too long + self.assertRaises(ValueError, der.decode, b'\x01\x01\x00\xFF') + + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + listTests = [] + listTests += list_test_cases(DerObjectTests) + listTests += list_test_cases(DerIntegerTests) + listTests += list_test_cases(DerSequenceTests) + listTests += list_test_cases(DerOctetStringTests) + listTests += list_test_cases(DerNullTests) + listTests += list_test_cases(DerObjectIdTests) + listTests += list_test_cases(DerBitStringTests) + listTests += list_test_cases(DerSetOfTests) + listTests += list_test_cases(DerBooleanTests) + return listTests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_number.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_number.py new file mode 100644 index 000000000..bb143f3ba --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_number.py @@ -0,0 +1,192 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_number.py: Self-test for parts of the Crypto.Util.number module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-tests for (some of) Crypto.Util.number""" + +import math +import unittest + +from Crypto.Util.py3compat import * +from Crypto.SelfTest.st_common import list_test_cases + +from Crypto.Util import number +from Crypto.Util.number import long_to_bytes + + +class MyError(Exception): + """Dummy exception used for tests""" + +# NB: In some places, we compare tuples instead of just output values so that +# if any inputs cause a test failure, we'll be able to tell which ones. 
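+# A reference point for the ceil_div checks below (a sketch only, not
+# necessarily how Crypto.Util.number implements it): for non-negative n
+# and positive d, integer ceiling division can be computed without
+# floating point as
+#
+#     def ceil_div_sketch(n, d):
+#         return (n + d - 1) // d
+#
+# e.g. ceil_div_sketch(7, 3) == 3 and ceil_div_sketch(6, 3) == 2, matching
+# the expectations asserted in MiscTests.test_ceil_div.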
+ +class MiscTests(unittest.TestCase): + + def test_ceil_div(self): + """Util.number.ceil_div""" + self.assertRaises(TypeError, number.ceil_div, "1", 1) + self.assertRaises(ZeroDivisionError, number.ceil_div, 1, 0) + self.assertRaises(ZeroDivisionError, number.ceil_div, -1, 0) + + # b = 1 + self.assertEqual(0, number.ceil_div(0, 1)) + self.assertEqual(1, number.ceil_div(1, 1)) + self.assertEqual(2, number.ceil_div(2, 1)) + self.assertEqual(3, number.ceil_div(3, 1)) + + # b = 2 + self.assertEqual(0, number.ceil_div(0, 2)) + self.assertEqual(1, number.ceil_div(1, 2)) + self.assertEqual(1, number.ceil_div(2, 2)) + self.assertEqual(2, number.ceil_div(3, 2)) + self.assertEqual(2, number.ceil_div(4, 2)) + self.assertEqual(3, number.ceil_div(5, 2)) + + # b = 3 + self.assertEqual(0, number.ceil_div(0, 3)) + self.assertEqual(1, number.ceil_div(1, 3)) + self.assertEqual(1, number.ceil_div(2, 3)) + self.assertEqual(1, number.ceil_div(3, 3)) + self.assertEqual(2, number.ceil_div(4, 3)) + self.assertEqual(2, number.ceil_div(5, 3)) + self.assertEqual(2, number.ceil_div(6, 3)) + self.assertEqual(3, number.ceil_div(7, 3)) + + # b = 4 + self.assertEqual(0, number.ceil_div(0, 4)) + self.assertEqual(1, number.ceil_div(1, 4)) + self.assertEqual(1, number.ceil_div(2, 4)) + self.assertEqual(1, number.ceil_div(3, 4)) + self.assertEqual(1, number.ceil_div(4, 4)) + self.assertEqual(2, number.ceil_div(5, 4)) + self.assertEqual(2, number.ceil_div(6, 4)) + self.assertEqual(2, number.ceil_div(7, 4)) + self.assertEqual(2, number.ceil_div(8, 4)) + self.assertEqual(3, number.ceil_div(9, 4)) + + def test_getPrime(self): + """Util.number.getPrime""" + self.assertRaises(ValueError, number.getPrime, -100) + self.assertRaises(ValueError, number.getPrime, 0) + self.assertRaises(ValueError, number.getPrime, 1) + + bits = 4 + for i in range(100): + x = number.getPrime(bits) + self.assertEqual(x >= (1 << bits - 1), 1) + self.assertEqual(x < (1 << bits), 1) + + bits = 512 + x = number.getPrime(bits) + self.assertNotEqual(x % 2, 0) + self.assertEqual(x >= (1 << bits - 1), 1) + self.assertEqual(x < (1 << bits), 1) + + def test_getStrongPrime(self): + """Util.number.getStrongPrime""" + self.assertRaises(ValueError, number.getStrongPrime, 256) + self.assertRaises(ValueError, number.getStrongPrime, 513) + bits = 512 + x = number.getStrongPrime(bits) + self.assertNotEqual(x % 2, 0) + self.assertEqual(x > (1 << bits-1)-1, 1) + self.assertEqual(x < (1 << bits), 1) + e = 2**16+1 + x = number.getStrongPrime(bits, e) + self.assertEqual(number.GCD(x-1, e), 1) + self.assertNotEqual(x % 2, 0) + self.assertEqual(x > (1 << bits-1)-1, 1) + self.assertEqual(x < (1 << bits), 1) + e = 2**16+2 + x = number.getStrongPrime(bits, e) + self.assertEqual(number.GCD((x-1)>>1, e), 1) + self.assertNotEqual(x % 2, 0) + self.assertEqual(x > (1 << bits-1)-1, 1) + self.assertEqual(x < (1 << bits), 1) + + def test_isPrime(self): + """Util.number.isPrime""" + self.assertEqual(number.isPrime(-3), False) # Regression test: negative numbers should not be prime + self.assertEqual(number.isPrime(-2), False) # Regression test: negative numbers should not be prime + self.assertEqual(number.isPrime(1), False) # Regression test: isPrime(1) caused some versions of PyCrypto to crash. 
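+        # Primality is defined only for integers greater than 1, so 2 is the
+        # smallest prime and every value of 1 or below (including negatives)
+        # must report False.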
+ self.assertEqual(number.isPrime(2), True) + self.assertEqual(number.isPrime(3), True) + self.assertEqual(number.isPrime(4), False) + self.assertEqual(number.isPrime(2**1279-1), True) + self.assertEqual(number.isPrime(-(2**1279-1)), False) # Regression test: negative numbers should not be prime + # test some known gmp pseudo-primes taken from + # http://www.trnicely.net/misc/mpzspsp.html + for composite in (43 * 127 * 211, 61 * 151 * 211, 15259 * 30517, + 346141 * 692281, 1007119 * 2014237, 3589477 * 7178953, + 4859419 * 9718837, 2730439 * 5460877, + 245127919 * 490255837, 963939391 * 1927878781, + 4186358431 * 8372716861, 1576820467 * 3153640933): + self.assertEqual(number.isPrime(int(composite)), False) + + def test_size(self): + self.assertEqual(number.size(2),2) + self.assertEqual(number.size(3),2) + self.assertEqual(number.size(0xa2),8) + self.assertEqual(number.size(0xa2ba40),8*3) + self.assertEqual(number.size(0xa2ba40ee07e3b2bd2f02ce227f36a195024486e49c19cb41bbbdfbba98b22b0e577c2eeaffa20d883a76e65e394c69d4b3c05a1e8fadda27edb2a42bc000fe888b9b32c22d15add0cd76b3e7936e19955b220dd17d4ea904b1ec102b2e4de7751222aa99151024c7cb41cc5ea21d00eeb41f7c800834d2c6e06bce3bce7ea9a5), 1024) + self.assertRaises(ValueError, number.size, -1) + + +class LongTests(unittest.TestCase): + + def test1(self): + self.assertEqual(long_to_bytes(0), b'\x00') + self.assertEqual(long_to_bytes(1), b'\x01') + self.assertEqual(long_to_bytes(0x100), b'\x01\x00') + self.assertEqual(long_to_bytes(0xFF00000000), b'\xFF\x00\x00\x00\x00') + self.assertEqual(long_to_bytes(0xFF00000000), b'\xFF\x00\x00\x00\x00') + self.assertEqual(long_to_bytes(0x1122334455667788), b'\x11\x22\x33\x44\x55\x66\x77\x88') + self.assertEqual(long_to_bytes(0x112233445566778899), b'\x11\x22\x33\x44\x55\x66\x77\x88\x99') + + def test2(self): + self.assertEqual(long_to_bytes(0, 1), b'\x00') + self.assertEqual(long_to_bytes(0, 2), b'\x00\x00') + self.assertEqual(long_to_bytes(1, 3), b'\x00\x00\x01') + self.assertEqual(long_to_bytes(65535, 2), b'\xFF\xFF') + self.assertEqual(long_to_bytes(65536, 2), b'\x00\x01\x00\x00') + self.assertEqual(long_to_bytes(0x100, 1), b'\x01\x00') + self.assertEqual(long_to_bytes(0xFF00000001, 6), b'\x00\xFF\x00\x00\x00\x01') + self.assertEqual(long_to_bytes(0xFF00000001, 8), b'\x00\x00\x00\xFF\x00\x00\x00\x01') + self.assertEqual(long_to_bytes(0xFF00000001, 10), b'\x00\x00\x00\x00\x00\xFF\x00\x00\x00\x01') + self.assertEqual(long_to_bytes(0xFF00000001, 11), b'\x00\x00\x00\x00\x00\x00\xFF\x00\x00\x00\x01') + + def test_err1(self): + self.assertRaises(ValueError, long_to_bytes, -1) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(MiscTests) + tests += list_test_cases(LongTests) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_rfc1751.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_rfc1751.py new file mode 100644 index 000000000..af0aa2baf --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_rfc1751.py @@ -0,0 +1,38 @@ +import unittest + +import binascii +from Crypto.Util.RFC1751 import key_to_english, english_to_key + + +class RFC1751_Tests(unittest.TestCase): + + def test1(self): + data = [ + ('EB33F77EE73D4053', 'TIDE ITCH SLOW REIN RULE MOT'), + ('CCAC2AED591056BE4F90FD441C534766', 'RASH BUSH MILK LOOK BAD BRIM AVID 
GAFF BAIT ROT POD LOVE'), + ('EFF81F9BFBC65350920CDD7416DE8009', 'TROD MUTE TAIL WARM CHAR KONG HAAG CITY BORE O TEAL AWL') + ] + + for key_hex, words in data: + key_bin = binascii.a2b_hex(key_hex) + + w2 = key_to_english(key_bin) + self.assertEqual(w2, words) + + k2 = english_to_key(words) + self.assertEqual(k2, key_bin) + + def test_error_key_to_english(self): + + self.assertRaises(ValueError, key_to_english, b'0' * 7) + + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + tests = list_test_cases(RFC1751_Tests) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_strxor.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_strxor.py new file mode 100644 index 000000000..c91d38f5c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/Util/test_strxor.py @@ -0,0 +1,280 @@ +# +# SelfTest/Util/test_strxor.py: Self-test for XORing +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import unittest +from binascii import unhexlify, hexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Util.strxor import strxor, strxor_c + + +class StrxorTests(unittest.TestCase): + + def test1(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term2 = unhexlify(b"383d4ba020573314395b") + result = unhexlify(b"c70ed123c59a7fcb6f12") + self.assertEqual(strxor(term1, term2), result) + self.assertEqual(strxor(term2, term1), result) + + def test2(self): + es = b"" + self.assertEqual(strxor(es, es), es) + + def test3(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + all_zeros = b"\x00" * len(term1) + self.assertEqual(strxor(term1, term1), all_zeros) + + def test_wrong_length(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term2 = unhexlify(b"ff339a83e5cd4cdf564990") + self.assertRaises(ValueError, strxor, term1, term2) + + def test_bytearray(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term1_ba = bytearray(term1) + term2 = unhexlify(b"383d4ba020573314395b") + result = unhexlify(b"c70ed123c59a7fcb6f12") + + self.assertEqual(strxor(term1_ba, term2), result) + + def test_memoryview(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term1_mv = memoryview(term1) + term2 = unhexlify(b"383d4ba020573314395b") + result = unhexlify(b"c70ed123c59a7fcb6f12") + + self.assertEqual(strxor(term1_mv, term2), result) + + def test_output_bytearray(self): + """Verify result can be stored in pre-allocated memory""" + + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term2 = unhexlify(b"383d4ba020573314395b") + original_term1 = term1[:] + original_term2 = term2[:] + expected_xor = unhexlify(b"c70ed123c59a7fcb6f12") + output = bytearray(len(term1)) + + result = strxor(term1, term2, output=output) + + self.assertEqual(result, None) + self.assertEqual(output, expected_xor) + self.assertEqual(term1, original_term1) + self.assertEqual(term2, original_term2) + + def test_output_memoryview(self): + """Verify result can be stored in pre-allocated memory""" + + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term2 = unhexlify(b"383d4ba020573314395b") + original_term1 = term1[:] + original_term2 = term2[:] + expected_xor = unhexlify(b"c70ed123c59a7fcb6f12") + output = memoryview(bytearray(len(term1))) + + result = strxor(term1, term2, output=output) + + self.assertEqual(result, None) + self.assertEqual(output, expected_xor) + self.assertEqual(term1, original_term1) + self.assertEqual(term2, original_term2) + + def test_output_overlapping_bytearray(self): + """Verify result can be stored in overlapping memory""" + + term1 = bytearray(unhexlify(b"ff339a83e5cd4cdf5649")) + term2 = unhexlify(b"383d4ba020573314395b") + original_term2 = term2[:] + expected_xor = unhexlify(b"c70ed123c59a7fcb6f12") + + result = strxor(term1, term2, output=term1) + + self.assertEqual(result, None) + self.assertEqual(term1, expected_xor) + self.assertEqual(term2, original_term2) + + def test_output_overlapping_memoryview(self): + """Verify result can be stored in overlapping memory""" + + term1 = memoryview(bytearray(unhexlify(b"ff339a83e5cd4cdf5649"))) + term2 = unhexlify(b"383d4ba020573314395b") + original_term2 = term2[:] + expected_xor = unhexlify(b"c70ed123c59a7fcb6f12") + + result = strxor(term1, term2, output=term1) + + self.assertEqual(result, None) + self.assertEqual(term1, expected_xor) + self.assertEqual(term2, original_term2) + + def test_output_ro_bytes(self): + """Verify result cannot be stored in read-only memory""" + + 
term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term2 = unhexlify(b"383d4ba020573314395b") + + self.assertRaises(TypeError, strxor, term1, term2, output=term1) + + def test_output_ro_memoryview(self): + """Verify result cannot be stored in read-only memory""" + + term1 = memoryview(unhexlify(b"ff339a83e5cd4cdf5649")) + term2 = unhexlify(b"383d4ba020573314395b") + + self.assertRaises(TypeError, strxor, term1, term2, output=term1) + + def test_output_incorrect_length(self): + """Verify result cannot be stored in memory of incorrect length""" + + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term2 = unhexlify(b"383d4ba020573314395b") + output = bytearray(len(term1) - 1) + + self.assertRaises(ValueError, strxor, term1, term2, output=output) + + +class Strxor_cTests(unittest.TestCase): + + def test1(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + result = unhexlify(b"be72dbc2a48c0d9e1708") + self.assertEqual(strxor_c(term1, 65), result) + + def test2(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + self.assertEqual(strxor_c(term1, 0), term1) + + def test3(self): + self.assertEqual(strxor_c(b"", 90), b"") + + def test_wrong_range(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + self.assertRaises(ValueError, strxor_c, term1, -1) + self.assertRaises(ValueError, strxor_c, term1, 256) + + def test_bytearray(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term1_ba = bytearray(term1) + result = unhexlify(b"be72dbc2a48c0d9e1708") + + self.assertEqual(strxor_c(term1_ba, 65), result) + + def test_memoryview(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + term1_mv = memoryview(term1) + result = unhexlify(b"be72dbc2a48c0d9e1708") + + self.assertEqual(strxor_c(term1_mv, 65), result) + + def test_output_bytearray(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + original_term1 = term1[:] + expected_result = unhexlify(b"be72dbc2a48c0d9e1708") + output = bytearray(len(term1)) + + result = strxor_c(term1, 65, output=output) + + self.assertEqual(result, None) + self.assertEqual(output, expected_result) + self.assertEqual(term1, original_term1) + + def test_output_memoryview(self): + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + original_term1 = term1[:] + expected_result = unhexlify(b"be72dbc2a48c0d9e1708") + output = memoryview(bytearray(len(term1))) + + result = strxor_c(term1, 65, output=output) + + self.assertEqual(result, None) + self.assertEqual(output, expected_result) + self.assertEqual(term1, original_term1) + + def test_output_overlapping_bytearray(self): + """Verify result can be stored in overlapping memory""" + + term1 = bytearray(unhexlify(b"ff339a83e5cd4cdf5649")) + expected_xor = unhexlify(b"be72dbc2a48c0d9e1708") + + result = strxor_c(term1, 65, output=term1) + + self.assertEqual(result, None) + self.assertEqual(term1, expected_xor) + + def test_output_overlapping_memoryview(self): + """Verify result can be stored in overlapping memory""" + + term1 = memoryview(bytearray(unhexlify(b"ff339a83e5cd4cdf5649"))) + expected_xor = unhexlify(b"be72dbc2a48c0d9e1708") + + result = strxor_c(term1, 65, output=term1) + + self.assertEqual(result, None) + self.assertEqual(term1, expected_xor) + + def test_output_ro_bytes(self): + """Verify result cannot be stored in read-only memory""" + + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + + self.assertRaises(TypeError, strxor_c, term1, 65, output=term1) + + def test_output_ro_memoryview(self): + """Verify result cannot be stored in read-only memory""" + + term1 = memoryview(unhexlify(b"ff339a83e5cd4cdf5649")) + term2 = 
unhexlify(b"383d4ba020573314395b") + + self.assertRaises(TypeError, strxor_c, term1, 65, output=term1) + + def test_output_incorrect_length(self): + """Verify result cannot be stored in memory of incorrect length""" + + term1 = unhexlify(b"ff339a83e5cd4cdf5649") + output = bytearray(len(term1) - 1) + + self.assertRaises(ValueError, strxor_c, term1, 65, output=output) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(StrxorTests) + tests += list_test_cases(Strxor_cTests) + return tests + + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/__init__.py new file mode 100644 index 000000000..3a4b8b62e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/__init__.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/__init__.py: Self-test for PyCrypto +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self tests + +These tests should perform quickly and can ideally be used every time an +application runs. +""" + +import sys +import unittest +from importlib import import_module +from Crypto.Util.py3compat import StringIO + + +class SelfTestError(Exception): + def __init__(self, message, result): + Exception.__init__(self, message, result) + self.message = message + self.result = result + + +def run(module=None, verbosity=0, stream=None, tests=None, config=None, **kwargs): + """Execute self-tests. + + This raises SelfTestError if any test is unsuccessful. + + You may optionally pass in a sub-module of SelfTest if you only want to + perform some of the tests. 
For example, the following would test only the + hash modules: + + Crypto.SelfTest.run(Crypto.SelfTest.Hash) + + """ + + if config is None: + config = {} + suite = unittest.TestSuite() + if module is None: + if tests is None: + tests = get_tests(config=config) + suite.addTests(tests) + else: + if tests is None: + suite.addTests(module.get_tests(config=config)) + else: + raise ValueError("'module' and 'tests' arguments are mutually exclusive") + if stream is None: + kwargs['stream'] = StringIO() + else: + kwargs['stream'] = stream + runner = unittest.TextTestRunner(verbosity=verbosity, **kwargs) + result = runner.run(suite) + if not result.wasSuccessful(): + if stream is None: + sys.stderr.write(kwargs['stream'].getvalue()) + raise SelfTestError("Self-test failed", result) + return result + + +def get_tests(config={}): + tests = [] + + module_names = [ + "Cipher", "Hash", "Protocol", "PublicKey", "Random", + "Util", "Signature", "IO", "Math", + ] + + for name in module_names: + module = import_module("Crypto.SelfTest." + name) + tests += module.get_tests(config=config) + + return tests + + +if __name__ == '__main__': + def suite(): + return unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/__main__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/__main__.py new file mode 100644 index 000000000..537254ab9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/__main__.py @@ -0,0 +1,43 @@ +#! /usr/bin/env python +# +# __main__.py : Stand-along loader for PyCryptodome test suite +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +from __future__ import print_function + +import sys + +from Crypto import SelfTest + +slow_tests = not ("--skip-slow-tests" in sys.argv) +if not slow_tests: + print("Skipping slow tests") + +wycheproof_warnings = "--wycheproof-warnings" in sys.argv +if wycheproof_warnings: + print("Printing Wycheproof warnings") + +if "-v" in sys.argv: + verbosity=2 +else: + verbosity=1 + +config = {'slow_tests': slow_tests, 'wycheproof_warnings': wycheproof_warnings} +SelfTest.run(stream=sys.stdout, verbosity=verbosity, config=config) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/loader.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/loader.py new file mode 100644 index 000000000..651ce1557 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/loader.py @@ -0,0 +1,239 @@ +# =================================================================== +# +# Copyright (c) 2016, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import os +import re +import json +import errno +import binascii +import warnings +from binascii import unhexlify +from Crypto.Util.py3compat import FileNotFoundError + + +try: + import pycryptodome_test_vectors # type: ignore + test_vectors_available = True +except ImportError: + test_vectors_available = False + + +def _load_tests(dir_comps, file_in, description, conversions): + """Load and parse a test vector file + + Return a list of objects, one per group of adjacent + KV lines or for a single line in the form "[.*]". + + For a group of lines, the object has one attribute per line. 
+ """ + + line_number = 0 + results = [] + + class TestVector(object): + def __init__(self, description, count): + self.desc = description + self.count = count + self.others = [] + + test_vector = None + count = 0 + new_group = True + + while True: + line_number += 1 + line = file_in.readline() + if not line: + if test_vector is not None: + results.append(test_vector) + break + line = line.strip() + + # Skip comments and empty lines + if line.startswith('#') or not line: + new_group = True + continue + + if line.startswith("["): + if test_vector is not None: + results.append(test_vector) + test_vector = None + results.append(line) + continue + + if new_group: + count += 1 + new_group = False + if test_vector is not None: + results.append(test_vector) + test_vector = TestVector("%s (#%d)" % (description, count), count) + + res = re.match("([A-Za-z0-9]+) = ?(.*)", line) + if not res: + test_vector.others += [line] + else: + token = res.group(1).lower() + data = res.group(2).lower() + + conversion = conversions.get(token, None) + if conversion is None: + if len(data) % 2 != 0: + data = "0" + data + setattr(test_vector, token, binascii.unhexlify(data)) + else: + setattr(test_vector, token, conversion(data)) + + # This line is ignored + return results + + +def load_test_vectors(dir_comps, file_name, description, conversions): + """Load and parse a test vector file, formatted using the NIST style. + + Args: + dir_comps (list of strings): + The path components under the ``pycryptodome_test_vectors`` package. + For instance ``("Cipher", "AES")``. + file_name (string): + The name of the file with the test vectors. + description (string): + A description applicable to the test vectors in the file. + conversions (dictionary): + The dictionary contains functions. + Values in the file that have an entry in this dictionary + will be converted usign the matching function. + Otherwise, values will be considered as hexadecimal and + converted to binary. + + Returns: + A list of test vector objects. + + The file is formatted in the following way: + + - Lines starting with "#" are comments and will be ignored. + - Each test vector is a sequence of 1 or more adjacent lines, where + each lines is an assignement. + - Test vectors are separated by an empty line, a comment, or + a line starting with "[". + + A test vector object has the following attributes: + + - desc (string): description + - counter (int): the order of the test vector in the file (from 1) + - others (list): zero or more lines of the test vector that were not assignments + - left-hand side of each assignment (lowercase): the value of the + assignement, either converted or bytes. 
+ """ + + results = None + + try: + if not test_vectors_available: + raise FileNotFoundError(errno.ENOENT, + os.strerror(errno.ENOENT), + file_name) + + description = "%s test (%s)" % (description, file_name) + + init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) + full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) + with open(full_file_name) as file_in: + results = _load_tests(dir_comps, file_in, description, conversions) + + except FileNotFoundError: + warnings.warn("Warning: skipping extended tests for " + description, + UserWarning, + stacklevel=2) + + return results + + +def load_test_vectors_wycheproof(dir_comps, file_name, description, + root_tag={}, group_tag={}, unit_tag={}): + + result = [] + try: + if not test_vectors_available: + raise FileNotFoundError(errno.ENOENT, + os.strerror(errno.ENOENT), + file_name) + + init_dir = os.path.dirname(pycryptodome_test_vectors.__file__) + full_file_name = os.path.join(os.path.join(init_dir, *dir_comps), file_name) + with open(full_file_name) as file_in: + tv_tree = json.load(file_in) + + except FileNotFoundError: + warnings.warn("Warning: skipping extended tests for " + description, + UserWarning, + stacklevel=2) + return result + + class TestVector(object): + pass + + common_root = {} + for k, v in root_tag.items(): + common_root[k] = v(tv_tree) + + for group in tv_tree['testGroups']: + + common_group = {} + for k, v in group_tag.items(): + common_group[k] = v(group) + + for test in group['tests']: + tv = TestVector() + + for k, v in common_root.items(): + setattr(tv, k, v) + for k, v in common_group.items(): + setattr(tv, k, v) + + tv.id = test['tcId'] + tv.comment = test['comment'] + for attr in 'key', 'iv', 'aad', 'msg', 'ct', 'tag', 'label', \ + 'ikm', 'salt', 'info', 'okm', 'sig', 'public', 'shared': + if attr in test: + setattr(tv, attr, unhexlify(test[attr])) + tv.filename = file_name + + for k, v in unit_tag.items(): + setattr(tv, k, v(test)) + + tv.valid = test['result'] != "invalid" + tv.warning = test['result'] == "acceptable" + + tv.filename = file_name + + result.append(tv) + + return result + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/st_common.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/st_common.py new file mode 100644 index 000000000..e098d8129 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/SelfTest/st_common.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/st_common.py: Common functions for SelfTest modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Common functions for SelfTest modules""" + +import unittest +import binascii +from Crypto.Util.py3compat import b + + +def list_test_cases(class_): + """Return a list of TestCase instances given a TestCase class + + This is useful when you have defined test* methods on your TestCase class. + """ + return unittest.TestLoader().loadTestsFromTestCase(class_) + +def strip_whitespace(s): + """Remove whitespace from a text or byte string""" + if isinstance(s,str): + return b("".join(s.split())) + else: + return b("").join(s.split()) + +def a2b_hex(s): + """Convert hexadecimal to binary, ignoring whitespace""" + return binascii.a2b_hex(strip_whitespace(s)) + +def b2a_hex(s): + """Convert binary to hexadecimal""" + # For completeness + return binascii.b2a_hex(s) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/DSS.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/DSS.py new file mode 100644 index 000000000..fa848179e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/DSS.py @@ -0,0 +1,403 @@ +# +# Signature/DSS.py : DSS.py +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.asn1 import DerSequence +from Crypto.Util.number import long_to_bytes +from Crypto.Math.Numbers import Integer + +from Crypto.Hash import HMAC +from Crypto.PublicKey.ECC import EccKey +from Crypto.PublicKey.DSA import DsaKey + +__all__ = ['DssSigScheme', 'new'] + + +class DssSigScheme(object): + """A (EC)DSA signature object. + Do not instantiate directly. + Use :func:`Crypto.Signature.DSS.new`. + """ + + def __init__(self, key, encoding, order): + """Create a new Digital Signature Standard (DSS) object. + + Do not instantiate this object directly, + use `Crypto.Signature.DSS.new` instead. 
+ """ + + self._key = key + self._encoding = encoding + self._order = order + + self._order_bits = self._order.size_in_bits() + self._order_bytes = (self._order_bits - 1) // 8 + 1 + + def can_sign(self): + """Return ``True`` if this signature object can be used + for signing messages.""" + + return self._key.has_private() + + def _compute_nonce(self, msg_hash): + raise NotImplementedError("To be provided by subclasses") + + def _valid_hash(self, msg_hash): + raise NotImplementedError("To be provided by subclasses") + + def sign(self, msg_hash): + """Compute the DSA/ECDSA signature of a message. + + Args: + msg_hash (hash object): + The hash that was carried out over the message. + The object belongs to the :mod:`Crypto.Hash` package. + Under mode ``'fips-186-3'``, the hash must be a FIPS + approved secure hash (SHA-2 or SHA-3). + + :return: The signature as ``bytes`` + :raise ValueError: if the hash algorithm is incompatible to the (EC)DSA key + :raise TypeError: if the (EC)DSA key has no private half + """ + + if not self._key.has_private(): + raise TypeError("Private key is needed to sign") + + if not self._valid_hash(msg_hash): + raise ValueError("Hash is not sufficiently strong") + + # Generate the nonce k (critical!) + nonce = self._compute_nonce(msg_hash) + + # Perform signature using the raw API + z = Integer.from_bytes(msg_hash.digest()[:self._order_bytes]) + sig_pair = self._key._sign(z, nonce) + + # Encode the signature into a single byte string + if self._encoding == 'binary': + output = b"".join([long_to_bytes(x, self._order_bytes) + for x in sig_pair]) + else: + # Dss-sig ::= SEQUENCE { + # r INTEGER, + # s INTEGER + # } + # Ecdsa-Sig-Value ::= SEQUENCE { + # r INTEGER, + # s INTEGER + # } + output = DerSequence(sig_pair).encode() + + return output + + def verify(self, msg_hash, signature): + """Check if a certain (EC)DSA signature is authentic. + + Args: + msg_hash (hash object): + The hash that was carried out over the message. + This is an object belonging to the :mod:`Crypto.Hash` module. + Under mode ``'fips-186-3'``, the hash must be a FIPS + approved secure hash (SHA-2 or SHA-3). + + signature (``bytes``): + The signature that needs to be validated. 
+ + :raise ValueError: if the signature is not authentic + """ + + if not self._valid_hash(msg_hash): + raise ValueError("Hash is not sufficiently strong") + + if self._encoding == 'binary': + if len(signature) != (2 * self._order_bytes): + raise ValueError("The signature is not authentic (length)") + r_prime, s_prime = [Integer.from_bytes(x) + for x in (signature[:self._order_bytes], + signature[self._order_bytes:])] + else: + try: + der_seq = DerSequence().decode(signature, strict=True) + except (ValueError, IndexError): + raise ValueError("The signature is not authentic (DER)") + if len(der_seq) != 2 or not der_seq.hasOnlyInts(): + raise ValueError("The signature is not authentic (DER content)") + r_prime, s_prime = Integer(der_seq[0]), Integer(der_seq[1]) + + if not (0 < r_prime < self._order) or not (0 < s_prime < self._order): + raise ValueError("The signature is not authentic (d)") + + z = Integer.from_bytes(msg_hash.digest()[:self._order_bytes]) + result = self._key._verify(z, (r_prime, s_prime)) + if not result: + raise ValueError("The signature is not authentic") + # Make PyCrypto code to fail + return False + + +class DeterministicDsaSigScheme(DssSigScheme): + # Also applicable to ECDSA + + def __init__(self, key, encoding, order, private_key): + super(DeterministicDsaSigScheme, self).__init__(key, encoding, order) + self._private_key = private_key + + def _bits2int(self, bstr): + """See 2.3.2 in RFC6979""" + + result = Integer.from_bytes(bstr) + q_len = self._order.size_in_bits() + b_len = len(bstr) * 8 + if b_len > q_len: + # Only keep leftmost q_len bits + result >>= (b_len - q_len) + return result + + def _int2octets(self, int_mod_q): + """See 2.3.3 in RFC6979""" + + assert 0 < int_mod_q < self._order + return long_to_bytes(int_mod_q, self._order_bytes) + + def _bits2octets(self, bstr): + """See 2.3.4 in RFC6979""" + + z1 = self._bits2int(bstr) + if z1 < self._order: + z2 = z1 + else: + z2 = z1 - self._order + return self._int2octets(z2) + + def _compute_nonce(self, mhash): + """Generate k in a deterministic way""" + + # See section 3.2 in RFC6979.txt + # Step a + h1 = mhash.digest() + # Step b + mask_v = b'\x01' * mhash.digest_size + # Step c + nonce_k = b'\x00' * mhash.digest_size + + for int_oct in (b'\x00', b'\x01'): + # Step d/f + nonce_k = HMAC.new(nonce_k, + mask_v + int_oct + + self._int2octets(self._private_key) + + self._bits2octets(h1), mhash).digest() + # Step e/g + mask_v = HMAC.new(nonce_k, mask_v, mhash).digest() + + nonce = -1 + while not (0 < nonce < self._order): + # Step h.C (second part) + if nonce != -1: + nonce_k = HMAC.new(nonce_k, mask_v + b'\x00', + mhash).digest() + mask_v = HMAC.new(nonce_k, mask_v, mhash).digest() + + # Step h.A + mask_t = b"" + + # Step h.B + while len(mask_t) < self._order_bytes: + mask_v = HMAC.new(nonce_k, mask_v, mhash).digest() + mask_t += mask_v + + # Step h.C (first part) + nonce = self._bits2int(mask_t) + return nonce + + def _valid_hash(self, msg_hash): + return True + + +class FipsDsaSigScheme(DssSigScheme): + + #: List of L (bit length of p) and N (bit length of q) combinations + #: that are allowed by FIPS 186-3. The security level is provided in + #: Table 2 of FIPS 800-57 (rev3). 
+ _fips_186_3_L_N = ( + (1024, 160), # 80 bits (SHA-1 or stronger) + (2048, 224), # 112 bits (SHA-224 or stronger) + (2048, 256), # 128 bits (SHA-256 or stronger) + (3072, 256) # 256 bits (SHA-512) + ) + + def __init__(self, key, encoding, order, randfunc): + super(FipsDsaSigScheme, self).__init__(key, encoding, order) + self._randfunc = randfunc + + L = Integer(key.p).size_in_bits() + if (L, self._order_bits) not in self._fips_186_3_L_N: + error = ("L/N (%d, %d) is not compliant to FIPS 186-3" + % (L, self._order_bits)) + raise ValueError(error) + + def _compute_nonce(self, msg_hash): + # hash is not used + return Integer.random_range(min_inclusive=1, + max_exclusive=self._order, + randfunc=self._randfunc) + + def _valid_hash(self, msg_hash): + """Verify that SHA-1, SHA-2 or SHA-3 are used""" + return (msg_hash.oid == "1.3.14.3.2.26" or + msg_hash.oid.startswith("2.16.840.1.101.3.4.2.")) + + +class FipsEcDsaSigScheme(DssSigScheme): + + def __init__(self, key, encoding, order, randfunc): + super(FipsEcDsaSigScheme, self).__init__(key, encoding, order) + self._randfunc = randfunc + + def _compute_nonce(self, msg_hash): + return Integer.random_range(min_inclusive=1, + max_exclusive=self._key._curve.order, + randfunc=self._randfunc) + + def _valid_hash(self, msg_hash): + """Verify that the strength of the hash matches or exceeds + the strength of the EC. We fail if the hash is too weak.""" + + modulus_bits = self._key.pointQ.size_in_bits() + + # SHS: SHA-2, SHA-3, truncated SHA-512 + sha224 = ("2.16.840.1.101.3.4.2.4", "2.16.840.1.101.3.4.2.7", "2.16.840.1.101.3.4.2.5") + sha256 = ("2.16.840.1.101.3.4.2.1", "2.16.840.1.101.3.4.2.8", "2.16.840.1.101.3.4.2.6") + sha384 = ("2.16.840.1.101.3.4.2.2", "2.16.840.1.101.3.4.2.9") + sha512 = ("2.16.840.1.101.3.4.2.3", "2.16.840.1.101.3.4.2.10") + shs = sha224 + sha256 + sha384 + sha512 + + try: + result = msg_hash.oid in shs + except AttributeError: + result = False + return result + + +def new(key, mode, encoding='binary', randfunc=None): + """Create a signature object :class:`DssSigScheme` that + can perform (EC)DSA signature or verification. + + .. note:: + Refer to `NIST SP 800 Part 1 Rev 4`_ (or newer release) for an + overview of the recommended key lengths. + + Args: + key (:class:`Crypto.PublicKey.DSA` or :class:`Crypto.PublicKey.ECC`): + The key to use for computing the signature (*private* keys only) + or for verifying one. + For DSA keys, let ``L`` and ``N`` be the bit lengths of the modulus ``p`` + and of ``q``: the pair ``(L,N)`` must appear in the following list, + in compliance to section 4.2 of `FIPS 186-4`_: + + - (1024, 160) *legacy only; do not create new signatures with this* + - (2048, 224) *deprecated; do not create new signatures with this* + - (2048, 256) + - (3072, 256) + + For ECC, only keys over P-224, P-256, P-384, and P-521 are accepted. + + mode (string): + The parameter can take these values: + + - ``'fips-186-3'``. The signature generation is randomized and carried out + according to `FIPS 186-3`_: the nonce ``k`` is taken from the RNG. + - ``'deterministic-rfc6979'``. The signature generation is not + randomized. See RFC6979_. + + encoding (string): + How the signature is encoded. This value determines the output of + :meth:`sign` and the input to :meth:`verify`. + + The following values are accepted: + + - ``'binary'`` (default), the signature is the raw concatenation + of ``r`` and ``s``. It is defined in the IEEE P.1363 standard. + For DSA, the size in bytes of the signature is ``N/4`` bytes + (e.g. 
64 for ``N=256``). + For ECDSA, the signature is always twice the length of a point + coordinate (e.g. 64 bytes for P-256). + + - ``'der'``, the signature is a ASN.1 DER SEQUENCE + with two INTEGERs (``r`` and ``s``). It is defined in RFC3279_. + The size of the signature is variable. + + randfunc (callable): + A function that returns random ``bytes``, of a given length. + If omitted, the internal RNG is used. + Only applicable for the *'fips-186-3'* mode. + + .. _FIPS 186-3: http://csrc.nist.gov/publications/fips/fips186-3/fips_186-3.pdf + .. _FIPS 186-4: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf + .. _NIST SP 800 Part 1 Rev 4: http://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-57pt1r4.pdf + .. _RFC6979: http://tools.ietf.org/html/rfc6979 + .. _RFC3279: https://tools.ietf.org/html/rfc3279#section-2.2.2 + """ + + # The goal of the 'mode' parameter is to avoid to + # have the current version of the standard as default. + # + # Over time, such version will be superseded by (for instance) + # FIPS 186-4 and it will be odd to have -3 as default. + + if encoding not in ('binary', 'der'): + raise ValueError("Unknown encoding '%s'" % encoding) + + if isinstance(key, EccKey): + order = key._curve.order + private_key_attr = 'd' + if key._curve.name == "ed25519": + raise ValueError("ECC key is not on a NIST P curve") + elif isinstance(key, DsaKey): + order = Integer(key.q) + private_key_attr = 'x' + else: + raise ValueError("Unsupported key type " + str(type(key))) + + if key.has_private(): + private_key = getattr(key, private_key_attr) + else: + private_key = None + + if mode == 'deterministic-rfc6979': + return DeterministicDsaSigScheme(key, encoding, order, private_key) + elif mode == 'fips-186-3': + if isinstance(key, EccKey): + return FipsEcDsaSigScheme(key, encoding, order, randfunc) + else: + return FipsDsaSigScheme(key, encoding, order, randfunc) + else: + raise ValueError("Unknown DSS mode '%s'" % mode) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/DSS.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/DSS.pyi new file mode 100644 index 000000000..08cad814c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/DSS.pyi @@ -0,0 +1,27 @@ +from typing import Union, Optional, Callable +from typing_extensions import Protocol + +from Crypto.PublicKey.DSA import DsaKey +from Crypto.PublicKey.ECC import EccKey + +class Hash(Protocol): + def digest(self) -> bytes: ... + +__all__ = ['new'] + +class DssSigScheme: + def __init__(self, key: Union[DsaKey, EccKey], encoding: str, order: int) -> None: ... + def can_sign(self) -> bool: ... + def sign(self, msg_hash: Hash) -> bytes: ... + def verify(self, msg_hash: Hash, signature: bytes) -> bool: ... + +class DeterministicDsaSigScheme(DssSigScheme): + def __init__(self, key, encoding, order, private_key) -> None: ... + +class FipsDsaSigScheme(DssSigScheme): + def __init__(self, key: DsaKey, encoding: str, order: int, randfunc: Callable) -> None: ... + +class FipsEcDsaSigScheme(DssSigScheme): + def __init__(self, key: EccKey, encoding: str, order: int, randfunc: Callable) -> None: ... + +def new(key: Union[DsaKey, EccKey], mode: str, encoding: Optional[str]='binary', randfunc: Optional[Callable]=None) -> Union[DeterministicDsaSigScheme, FipsDsaSigScheme, FipsEcDsaSigScheme]: ... 
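A minimal usage sketch for the DSS module added above, assuming the standard PyCryptodome ECC and SHA256 modules; the curve and mode choices (P-256, 'fips-186-3') are illustrative only:

from Crypto.PublicKey import ECC
from Crypto.Hash import SHA256
from Crypto.Signature import DSS

key = ECC.generate(curve='P-256')
h = SHA256.new(b'message')

signer = DSS.new(key, 'fips-186-3')                  # nonce k drawn from the RNG
signature = signer.sign(h)                           # raw r||s ('binary' encoding)

verifier = DSS.new(key.public_key(), 'fips-186-3')
verifier.verify(SHA256.new(b'message'), signature)   # raises ValueError if not authentic

For reproducible signatures, the 'deterministic-rfc6979' mode derives the nonce from the private key and the message hash instead of the RNG.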
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/PKCS1_PSS.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/PKCS1_PSS.py new file mode 100644 index 000000000..c39d38816 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/PKCS1_PSS.py @@ -0,0 +1,55 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +Legacy module for PKCS#1 PSS signatures. + +:undocumented: __package__ +""" + +import types + +from Crypto.Signature import pss + + +def _pycrypto_verify(self, hash_object, signature): + try: + self._verify(hash_object, signature) + except (ValueError, TypeError): + return False + return True + + +def new(rsa_key, mgfunc=None, saltLen=None, randfunc=None): + pkcs1 = pss.new(rsa_key, mask_func=mgfunc, + salt_bytes=saltLen, rand_func=randfunc) + pkcs1._verify = pkcs1.verify + pkcs1.verify = types.MethodType(_pycrypto_verify, pkcs1) + return pkcs1 diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/PKCS1_PSS.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/PKCS1_PSS.pyi new file mode 100644 index 000000000..1371e69a4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/PKCS1_PSS.pyi @@ -0,0 +1,28 @@ +from typing import Union, Callable, Optional +from typing_extensions import Protocol + +from Crypto.PublicKey.RSA import RsaKey + + +class Hash(Protocol): + def digest(self) -> bytes: ... + def update(self, bytes) -> None: ... + + +class HashModule(Protocol): + @staticmethod + def new(data: Optional[bytes]) -> Hash: ... + + +MaskFunction = Callable[[bytes, int, Union[Hash, HashModule]], bytes] +RndFunction = Callable[[int], bytes] + +class PSS_SigScheme: + def __init__(self, key: RsaKey, mgfunc: MaskFunction, saltLen: int, randfunc: RndFunction) -> None: ... + def can_sign(self) -> bool: ... + def sign(self, msg_hash: Hash) -> bytes: ... + def verify(self, msg_hash: Hash, signature: bytes) -> bool: ... 
+ + + +def new(rsa_key: RsaKey, mgfunc: Optional[MaskFunction]=None, saltLen: Optional[int]=None, randfunc: Optional[RndFunction]=None) -> PSS_SigScheme: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/PKCS1_v1_5.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/PKCS1_v1_5.py new file mode 100644 index 000000000..ac888edb4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/PKCS1_v1_5.py @@ -0,0 +1,53 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +""" +Legacy module for PKCS#1 v1.5 signatures. + +:undocumented: __package__ +""" + +import types + +from Crypto.Signature import pkcs1_15 + +def _pycrypto_verify(self, hash_object, signature): + try: + self._verify(hash_object, signature) + except (ValueError, TypeError): + return False + return True + +def new(rsa_key): + pkcs1 = pkcs1_15.new(rsa_key) + pkcs1._verify = pkcs1.verify + pkcs1.verify = types.MethodType(_pycrypto_verify, pkcs1) + return pkcs1 + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/PKCS1_v1_5.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/PKCS1_v1_5.pyi new file mode 100644 index 000000000..6e9d8eda3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/PKCS1_v1_5.pyi @@ -0,0 +1,16 @@ +from typing import Optional +from typing_extensions import Protocol + +from Crypto.PublicKey.RSA import RsaKey + +class Hash(Protocol): + def digest(self) -> bytes: ... + +class PKCS115_SigScheme: + def __init__(self, rsa_key: RsaKey) -> None: ... + def can_sign(self) -> bool: ... + def sign(self, msg_hash: Hash) -> bytes: ... + def verify(self, msg_hash: Hash, signature: bytes) -> bool: ... + + +def new(rsa_key: RsaKey) -> PKCS115_SigScheme: ... 
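A minimal sketch contrasting the legacy PKCS#1 v1.5 wrapper above with the pkcs1_15 module it delegates to, assuming an RSA key generated with the standard PyCryptodome API; the legacy verify() returns a boolean, while the modern module raises ValueError on a bad signature:

from Crypto.PublicKey import RSA
from Crypto.Hash import SHA256
from Crypto.Signature import PKCS1_v1_5, pkcs1_15

key = RSA.generate(2048)
h = SHA256.new(b'message')
signature = pkcs1_15.new(key).sign(h)

# Legacy wrapper: boolean result, as in old PyCrypto
assert PKCS1_v1_5.new(key.publickey()).verify(h, signature)

# Modern module: no return value on success, ValueError on failure
pkcs1_15.new(key.publickey()).verify(h, signature)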
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/__init__.py new file mode 100644 index 000000000..11ca64c39 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/__init__.py @@ -0,0 +1,36 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +"""Digital signature protocols + +A collection of standardized protocols to carry out digital signatures. +""" + +__all__ = ['PKCS1_v1_5', 'PKCS1_PSS', 'DSS', 'pkcs1_15', 'pss', 'eddsa'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/eddsa.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/eddsa.py new file mode 100644 index 000000000..658f272aa --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/eddsa.py @@ -0,0 +1,343 @@ +# =================================================================== +# +# Copyright (c) 2022, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Math.Numbers import Integer + +from Crypto.Hash import SHA512, SHAKE256 +from Crypto.Util.py3compat import bchr, is_bytes +from Crypto.PublicKey.ECC import (EccKey, + construct, + _import_ed25519_public_key, + _import_ed448_public_key) + + +def import_public_key(encoded): + """Create a new Ed25519 or Ed448 public key object, + starting from the key encoded as raw ``bytes``, + in the format described in RFC8032. + + Args: + encoded (bytes): + The EdDSA public key to import. + It must be 32 bytes for Ed25519, and 57 bytes for Ed448. + + Returns: + :class:`Crypto.PublicKey.EccKey` : a new ECC key object. + + Raises: + ValueError: when the given key cannot be parsed. + """ + + if len(encoded) == 32: + x, y = _import_ed25519_public_key(encoded) + curve_name = "Ed25519" + elif len(encoded) == 57: + x, y = _import_ed448_public_key(encoded) + curve_name = "Ed448" + else: + raise ValueError("Not an EdDSA key (%d bytes)" % len(encoded)) + return construct(curve=curve_name, point_x=x, point_y=y) + + +def import_private_key(encoded): + """Create a new Ed25519 or Ed448 private key object, + starting from the key encoded as raw ``bytes``, + in the format described in RFC8032. + + Args: + encoded (bytes): + The EdDSA private key to import. + It must be 32 bytes for Ed25519, and 57 bytes for Ed448. + + Returns: + :class:`Crypto.PublicKey.EccKey` : a new ECC key object. + + Raises: + ValueError: when the given key cannot be parsed. + """ + + if len(encoded) == 32: + curve_name = "ed25519" + elif len(encoded) == 57: + curve_name = "ed448" + else: + raise ValueError("Incorrect length. Only EdDSA private keys are supported.") + + # Note that the private key is truly a sequence of random bytes, + # so we cannot check its correctness in any way. + + return construct(seed=encoded, curve=curve_name) + + +class EdDSASigScheme(object): + """An EdDSA signature object. + Do not instantiate directly. + Use :func:`Crypto.Signature.eddsa.new`. + """ + + def __init__(self, key, context): + """Create a new EdDSA object. + + Do not instantiate this object directly, + use `Crypto.Signature.DSS.new` instead. + """ + + self._key = key + self._context = context + self._A = key._export_eddsa() + self._order = key._curve.order + + def can_sign(self): + """Return ``True`` if this signature object can be used + for signing messages.""" + + return self._key.has_private() + + def sign(self, msg_or_hash): + """Compute the EdDSA signature of a message. + + Args: + msg_or_hash (bytes or a hash object): + The message to sign (``bytes``, in case of *PureEdDSA*) or + the hash that was carried out over the message (hash object, for *HashEdDSA*). + + The hash object must be :class:`Crypto.Hash.SHA512` for Ed25519, + and :class:`Crypto.Hash.SHAKE256` object for Ed448. + + :return: The signature as ``bytes``. It is always 64 bytes for Ed25519, and 114 bytes for Ed448. 
+ :raise TypeError: if the EdDSA key has no private half + """ + + if not self._key.has_private(): + raise TypeError("Private key is needed to sign") + + if self._key._curve.name == "ed25519": + ph = isinstance(msg_or_hash, SHA512.SHA512Hash) + if not (ph or is_bytes(msg_or_hash)): + raise TypeError("'msg_or_hash' must be bytes of a SHA-512 hash") + eddsa_sign_method = self._sign_ed25519 + + elif self._key._curve.name == "ed448": + ph = isinstance(msg_or_hash, SHAKE256.SHAKE256_XOF) + if not (ph or is_bytes(msg_or_hash)): + raise TypeError("'msg_or_hash' must be bytes of a SHAKE256 hash") + eddsa_sign_method = self._sign_ed448 + + else: + raise ValueError("Incorrect curve for EdDSA") + + return eddsa_sign_method(msg_or_hash, ph) + + def _sign_ed25519(self, msg_or_hash, ph): + + if self._context or ph: + flag = int(ph) + # dom2(flag, self._context) + dom2 = b'SigEd25519 no Ed25519 collisions' + bchr(flag) + \ + bchr(len(self._context)) + self._context + else: + dom2 = b'' + + PHM = msg_or_hash.digest() if ph else msg_or_hash + + # See RFC 8032, section 5.1.6 + + # Step 2 + r_hash = SHA512.new(dom2 + self._key._prefix + PHM).digest() + r = Integer.from_bytes(r_hash, 'little') % self._order + # Step 3 + R_pk = EccKey(point=r * self._key._curve.G)._export_eddsa() + # Step 4 + k_hash = SHA512.new(dom2 + R_pk + self._A + PHM).digest() + k = Integer.from_bytes(k_hash, 'little') % self._order + # Step 5 + s = (r + k * self._key.d) % self._order + + return R_pk + s.to_bytes(32, 'little') + + def _sign_ed448(self, msg_or_hash, ph): + + flag = int(ph) + # dom4(flag, self._context) + dom4 = b'SigEd448' + bchr(flag) + \ + bchr(len(self._context)) + self._context + + PHM = msg_or_hash.read(64) if ph else msg_or_hash + + # See RFC 8032, section 5.2.6 + + # Step 2 + r_hash = SHAKE256.new(dom4 + self._key._prefix + PHM).read(114) + r = Integer.from_bytes(r_hash, 'little') % self._order + # Step 3 + R_pk = EccKey(point=r * self._key._curve.G)._export_eddsa() + # Step 4 + k_hash = SHAKE256.new(dom4 + R_pk + self._A + PHM).read(114) + k = Integer.from_bytes(k_hash, 'little') % self._order + # Step 5 + s = (r + k * self._key.d) % self._order + + return R_pk + s.to_bytes(57, 'little') + + def verify(self, msg_or_hash, signature): + """Check if an EdDSA signature is authentic. + + Args: + msg_or_hash (bytes or a hash object): + The message to verify (``bytes``, in case of *PureEdDSA*) or + the hash that was carried out over the message (hash object, for *HashEdDSA*). + + The hash object must be :class:`Crypto.Hash.SHA512` object for Ed25519, + and :class:`Crypto.Hash.SHAKE256` for Ed448. + + signature (``bytes``): + The signature that needs to be validated. + It must be 64 bytes for Ed25519, and 114 bytes for Ed448. 
+ + :raise ValueError: if the signature is not authentic + """ + + if self._key._curve.name == "ed25519": + ph = isinstance(msg_or_hash, SHA512.SHA512Hash) + if not (ph or is_bytes(msg_or_hash)): + raise TypeError("'msg_or_hash' must be bytes of a SHA-512 hash") + eddsa_verify_method = self._verify_ed25519 + + elif self._key._curve.name == "ed448": + ph = isinstance(msg_or_hash, SHAKE256.SHAKE256_XOF) + if not (ph or is_bytes(msg_or_hash)): + raise TypeError("'msg_or_hash' must be bytes of a SHAKE256 hash") + eddsa_verify_method = self._verify_ed448 + + else: + raise ValueError("Incorrect curve for EdDSA") + + return eddsa_verify_method(msg_or_hash, signature, ph) + + def _verify_ed25519(self, msg_or_hash, signature, ph): + + if len(signature) != 64: + raise ValueError("The signature is not authentic (length)") + + if self._context or ph: + flag = int(ph) + dom2 = b'SigEd25519 no Ed25519 collisions' + bchr(flag) + \ + bchr(len(self._context)) + self._context + else: + dom2 = b'' + + PHM = msg_or_hash.digest() if ph else msg_or_hash + + # Section 5.1.7 + + # Step 1 + try: + R = import_public_key(signature[:32]).pointQ + except ValueError: + raise ValueError("The signature is not authentic (R)") + s = Integer.from_bytes(signature[32:], 'little') + if s > self._order: + raise ValueError("The signature is not authentic (S)") + # Step 2 + k_hash = SHA512.new(dom2 + signature[:32] + self._A + PHM).digest() + k = Integer.from_bytes(k_hash, 'little') % self._order + # Step 3 + point1 = s * 8 * self._key._curve.G + # OPTIMIZE: with double-scalar multiplication, with no SCA + # countermeasures because it is public values + point2 = 8 * R + k * 8 * self._key.pointQ + if point1 != point2: + raise ValueError("The signature is not authentic") + + def _verify_ed448(self, msg_or_hash, signature, ph): + + if len(signature) != 114: + raise ValueError("The signature is not authentic (length)") + + flag = int(ph) + # dom4(flag, self._context) + dom4 = b'SigEd448' + bchr(flag) + \ + bchr(len(self._context)) + self._context + + PHM = msg_or_hash.read(64) if ph else msg_or_hash + + # Section 5.2.7 + + # Step 1 + try: + R = import_public_key(signature[:57]).pointQ + except ValueError: + raise ValueError("The signature is not authentic (R)") + s = Integer.from_bytes(signature[57:], 'little') + if s > self._order: + raise ValueError("The signature is not authentic (S)") + # Step 2 + k_hash = SHAKE256.new(dom4 + signature[:57] + self._A + PHM).read(114) + k = Integer.from_bytes(k_hash, 'little') % self._order + # Step 3 + point1 = s * 8 * self._key._curve.G + # OPTIMIZE: with double-scalar multiplication, with no SCA + # countermeasures because it is public values + point2 = 8 * R + k * 8 * self._key.pointQ + if point1 != point2: + raise ValueError("The signature is not authentic") + + +def new(key, mode, context=None): + """Create a signature object :class:`EdDSASigScheme` that + can perform or verify an EdDSA signature. + + Args: + key (:class:`Crypto.PublicKey.ECC` object): + The key to use for computing the signature (*private* keys only) + or for verifying one. + The key must be on the curve ``Ed25519`` or ``Ed448``. + + mode (string): + This parameter must be ``'rfc8032'``. + + context (bytes): + Up to 255 bytes of `context `_, + which is a constant byte string to segregate different protocols or + different applications of the same key. 
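As a rough usage sketch for the ``eddsa`` module documented above (it assumes a PyCryptodome build where ``Crypto.PublicKey.ECC.generate`` accepts ``curve='ed25519'``; the message is an arbitrary placeholder), ``new`` is typically combined with ``sign`` and ``verify`` along these lines::

    from Crypto.PublicKey import ECC
    from Crypto.Signature import eddsa

    # Generate an Ed25519 key and sign a raw message (PureEdDSA)
    key = ECC.generate(curve='ed25519')
    signer = eddsa.new(key, 'rfc8032')
    signature = signer.sign(b'A message to sign')

    # verify() returns nothing on success and raises ValueError otherwise
    verifier = eddsa.new(key.public_key(), 'rfc8032')
    try:
        verifier.verify(b'A message to sign', signature)
        print("The signature is authentic")
    except ValueError:
        print("The signature is not authentic")

For HashEdDSA, the same calls would be given a ``Crypto.Hash.SHA512`` object (Ed25519) or a ``Crypto.Hash.SHAKE256`` object (Ed448) instead of the raw ``bytes``.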
+ """ + + if not isinstance(key, EccKey) or not key._is_eddsa(): + raise ValueError("EdDSA can only be used with EdDSA keys") + + if mode != 'rfc8032': + raise ValueError("Mode must be 'rfc8032'") + + if context is None: + context = b'' + elif len(context) > 255: + raise ValueError("Context for EdDSA must not be longer than 255 bytes") + + return EdDSASigScheme(key, context) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/eddsa.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/eddsa.pyi new file mode 100644 index 000000000..060d5938f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/eddsa.pyi @@ -0,0 +1,21 @@ +from typing import Union, Optional +from typing_extensions import Protocol +from Crypto.PublicKey.ECC import EccKey + +class Hash(Protocol): + def digest(self) -> bytes: ... + +class XOF(Protocol): + def read(self, len: int) -> bytes: ... + +def import_public_key(encoded: bytes) -> EccKey: ... +def import_private_key(encoded: bytes) -> EccKey: ... + +class EdDSASigScheme(object): + + def __init__(self, key: EccKey, context: bytes) -> None: ... + def can_sign(self) -> bool: ... + def sign(self, msg_or_hash: Union[bytes, Hash, XOF]) -> bytes: ... + def verify(self, msg_or_hash: Union[bytes, Hash, XOF], signature: bytes) -> None: ... + +def new(key: EccKey, mode: str, context: Optional[bytes]=None) -> EdDSASigScheme: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/pkcs1_15.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/pkcs1_15.py new file mode 100644 index 000000000..0b02d998e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/pkcs1_15.py @@ -0,0 +1,223 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +import Crypto.Util.number +from Crypto.Util.number import ceil_div, bytes_to_long, long_to_bytes +from Crypto.Util.asn1 import DerSequence, DerNull, DerOctetString, DerObjectId + +class PKCS115_SigScheme: + """A signature object for ``RSASSA-PKCS1-v1_5``. 
+ Do not instantiate directly. + Use :func:`Crypto.Signature.pkcs1_15.new`. + """ + + def __init__(self, rsa_key): + """Initialize this PKCS#1 v1.5 signature scheme object. + + :Parameters: + rsa_key : an RSA key object + Creation of signatures is only possible if this is a *private* + RSA key. Verification of signatures is always possible. + """ + self._key = rsa_key + + def can_sign(self): + """Return ``True`` if this object can be used to sign messages.""" + return self._key.has_private() + + def sign(self, msg_hash): + """Create the PKCS#1 v1.5 signature of a message. + + This function is also called ``RSASSA-PKCS1-V1_5-SIGN`` and + it is specified in + `section 8.2.1 of RFC8017 `_. + + :parameter msg_hash: + This is an object from the :mod:`Crypto.Hash` package. + It has been used to digest the message to sign. + :type msg_hash: hash object + + :return: the signature encoded as a *byte string*. + :raise ValueError: if the RSA key is not long enough for the given hash algorithm. + :raise TypeError: if the RSA key has no private half. + """ + + # See 8.2.1 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + + # Step 1 + em = _EMSA_PKCS1_V1_5_ENCODE(msg_hash, k) + # Step 2a (OS2IP) + em_int = bytes_to_long(em) + # Step 2b (RSASP1) and Step 2c (I2OSP) + signature = self._key._decrypt_to_bytes(em_int) + # Verify no faults occurred + if em_int != pow(bytes_to_long(signature), self._key.e, self._key.n): + raise ValueError("Fault detected in RSA private key operation") + return signature + + def verify(self, msg_hash, signature): + """Check if the PKCS#1 v1.5 signature over a message is valid. + + This function is also called ``RSASSA-PKCS1-V1_5-VERIFY`` and + it is specified in + `section 8.2.2 of RFC8037 `_. + + :parameter msg_hash: + The hash that was carried out over the message. This is an object + belonging to the :mod:`Crypto.Hash` module. + :type parameter: hash object + + :parameter signature: + The signature that needs to be validated. + :type signature: byte string + + :raise ValueError: if the signature is not valid. + """ + + # See 8.2.2 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits, 8) # Convert from bits to bytes + + # Step 1 + if len(signature) != k: + raise ValueError("Invalid signature") + # Step 2a (O2SIP) + signature_int = bytes_to_long(signature) + # Step 2b (RSAVP1) + em_int = self._key._encrypt(signature_int) + # Step 2c (I2OSP) + em1 = long_to_bytes(em_int, k) + # Step 3 + try: + possible_em1 = [ _EMSA_PKCS1_V1_5_ENCODE(msg_hash, k, True) ] + # MD2/4/5 hashes always require NULL params in AlgorithmIdentifier. + # For all others, it is optional. + try: + algorithm_is_md = msg_hash.oid.startswith('1.2.840.113549.2.') + except AttributeError: + algorithm_is_md = False + if not algorithm_is_md: # MD2/MD4/MD5 + possible_em1.append(_EMSA_PKCS1_V1_5_ENCODE(msg_hash, k, False)) + except ValueError: + raise ValueError("Invalid signature") + # Step 4 + # By comparing the full encodings (as opposed to checking each + # of its components one at a time) we avoid attacks to the padding + # scheme like Bleichenbacher's (see http://www.mail-archive.com/cryptography@metzdowd.com/msg06537). + # + if em1 not in possible_em1: + raise ValueError("Invalid signature") + pass + + +def _EMSA_PKCS1_V1_5_ENCODE(msg_hash, emLen, with_hash_parameters=True): + """ + Implement the ``EMSA-PKCS1-V1_5-ENCODE`` function, as defined + in PKCS#1 v2.1 (RFC3447, 9.2). 
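A minimal end-to-end sketch of the ``pkcs1_15`` scheme documented above (the 2048-bit key, the SHA-256 digest and the message are arbitrary illustrative choices)::

    from Crypto.PublicKey import RSA
    from Crypto.Hash import SHA256
    from Crypto.Signature import pkcs1_15

    key = RSA.generate(2048)
    h = SHA256.new(b'To be signed')

    # Signing requires the private key
    signature = pkcs1_15.new(key).sign(h)

    # verify() returns nothing on success and raises ValueError otherwise
    try:
        pkcs1_15.new(key.publickey()).verify(h, signature)
        print("The signature is valid")
    except ValueError:
        print("The signature is not valid")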
+ + ``_EMSA-PKCS1-V1_5-ENCODE`` actually accepts the message ``M`` as input, + and hash it internally. Here, we expect that the message has already + been hashed instead. + + :Parameters: + msg_hash : hash object + The hash object that holds the digest of the message being signed. + emLen : int + The length the final encoding must have, in bytes. + with_hash_parameters : bool + If True (default), include NULL parameters for the hash + algorithm in the ``digestAlgorithm`` SEQUENCE. + + :attention: the early standard (RFC2313) stated that ``DigestInfo`` + had to be BER-encoded. This means that old signatures + might have length tags in indefinite form, which + is not supported in DER. Such encoding cannot be + reproduced by this function. + + :Return: An ``emLen`` byte long string that encodes the hash. + """ + + # First, build the ASN.1 DER object DigestInfo: + # + # DigestInfo ::= SEQUENCE { + # digestAlgorithm AlgorithmIdentifier, + # digest OCTET STRING + # } + # + # where digestAlgorithm identifies the hash function and shall be an + # algorithm ID with an OID in the set PKCS1-v1-5DigestAlgorithms. + # + # PKCS1-v1-5DigestAlgorithms ALGORITHM-IDENTIFIER ::= { + # { OID id-md2 PARAMETERS NULL }| + # { OID id-md5 PARAMETERS NULL }| + # { OID id-sha1 PARAMETERS NULL }| + # { OID id-sha256 PARAMETERS NULL }| + # { OID id-sha384 PARAMETERS NULL }| + # { OID id-sha512 PARAMETERS NULL } + # } + # + # Appendix B.1 also says that for SHA-1/-2 algorithms, the parameters + # should be omitted. They may be present, but when they are, they shall + # have NULL value. + + digestAlgo = DerSequence([ DerObjectId(msg_hash.oid).encode() ]) + + if with_hash_parameters: + digestAlgo.append(DerNull().encode()) + + digest = DerOctetString(msg_hash.digest()) + digestInfo = DerSequence([ + digestAlgo.encode(), + digest.encode() + ]).encode() + + # We need at least 11 bytes for the remaining data: 3 fixed bytes and + # at least 8 bytes of padding). + if emLen bytes: ... + +class PKCS115_SigScheme: + def __init__(self, rsa_key: RsaKey) -> None: ... + def can_sign(self) -> bool: ... + def sign(self, msg_hash: Hash) -> bytes: ... + def verify(self, msg_hash: Hash, signature: bytes) -> None: ... + +def _EMSA_PKCS1_V1_5_ENCODE(msg_hash: Hash, emLen: int, with_hash_parameters: Optional[bool]=True) -> bytes: ... + +def new(rsa_key: RsaKey) -> PKCS115_SigScheme: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/pss.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/pss.py new file mode 100644 index 000000000..f2295df6c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/pss.py @@ -0,0 +1,387 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util.py3compat import bchr, bord, iter_range +import Crypto.Util.number +from Crypto.Util.number import (ceil_div, + long_to_bytes, + bytes_to_long + ) +from Crypto.Util.strxor import strxor +from Crypto import Random + + +class PSS_SigScheme: + """A signature object for ``RSASSA-PSS``. + Do not instantiate directly. + Use :func:`Crypto.Signature.pss.new`. + """ + + def __init__(self, key, mgfunc, saltLen, randfunc): + """Initialize this PKCS#1 PSS signature scheme object. + + :Parameters: + key : an RSA key object + If a private half is given, both signature and + verification are possible. + If a public half is given, only verification is possible. + mgfunc : callable + A mask generation function that accepts two parameters: + a string to use as seed, and the lenth of the mask to + generate, in bytes. + saltLen : integer + Length of the salt, in bytes. + randfunc : callable + A function that returns random bytes. + """ + + self._key = key + self._saltLen = saltLen + self._mgfunc = mgfunc + self._randfunc = randfunc + + def can_sign(self): + """Return ``True`` if this object can be used to sign messages.""" + return self._key.has_private() + + def sign(self, msg_hash): + """Create the PKCS#1 PSS signature of a message. + + This function is also called ``RSASSA-PSS-SIGN`` and + it is specified in + `section 8.1.1 of RFC8017 `_. + + :parameter msg_hash: + This is an object from the :mod:`Crypto.Hash` package. + It has been used to digest the message to sign. + :type msg_hash: hash object + + :return: the signature encoded as a *byte string*. + :raise ValueError: if the RSA key is not long enough for the given hash algorithm. + :raise TypeError: if the RSA key has no private half. + """ + + # Set defaults for salt length and mask generation function + if self._saltLen is None: + sLen = msg_hash.digest_size + else: + sLen = self._saltLen + + if self._mgfunc is None: + mgf = lambda x, y: MGF1(x, y, msg_hash) + else: + mgf = self._mgfunc + + modBits = Crypto.Util.number.size(self._key.n) + + # See 8.1.1 in RFC3447 + k = ceil_div(modBits, 8) # k is length in bytes of the modulus + # Step 1 + em = _EMSA_PSS_ENCODE(msg_hash, modBits-1, self._randfunc, mgf, sLen) + # Step 2a (OS2IP) + em_int = bytes_to_long(em) + # Step 2b (RSASP1) and Step 2c (I2OSP) + signature = self._key._decrypt_to_bytes(em_int) + # Verify no faults occurred + if em_int != pow(bytes_to_long(signature), self._key.e, self._key.n): + raise ValueError("Fault detected in RSA private key operation") + return signature + + def verify(self, msg_hash, signature): + """Check if the PKCS#1 PSS signature over a message is valid. 
+ + This function is also called ``RSASSA-PSS-VERIFY`` and + it is specified in + `section 8.1.2 of RFC8037 `_. + + :parameter msg_hash: + The hash that was carried out over the message. This is an object + belonging to the :mod:`Crypto.Hash` module. + :type parameter: hash object + + :parameter signature: + The signature that needs to be validated. + :type signature: bytes + + :raise ValueError: if the signature is not valid. + """ + + # Set defaults for salt length and mask generation function + if self._saltLen is None: + sLen = msg_hash.digest_size + else: + sLen = self._saltLen + if self._mgfunc: + mgf = self._mgfunc + else: + mgf = lambda x, y: MGF1(x, y, msg_hash) + + modBits = Crypto.Util.number.size(self._key.n) + + # See 8.1.2 in RFC3447 + k = ceil_div(modBits, 8) # Convert from bits to bytes + # Step 1 + if len(signature) != k: + raise ValueError("Incorrect signature") + # Step 2a (O2SIP) + signature_int = bytes_to_long(signature) + # Step 2b (RSAVP1) + em_int = self._key._encrypt(signature_int) + # Step 2c (I2OSP) + emLen = ceil_div(modBits - 1, 8) + em = long_to_bytes(em_int, emLen) + # Step 3/4 + _EMSA_PSS_VERIFY(msg_hash, em, modBits-1, mgf, sLen) + + +def MGF1(mgfSeed, maskLen, hash_gen): + """Mask Generation Function, described in `B.2.1 of RFC8017 + `_. + + :param mfgSeed: + seed from which the mask is generated + :type mfgSeed: byte string + + :param maskLen: + intended length in bytes of the mask + :type maskLen: integer + + :param hash_gen: + A module or a hash object from :mod:`Crypto.Hash` + :type hash_object: + + :return: the mask, as a *byte string* + """ + + T = b"" + for counter in iter_range(ceil_div(maskLen, hash_gen.digest_size)): + c = long_to_bytes(counter, 4) + hobj = hash_gen.new() + hobj.update(mgfSeed + c) + T = T + hobj.digest() + assert(len(T) >= maskLen) + return T[:maskLen] + + +def _EMSA_PSS_ENCODE(mhash, emBits, randFunc, mgf, sLen): + r""" + Implement the ``EMSA-PSS-ENCODE`` function, as defined + in PKCS#1 v2.1 (RFC3447, 9.1.1). + + The original ``EMSA-PSS-ENCODE`` actually accepts the message ``M`` + as input, and hash it internally. Here, we expect that the message + has already been hashed instead. + + :Parameters: + mhash : hash object + The hash object that holds the digest of the message being signed. + emBits : int + Maximum length of the final encoding, in bits. + randFunc : callable + An RNG function that accepts as only parameter an int, and returns + a string of random bytes, to be used as salt. + mgf : callable + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + sLen : int + Length of the salt, in bytes. + + :Return: An ``emLen`` byte long string that encodes the hash + (with ``emLen = \ceil(emBits/8)``). + + :Raise ValueError: + When digest or salt length are too big. 
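Analogously to the v1.5 scheme, a short sketch of how the ``pss`` scheme is typically used for signing and verification; with no keyword arguments, the defaults described below apply (MGF1 driven by the message hash, salt as long as the digest)::

    from Crypto.PublicKey import RSA
    from Crypto.Hash import SHA256
    from Crypto.Signature import pss

    key = RSA.generate(2048)
    h = SHA256.new(b'To be signed')

    # Sign with the default MGF1 mask and a salt as long as the digest
    signature = pss.new(key).sign(h)

    # Verification raises ValueError if the signature is not authentic
    try:
        pss.new(key.publickey()).verify(h, signature)
        print("The signature is authentic")
    except ValueError:
        print("The signature is not authentic")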
+ """ + + emLen = ceil_div(emBits, 8) + + # Bitmask of digits that fill up + lmask = 0 + for i in iter_range(8*emLen-emBits): + lmask = lmask >> 1 | 0x80 + + # Step 1 and 2 have been already done + # Step 3 + if emLen < mhash.digest_size+sLen+2: + raise ValueError("Digest or salt length are too long" + " for given key size.") + # Step 4 + salt = randFunc(sLen) + # Step 5 + m_prime = bchr(0)*8 + mhash.digest() + salt + # Step 6 + h = mhash.new() + h.update(m_prime) + # Step 7 + ps = bchr(0)*(emLen-sLen-mhash.digest_size-2) + # Step 8 + db = ps + bchr(1) + salt + # Step 9 + dbMask = mgf(h.digest(), emLen-mhash.digest_size-1) + # Step 10 + maskedDB = strxor(db, dbMask) + # Step 11 + maskedDB = bchr(bord(maskedDB[0]) & ~lmask) + maskedDB[1:] + # Step 12 + em = maskedDB + h.digest() + bchr(0xBC) + return em + + +def _EMSA_PSS_VERIFY(mhash, em, emBits, mgf, sLen): + """ + Implement the ``EMSA-PSS-VERIFY`` function, as defined + in PKCS#1 v2.1 (RFC3447, 9.1.2). + + ``EMSA-PSS-VERIFY`` actually accepts the message ``M`` as input, + and hash it internally. Here, we expect that the message has already + been hashed instead. + + :Parameters: + mhash : hash object + The hash object that holds the digest of the message to be verified. + em : string + The signature to verify, therefore proving that the sender really + signed the message that was received. + emBits : int + Length of the final encoding (em), in bits. + mgf : callable + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + sLen : int + Length of the salt, in bytes. + + :Raise ValueError: + When the encoding is inconsistent, or the digest or salt lengths + are too big. + """ + + emLen = ceil_div(emBits, 8) + + # Bitmask of digits that fill up + lmask = 0 + for i in iter_range(8*emLen-emBits): + lmask = lmask >> 1 | 0x80 + + # Step 1 and 2 have been already done + # Step 3 + if emLen < mhash.digest_size+sLen+2: + raise ValueError("Incorrect signature") + # Step 4 + if ord(em[-1:]) != 0xBC: + raise ValueError("Incorrect signature") + # Step 5 + maskedDB = em[:emLen-mhash.digest_size-1] + h = em[emLen-mhash.digest_size-1:-1] + # Step 6 + if lmask & bord(em[0]): + raise ValueError("Incorrect signature") + # Step 7 + dbMask = mgf(h, emLen-mhash.digest_size-1) + # Step 8 + db = strxor(maskedDB, dbMask) + # Step 9 + db = bchr(bord(db[0]) & ~lmask) + db[1:] + # Step 10 + if not db.startswith(bchr(0)*(emLen-mhash.digest_size-sLen-2) + bchr(1)): + raise ValueError("Incorrect signature") + # Step 11 + if sLen > 0: + salt = db[-sLen:] + else: + salt = b"" + # Step 12 + m_prime = bchr(0)*8 + mhash.digest() + salt + # Step 13 + hobj = mhash.new() + hobj.update(m_prime) + hp = hobj.digest() + # Step 14 + if h != hp: + raise ValueError("Incorrect signature") + + +def new(rsa_key, **kwargs): + """Create an object for making or verifying PKCS#1 PSS signatures. + + :parameter rsa_key: + The RSA key to use for signing or verifying the message. + This is a :class:`Crypto.PublicKey.RSA` object. + Signing is only possible when ``rsa_key`` is a **private** RSA key. + :type rsa_key: RSA object + + :Keyword Arguments: + + * *mask_func* (``callable``) -- + A function that returns the mask (as `bytes`). + It must accept two parameters: a seed (as `bytes`) + and the length of the data to return. + + If not specified, it will be the function :func:`MGF1` defined in + `RFC8017 `_ and + combined with the same hash algorithm applied to the + message to sign or verify. 
+ + If you want to use a different function, for instance still :func:`MGF1` + but together with another hash, you can do:: + + from Crypto.Hash import SHA256 + from Crypto.Signature.pss import MGF1 + mgf = lambda x, y: MGF1(x, y, SHA256) + + * *salt_bytes* (``integer``) -- + Length of the salt, in bytes. + It is a value between 0 and ``emLen - hLen - 2``, where ``emLen`` + is the size of the RSA modulus and ``hLen`` is the size of the digest + applied to the message to sign or verify. + + The salt is generated internally, you don't need to provide it. + + If not specified, the salt length will be ``hLen``. + If it is zero, the signature scheme becomes deterministic. + + Note that in some implementations such as OpenSSL the default + salt length is ``emLen - hLen - 2`` (even though it is not more + secure than ``hLen``). + + * *rand_func* (``callable``) -- + A function that returns random ``bytes``, of the desired length. + The default is :func:`Crypto.Random.get_random_bytes`. + + :return: a :class:`PSS_SigScheme` signature object + """ + + mask_func = kwargs.pop("mask_func", None) + salt_len = kwargs.pop("salt_bytes", None) + rand_func = kwargs.pop("rand_func", None) + if rand_func is None: + rand_func = Random.get_random_bytes + if kwargs: + raise ValueError("Unknown keywords: " + str(kwargs.keys())) + return PSS_SigScheme(rsa_key, mask_func, salt_len, rand_func) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/pss.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/pss.pyi new file mode 100644 index 000000000..d4088e166 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Signature/pss.pyi @@ -0,0 +1,30 @@ +from typing import Union, Callable, Optional +from typing_extensions import Protocol + +from Crypto.PublicKey.RSA import RsaKey + + +class Hash(Protocol): + def digest(self) -> bytes: ... + def update(self, bytes) -> None: ... + + +class HashModule(Protocol): + @staticmethod + def new(data: Optional[bytes]) -> Hash: ... + + +MaskFunction = Callable[[bytes, int, Union[Hash, HashModule]], bytes] +RndFunction = Callable[[int], bytes] + +class PSS_SigScheme: + def __init__(self, key: RsaKey, mgfunc: MaskFunction, saltLen: int, randfunc: RndFunction) -> None: ... + def can_sign(self) -> bool: ... + def sign(self, msg_hash: Hash) -> bytes: ... + def verify(self, msg_hash: Hash, signature: bytes) -> None: ... + + +MGF1 : MaskFunction +def _EMSA_PSS_ENCODE(mhash: Hash, emBits: int, randFunc: RndFunction, mgf:MaskFunction, sLen: int) -> str: ... +def _EMSA_PSS_VERIFY(mhash: Hash, em: str, emBits: int, mgf: MaskFunction, sLen: int) -> None: ... +def new(rsa_key: RsaKey, **kwargs: Union[MaskFunction, RndFunction, int]) -> PSS_SigScheme: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/Counter.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/Counter.py new file mode 100644 index 000000000..269b5a772 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/Counter.py @@ -0,0 +1,79 @@ +# -*- coding: ascii -*- +# +# Util/Counter.py : Fast counter for use with CTR-mode ciphers +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +def new(nbits, prefix=b"", suffix=b"", initial_value=1, little_endian=False, allow_wraparound=False): + """Create a stateful counter block function suitable for CTR encryption modes. + + Each call to the function returns the next counter block. + Each counter block is made up by three parts: + + +------+--------------+-------+ + |prefix| counter value|postfix| + +------+--------------+-------+ + + The counter value is incremented by 1 at each call. + + Args: + nbits (integer): + Length of the desired counter value, in bits. It must be a multiple of 8. + prefix (byte string): + The constant prefix of the counter block. By default, no prefix is + used. + suffix (byte string): + The constant postfix of the counter block. By default, no suffix is + used. + initial_value (integer): + The initial value of the counter. Default value is 1. + Its length in bits must not exceed the argument ``nbits``. + little_endian (boolean): + If ``True``, the counter number will be encoded in little endian format. + If ``False`` (default), in big endian format. + allow_wraparound (boolean): + This parameter is ignored. + An ``OverflowError`` exception is always raised when the counter wraps + around to zero. + Returns: + An object that can be passed with the :data:`counter` parameter to a CTR mode + cipher. + + It must hold that *len(prefix) + nbits//8 + len(suffix)* matches the + block size of the underlying block cipher. + """ + + if (nbits % 8) != 0: + raise ValueError("'nbits' must be a multiple of 8") + + iv_bl = initial_value.bit_length() + if iv_bl > nbits: + raise ValueError("Initial value takes %d bits but it is longer than " + "the counter (%d bits)" % + (iv_bl, nbits)) + + # Ignore wraparound + return {"counter_len": nbits // 8, + "prefix": prefix, + "suffix": suffix, + "initial_value": initial_value, + "little_endian": little_endian + } diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/Counter.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/Counter.pyi new file mode 100644 index 000000000..fa2ffdd8f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/Counter.pyi @@ -0,0 +1,5 @@ +from typing import Optional, Union, Dict + +def new(nbits: int, prefix: Optional[bytes]=..., suffix: Optional[bytes]=..., initial_value: Optional[int]=1, + little_endian: Optional[bool]=False, allow_wraparound: Optional[bool]=False) -> \ + Dict[str, Union[int, bytes, bool]]: ... 
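As a usage sketch for ``Counter.new``, paired here with AES-CTR from ``Crypto.Cipher`` (whose 16-byte block size matches a full 128-bit counter block with no prefix or suffix; key and plaintext are arbitrary)::

    from Crypto.Cipher import AES
    from Crypto.Random import get_random_bytes
    from Crypto.Util import Counter

    key = get_random_bytes(16)

    # A full 128-bit, big-endian counter starting at 1
    ctr = Counter.new(128, initial_value=1)
    cipher = AES.new(key, AES.MODE_CTR, counter=ctr)
    ciphertext = cipher.encrypt(b'CTR mode needs no padding')

    # Decryption must restart from an identical counter
    ctr = Counter.new(128, initial_value=1)
    cipher = AES.new(key, AES.MODE_CTR, counter=ctr)
    assert cipher.decrypt(ciphertext) == b'CTR mode needs no padding'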
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/Padding.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/Padding.py new file mode 100644 index 000000000..da69e5598 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/Padding.py @@ -0,0 +1,108 @@ +# +# Util/Padding.py : Functions to manage padding +# +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +__all__ = [ 'pad', 'unpad' ] + +from Crypto.Util.py3compat import * + + +def pad(data_to_pad, block_size, style='pkcs7'): + """Apply standard padding. + + Args: + data_to_pad (byte string): + The data that needs to be padded. + block_size (integer): + The block boundary to use for padding. The output length is guaranteed + to be a multiple of :data:`block_size`. + style (string): + Padding algorithm. It can be *'pkcs7'* (default), *'iso7816'* or *'x923'*. + + Return: + byte string : the original data with the appropriate padding added at the end. + """ + + padding_len = block_size-len(data_to_pad)%block_size + if style == 'pkcs7': + padding = bchr(padding_len)*padding_len + elif style == 'x923': + padding = bchr(0)*(padding_len-1) + bchr(padding_len) + elif style == 'iso7816': + padding = bchr(128) + bchr(0)*(padding_len-1) + else: + raise ValueError("Unknown padding style") + return data_to_pad + padding + + +def unpad(padded_data, block_size, style='pkcs7'): + """Remove standard padding. + + Args: + padded_data (byte string): + A piece of data with padding that needs to be stripped. + block_size (integer): + The block boundary to use for padding. The input length + must be a multiple of :data:`block_size`. + style (string): + Padding algorithm. It can be *'pkcs7'* (default), *'iso7816'* or *'x923'*. + Return: + byte string : data without padding. + Raises: + ValueError: if the padding is incorrect. 
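Putting ``pad`` and ``unpad`` together, roughly (the 16-byte block size mirrors AES; the payload is arbitrary)::

    from Crypto.Util.Padding import pad, unpad

    data = b'hello'

    # PKCS#7 (default): every padding byte holds the padding length
    padded = pad(data, 16)
    assert padded == data + b'\x0b' * 11
    assert unpad(padded, 16) == data

    # ISO 7816-4: a 0x80 marker followed by zero bytes
    assert unpad(pad(data, 16, style='iso7816'), 16, style='iso7816') == data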
+ """ + + pdata_len = len(padded_data) + if pdata_len == 0: + raise ValueError("Zero-length input cannot be unpadded") + if pdata_len % block_size: + raise ValueError("Input data is not padded") + if style in ('pkcs7', 'x923'): + padding_len = bord(padded_data[-1]) + if padding_len<1 or padding_len>min(block_size, pdata_len): + raise ValueError("Padding is incorrect.") + if style == 'pkcs7': + if padded_data[-padding_len:]!=bchr(padding_len)*padding_len: + raise ValueError("PKCS#7 padding is incorrect.") + else: + if padded_data[-padding_len:-1]!=bchr(0)*(padding_len-1): + raise ValueError("ANSI X.923 padding is incorrect.") + elif style == 'iso7816': + padding_len = pdata_len - padded_data.rfind(bchr(128)) + if padding_len<1 or padding_len>min(block_size, pdata_len): + raise ValueError("Padding is incorrect.") + if padding_len>1 and padded_data[1-padding_len:]!=bchr(0)*(padding_len-1): + raise ValueError("ISO 7816-4 padding is incorrect.") + else: + raise ValueError("Unknown padding style") + return padded_data[:-padding_len] + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/Padding.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/Padding.pyi new file mode 100644 index 000000000..4d8d30df3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/Padding.pyi @@ -0,0 +1,6 @@ +from typing import Optional + +__all__ = [ 'pad', 'unpad' ] + +def pad(data_to_pad: bytes, block_size: int, style: Optional[str]='pkcs7') -> bytes: ... +def unpad(padded_data: bytes, block_size: int, style: Optional[str]='pkcs7') -> bytes: ... \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/RFC1751.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/RFC1751.py new file mode 100644 index 000000000..9ed52d2cb --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/RFC1751.py @@ -0,0 +1,386 @@ +# rfc1751.py : Converts between 128-bit strings and a human-readable +# sequence of words, as defined in RFC1751: "A Convention for +# Human-Readable 128-bit Keys", by Daniel L. McDonald. +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew M. Kuchling and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +from __future__ import print_function + +import binascii + +from Crypto.Util.py3compat import bord, bchr + +binary = {0: '0000', 1: '0001', 2: '0010', 3: '0011', 4: '0100', 5: '0101', + 6: '0110', 7: '0111', 8: '1000', 9: '1001', 10: '1010', 11: '1011', + 12: '1100', 13: '1101', 14: '1110', 15: '1111'} + + +def _key2bin(s): + "Convert a key into a string of binary digits" + kl = map(lambda x: bord(x), s) + kl = map(lambda x: binary[x >> 4] + binary[x & 15], kl) + return ''.join(kl) + + +def _extract(key, start, length): + """Extract a bitstring(2.x)/bytestring(2.x) from a string of binary digits, and return its + numeric value.""" + + result = 0 + for y in key[start:start+length]: + result = result * 2 + ord(y) - 48 + return result + + +def key_to_english(key): + """Transform an arbitrary key into a string containing English words. + + Example:: + + >>> from Crypto.Util.RFC1751 import key_to_english + >>> key_to_english(b'66666666') + 'RAM LOIS GOAD CREW CARE HIT' + + Args: + key (byte string): + The key to convert. Its length must be a multiple of 8. + Return: + A string of English words. + """ + + if len(key) % 8 != 0: + raise ValueError('The length of the key must be a multiple of 8.') + + english = '' + for index in range(0, len(key), 8): # Loop over 8-byte subkeys + subkey = key[index:index + 8] + # Compute the parity of the key + skbin = _key2bin(subkey) + p = 0 + for i in range(0, 64, 2): + p = p + _extract(skbin, i, 2) + # Append parity bits to the subkey + skbin = _key2bin(subkey + bchr((p << 6) & 255)) + for i in range(0, 64, 11): + english = english + wordlist[_extract(skbin, i, 11)] + ' ' + + return english.strip() + + +def english_to_key(s): + """Transform a string into a corresponding key. + + Example:: + + >>> from Crypto.Util.RFC1751 import english_to_key + >>> english_to_key('RAM LOIS GOAD CREW CARE HIT') + b'66666666' + + Args: + s (string): the string with the words separated by whitespace; + the number of words must be a multiple of 6. + Return: + A byte string. 
+ """ + + L = s.upper().split() + key = b'' + for index in range(0, len(L), 6): + sublist = L[index:index + 6] + char = 9 * [0] + bits = 0 + for i in sublist: + index = wordlist.index(i) + shift = (8 - (bits + 11) % 8) % 8 + y = index << shift + cl, cc, cr = (y >> 16), (y >> 8) & 0xff, y & 0xff + if (shift > 5): + char[bits >> 3] = char[bits >> 3] | cl + char[(bits >> 3) + 1] = char[(bits >> 3) + 1] | cc + char[(bits >> 3) + 2] = char[(bits >> 3) + 2] | cr + elif shift > -3: + char[bits >> 3] = char[bits >> 3] | cc + char[(bits >> 3) + 1] = char[(bits >> 3) + 1] | cr + else: + char[bits >> 3] = char[bits >> 3] | cr + bits = bits + 11 + + subkey = b'' + for y in char: + subkey = subkey + bchr(y) + + # Check the parity of the resulting key + skbin = _key2bin(subkey) + p = 0 + for i in range(0, 64, 2): + p = p + _extract(skbin, i, 2) + if (p & 3) != _extract(skbin, 64, 2): + raise ValueError("Parity error in resulting key") + key = key + subkey[0:8] + return key + + +wordlist = [ + "A", "ABE", "ACE", "ACT", "AD", "ADA", "ADD", + "AGO", "AID", "AIM", "AIR", "ALL", "ALP", "AM", "AMY", "AN", "ANA", + "AND", "ANN", "ANT", "ANY", "APE", "APS", "APT", "ARC", "ARE", "ARK", + "ARM", "ART", "AS", "ASH", "ASK", "AT", "ATE", "AUG", "AUK", "AVE", + "AWE", "AWK", "AWL", "AWN", "AX", "AYE", "BAD", "BAG", "BAH", "BAM", + "BAN", "BAR", "BAT", "BAY", "BE", "BED", "BEE", "BEG", "BEN", "BET", + "BEY", "BIB", "BID", "BIG", "BIN", "BIT", "BOB", "BOG", "BON", "BOO", + "BOP", "BOW", "BOY", "BUB", "BUD", "BUG", "BUM", "BUN", "BUS", "BUT", + "BUY", "BY", "BYE", "CAB", "CAL", "CAM", "CAN", "CAP", "CAR", "CAT", + "CAW", "COD", "COG", "COL", "CON", "COO", "COP", "COT", "COW", "COY", + "CRY", "CUB", "CUE", "CUP", "CUR", "CUT", "DAB", "DAD", "DAM", "DAN", + "DAR", "DAY", "DEE", "DEL", "DEN", "DES", "DEW", "DID", "DIE", "DIG", + "DIN", "DIP", "DO", "DOE", "DOG", "DON", "DOT", "DOW", "DRY", "DUB", + "DUD", "DUE", "DUG", "DUN", "EAR", "EAT", "ED", "EEL", "EGG", "EGO", + "ELI", "ELK", "ELM", "ELY", "EM", "END", "EST", "ETC", "EVA", "EVE", + "EWE", "EYE", "FAD", "FAN", "FAR", "FAT", "FAY", "FED", "FEE", "FEW", + "FIB", "FIG", "FIN", "FIR", "FIT", "FLO", "FLY", "FOE", "FOG", "FOR", + "FRY", "FUM", "FUN", "FUR", "GAB", "GAD", "GAG", "GAL", "GAM", "GAP", + "GAS", "GAY", "GEE", "GEL", "GEM", "GET", "GIG", "GIL", "GIN", "GO", + "GOT", "GUM", "GUN", "GUS", "GUT", "GUY", "GYM", "GYP", "HA", "HAD", + "HAL", "HAM", "HAN", "HAP", "HAS", "HAT", "HAW", "HAY", "HE", "HEM", + "HEN", "HER", "HEW", "HEY", "HI", "HID", "HIM", "HIP", "HIS", "HIT", + "HO", "HOB", "HOC", "HOE", "HOG", "HOP", "HOT", "HOW", "HUB", "HUE", + "HUG", "HUH", "HUM", "HUT", "I", "ICY", "IDA", "IF", "IKE", "ILL", + "INK", "INN", "IO", "ION", "IQ", "IRA", "IRE", "IRK", "IS", "IT", + "ITS", "IVY", "JAB", "JAG", "JAM", "JAN", "JAR", "JAW", "JAY", "JET", + "JIG", "JIM", "JO", "JOB", "JOE", "JOG", "JOT", "JOY", "JUG", "JUT", + "KAY", "KEG", "KEN", "KEY", "KID", "KIM", "KIN", "KIT", "LA", "LAB", + "LAC", "LAD", "LAG", "LAM", "LAP", "LAW", "LAY", "LEA", "LED", "LEE", + "LEG", "LEN", "LEO", "LET", "LEW", "LID", "LIE", "LIN", "LIP", "LIT", + "LO", "LOB", "LOG", "LOP", "LOS", "LOT", "LOU", "LOW", "LOY", "LUG", + "LYE", "MA", "MAC", "MAD", "MAE", "MAN", "MAO", "MAP", "MAT", "MAW", + "MAY", "ME", "MEG", "MEL", "MEN", "MET", "MEW", "MID", "MIN", "MIT", + "MOB", "MOD", "MOE", "MOO", "MOP", "MOS", "MOT", "MOW", "MUD", "MUG", + "MUM", "MY", "NAB", "NAG", "NAN", "NAP", "NAT", "NAY", "NE", "NED", + "NEE", "NET", "NEW", "NIB", "NIL", "NIP", "NIT", "NO", "NOB", "NOD", + "NON", "NOR", "NOT", 
"NOV", "NOW", "NU", "NUN", "NUT", "O", "OAF", + "OAK", "OAR", "OAT", "ODD", "ODE", "OF", "OFF", "OFT", "OH", "OIL", + "OK", "OLD", "ON", "ONE", "OR", "ORB", "ORE", "ORR", "OS", "OTT", + "OUR", "OUT", "OVA", "OW", "OWE", "OWL", "OWN", "OX", "PA", "PAD", + "PAL", "PAM", "PAN", "PAP", "PAR", "PAT", "PAW", "PAY", "PEA", "PEG", + "PEN", "PEP", "PER", "PET", "PEW", "PHI", "PI", "PIE", "PIN", "PIT", + "PLY", "PO", "POD", "POE", "POP", "POT", "POW", "PRO", "PRY", "PUB", + "PUG", "PUN", "PUP", "PUT", "QUO", "RAG", "RAM", "RAN", "RAP", "RAT", + "RAW", "RAY", "REB", "RED", "REP", "RET", "RIB", "RID", "RIG", "RIM", + "RIO", "RIP", "ROB", "ROD", "ROE", "RON", "ROT", "ROW", "ROY", "RUB", + "RUE", "RUG", "RUM", "RUN", "RYE", "SAC", "SAD", "SAG", "SAL", "SAM", + "SAN", "SAP", "SAT", "SAW", "SAY", "SEA", "SEC", "SEE", "SEN", "SET", + "SEW", "SHE", "SHY", "SIN", "SIP", "SIR", "SIS", "SIT", "SKI", "SKY", + "SLY", "SO", "SOB", "SOD", "SON", "SOP", "SOW", "SOY", "SPA", "SPY", + "SUB", "SUD", "SUE", "SUM", "SUN", "SUP", "TAB", "TAD", "TAG", "TAN", + "TAP", "TAR", "TEA", "TED", "TEE", "TEN", "THE", "THY", "TIC", "TIE", + "TIM", "TIN", "TIP", "TO", "TOE", "TOG", "TOM", "TON", "TOO", "TOP", + "TOW", "TOY", "TRY", "TUB", "TUG", "TUM", "TUN", "TWO", "UN", "UP", + "US", "USE", "VAN", "VAT", "VET", "VIE", "WAD", "WAG", "WAR", "WAS", + "WAY", "WE", "WEB", "WED", "WEE", "WET", "WHO", "WHY", "WIN", "WIT", + "WOK", "WON", "WOO", "WOW", "WRY", "WU", "YAM", "YAP", "YAW", "YE", + "YEA", "YES", "YET", "YOU", "ABED", "ABEL", "ABET", "ABLE", "ABUT", + "ACHE", "ACID", "ACME", "ACRE", "ACTA", "ACTS", "ADAM", "ADDS", + "ADEN", "AFAR", "AFRO", "AGEE", "AHEM", "AHOY", "AIDA", "AIDE", + "AIDS", "AIRY", "AJAR", "AKIN", "ALAN", "ALEC", "ALGA", "ALIA", + "ALLY", "ALMA", "ALOE", "ALSO", "ALTO", "ALUM", "ALVA", "AMEN", + "AMES", "AMID", "AMMO", "AMOK", "AMOS", "AMRA", "ANDY", "ANEW", + "ANNA", "ANNE", "ANTE", "ANTI", "AQUA", "ARAB", "ARCH", "AREA", + "ARGO", "ARID", "ARMY", "ARTS", "ARTY", "ASIA", "ASKS", "ATOM", + "AUNT", "AURA", "AUTO", "AVER", "AVID", "AVIS", "AVON", "AVOW", + "AWAY", "AWRY", "BABE", "BABY", "BACH", "BACK", "BADE", "BAIL", + "BAIT", "BAKE", "BALD", "BALE", "BALI", "BALK", "BALL", "BALM", + "BAND", "BANE", "BANG", "BANK", "BARB", "BARD", "BARE", "BARK", + "BARN", "BARR", "BASE", "BASH", "BASK", "BASS", "BATE", "BATH", + "BAWD", "BAWL", "BEAD", "BEAK", "BEAM", "BEAN", "BEAR", "BEAT", + "BEAU", "BECK", "BEEF", "BEEN", "BEER", + "BEET", "BELA", "BELL", "BELT", "BEND", "BENT", "BERG", "BERN", + "BERT", "BESS", "BEST", "BETA", "BETH", "BHOY", "BIAS", "BIDE", + "BIEN", "BILE", "BILK", "BILL", "BIND", "BING", "BIRD", "BITE", + "BITS", "BLAB", "BLAT", "BLED", "BLEW", "BLOB", "BLOC", "BLOT", + "BLOW", "BLUE", "BLUM", "BLUR", "BOAR", "BOAT", "BOCA", "BOCK", + "BODE", "BODY", "BOGY", "BOHR", "BOIL", "BOLD", "BOLO", "BOLT", + "BOMB", "BONA", "BOND", "BONE", "BONG", "BONN", "BONY", "BOOK", + "BOOM", "BOON", "BOOT", "BORE", "BORG", "BORN", "BOSE", "BOSS", + "BOTH", "BOUT", "BOWL", "BOYD", "BRAD", "BRAE", "BRAG", "BRAN", + "BRAY", "BRED", "BREW", "BRIG", "BRIM", "BROW", "BUCK", "BUDD", + "BUFF", "BULB", "BULK", "BULL", "BUNK", "BUNT", "BUOY", "BURG", + "BURL", "BURN", "BURR", "BURT", "BURY", "BUSH", "BUSS", "BUST", + "BUSY", "BYTE", "CADY", "CAFE", "CAGE", "CAIN", "CAKE", "CALF", + "CALL", "CALM", "CAME", "CANE", "CANT", "CARD", "CARE", "CARL", + "CARR", "CART", "CASE", "CASH", "CASK", "CAST", "CAVE", "CEIL", + "CELL", "CENT", "CERN", "CHAD", "CHAR", "CHAT", "CHAW", "CHEF", + "CHEN", "CHEW", "CHIC", "CHIN", "CHOU", "CHOW", "CHUB", 
"CHUG", + "CHUM", "CITE", "CITY", "CLAD", "CLAM", "CLAN", "CLAW", "CLAY", + "CLOD", "CLOG", "CLOT", "CLUB", "CLUE", "COAL", "COAT", "COCA", + "COCK", "COCO", "CODA", "CODE", "CODY", "COED", "COIL", "COIN", + "COKE", "COLA", "COLD", "COLT", "COMA", "COMB", "COME", "COOK", + "COOL", "COON", "COOT", "CORD", "CORE", "CORK", "CORN", "COST", + "COVE", "COWL", "CRAB", "CRAG", "CRAM", "CRAY", "CREW", "CRIB", + "CROW", "CRUD", "CUBA", "CUBE", "CUFF", "CULL", "CULT", "CUNY", + "CURB", "CURD", "CURE", "CURL", "CURT", "CUTS", "DADE", "DALE", + "DAME", "DANA", "DANE", "DANG", "DANK", "DARE", "DARK", "DARN", + "DART", "DASH", "DATA", "DATE", "DAVE", "DAVY", "DAWN", "DAYS", + "DEAD", "DEAF", "DEAL", "DEAN", "DEAR", "DEBT", "DECK", "DEED", + "DEEM", "DEER", "DEFT", "DEFY", "DELL", "DENT", "DENY", "DESK", + "DIAL", "DICE", "DIED", "DIET", "DIME", "DINE", "DING", "DINT", + "DIRE", "DIRT", "DISC", "DISH", "DISK", "DIVE", "DOCK", "DOES", + "DOLE", "DOLL", "DOLT", "DOME", "DONE", "DOOM", "DOOR", "DORA", + "DOSE", "DOTE", "DOUG", "DOUR", "DOVE", "DOWN", "DRAB", "DRAG", + "DRAM", "DRAW", "DREW", "DRUB", "DRUG", "DRUM", "DUAL", "DUCK", + "DUCT", "DUEL", "DUET", "DUKE", "DULL", "DUMB", "DUNE", "DUNK", + "DUSK", "DUST", "DUTY", "EACH", "EARL", "EARN", "EASE", "EAST", + "EASY", "EBEN", "ECHO", "EDDY", "EDEN", "EDGE", "EDGY", "EDIT", + "EDNA", "EGAN", "ELAN", "ELBA", "ELLA", "ELSE", "EMIL", "EMIT", + "EMMA", "ENDS", "ERIC", "EROS", "EVEN", "EVER", "EVIL", "EYED", + "FACE", "FACT", "FADE", "FAIL", "FAIN", "FAIR", "FAKE", "FALL", + "FAME", "FANG", "FARM", "FAST", "FATE", "FAWN", "FEAR", "FEAT", + "FEED", "FEEL", "FEET", "FELL", "FELT", "FEND", "FERN", "FEST", + "FEUD", "FIEF", "FIGS", "FILE", "FILL", "FILM", "FIND", "FINE", + "FINK", "FIRE", "FIRM", "FISH", "FISK", "FIST", "FITS", "FIVE", + "FLAG", "FLAK", "FLAM", "FLAT", "FLAW", "FLEA", "FLED", "FLEW", + "FLIT", "FLOC", "FLOG", "FLOW", "FLUB", "FLUE", "FOAL", "FOAM", + "FOGY", "FOIL", "FOLD", "FOLK", "FOND", "FONT", "FOOD", "FOOL", + "FOOT", "FORD", "FORE", "FORK", "FORM", "FORT", "FOSS", "FOUL", + "FOUR", "FOWL", "FRAU", "FRAY", "FRED", "FREE", "FRET", "FREY", + "FROG", "FROM", "FUEL", "FULL", "FUME", "FUND", "FUNK", "FURY", + "FUSE", "FUSS", "GAFF", "GAGE", "GAIL", "GAIN", "GAIT", "GALA", + "GALE", "GALL", "GALT", "GAME", "GANG", "GARB", "GARY", "GASH", + "GATE", "GAUL", "GAUR", "GAVE", "GAWK", "GEAR", "GELD", "GENE", + "GENT", "GERM", "GETS", "GIBE", "GIFT", "GILD", "GILL", "GILT", + "GINA", "GIRD", "GIRL", "GIST", "GIVE", "GLAD", "GLEE", "GLEN", + "GLIB", "GLOB", "GLOM", "GLOW", "GLUE", "GLUM", "GLUT", "GOAD", + "GOAL", "GOAT", "GOER", "GOES", "GOLD", "GOLF", "GONE", "GONG", + "GOOD", "GOOF", "GORE", "GORY", "GOSH", "GOUT", "GOWN", "GRAB", + "GRAD", "GRAY", "GREG", "GREW", "GREY", "GRID", "GRIM", "GRIN", + "GRIT", "GROW", "GRUB", "GULF", "GULL", "GUNK", "GURU", "GUSH", + "GUST", "GWEN", "GWYN", "HAAG", "HAAS", "HACK", "HAIL", "HAIR", + "HALE", "HALF", "HALL", "HALO", "HALT", "HAND", "HANG", "HANK", + "HANS", "HARD", "HARK", "HARM", "HART", "HASH", "HAST", "HATE", + "HATH", "HAUL", "HAVE", "HAWK", "HAYS", "HEAD", "HEAL", "HEAR", + "HEAT", "HEBE", "HECK", "HEED", "HEEL", "HEFT", "HELD", "HELL", + "HELM", "HERB", "HERD", "HERE", "HERO", "HERS", "HESS", "HEWN", + "HICK", "HIDE", "HIGH", "HIKE", "HILL", "HILT", "HIND", "HINT", + "HIRE", "HISS", "HIVE", "HOBO", "HOCK", "HOFF", "HOLD", "HOLE", + "HOLM", "HOLT", "HOME", "HONE", "HONK", "HOOD", "HOOF", "HOOK", + "HOOT", "HORN", "HOSE", "HOST", "HOUR", "HOVE", "HOWE", "HOWL", + "HOYT", "HUCK", "HUED", "HUFF", "HUGE", 
"HUGH", "HUGO", "HULK", + "HULL", "HUNK", "HUNT", "HURD", "HURL", "HURT", "HUSH", "HYDE", + "HYMN", "IBIS", "ICON", "IDEA", "IDLE", "IFFY", "INCA", "INCH", + "INTO", "IONS", "IOTA", "IOWA", "IRIS", "IRMA", "IRON", "ISLE", + "ITCH", "ITEM", "IVAN", "JACK", "JADE", "JAIL", "JAKE", "JANE", + "JAVA", "JEAN", "JEFF", "JERK", "JESS", "JEST", "JIBE", "JILL", + "JILT", "JIVE", "JOAN", "JOBS", "JOCK", "JOEL", "JOEY", "JOHN", + "JOIN", "JOKE", "JOLT", "JOVE", "JUDD", "JUDE", "JUDO", "JUDY", + "JUJU", "JUKE", "JULY", "JUNE", "JUNK", "JUNO", "JURY", "JUST", + "JUTE", "KAHN", "KALE", "KANE", "KANT", "KARL", "KATE", "KEEL", + "KEEN", "KENO", "KENT", "KERN", "KERR", "KEYS", "KICK", "KILL", + "KIND", "KING", "KIRK", "KISS", "KITE", "KLAN", "KNEE", "KNEW", + "KNIT", "KNOB", "KNOT", "KNOW", "KOCH", "KONG", "KUDO", "KURD", + "KURT", "KYLE", "LACE", "LACK", "LACY", "LADY", "LAID", "LAIN", + "LAIR", "LAKE", "LAMB", "LAME", "LAND", "LANE", "LANG", "LARD", + "LARK", "LASS", "LAST", "LATE", "LAUD", "LAVA", "LAWN", "LAWS", + "LAYS", "LEAD", "LEAF", "LEAK", "LEAN", "LEAR", "LEEK", "LEER", + "LEFT", "LEND", "LENS", "LENT", "LEON", "LESK", "LESS", "LEST", + "LETS", "LIAR", "LICE", "LICK", "LIED", "LIEN", "LIES", "LIEU", + "LIFE", "LIFT", "LIKE", "LILA", "LILT", "LILY", "LIMA", "LIMB", + "LIME", "LIND", "LINE", "LINK", "LINT", "LION", "LISA", "LIST", + "LIVE", "LOAD", "LOAF", "LOAM", "LOAN", "LOCK", "LOFT", "LOGE", + "LOIS", "LOLA", "LONE", "LONG", "LOOK", "LOON", "LOOT", "LORD", + "LORE", "LOSE", "LOSS", "LOST", "LOUD", "LOVE", "LOWE", "LUCK", + "LUCY", "LUGE", "LUKE", "LULU", "LUND", "LUNG", "LURA", "LURE", + "LURK", "LUSH", "LUST", "LYLE", "LYNN", "LYON", "LYRA", "MACE", + "MADE", "MAGI", "MAID", "MAIL", "MAIN", "MAKE", "MALE", "MALI", + "MALL", "MALT", "MANA", "MANN", "MANY", "MARC", "MARE", "MARK", + "MARS", "MART", "MARY", "MASH", "MASK", "MASS", "MAST", "MATE", + "MATH", "MAUL", "MAYO", "MEAD", "MEAL", "MEAN", "MEAT", "MEEK", + "MEET", "MELD", "MELT", "MEMO", "MEND", "MENU", "MERT", "MESH", + "MESS", "MICE", "MIKE", "MILD", "MILE", "MILK", "MILL", "MILT", + "MIMI", "MIND", "MINE", "MINI", "MINK", "MINT", "MIRE", "MISS", + "MIST", "MITE", "MITT", "MOAN", "MOAT", "MOCK", "MODE", "MOLD", + "MOLE", "MOLL", "MOLT", "MONA", "MONK", "MONT", "MOOD", "MOON", + "MOOR", "MOOT", "MORE", "MORN", "MORT", "MOSS", "MOST", "MOTH", + "MOVE", "MUCH", "MUCK", "MUDD", "MUFF", "MULE", "MULL", "MURK", + "MUSH", "MUST", "MUTE", "MUTT", "MYRA", "MYTH", "NAGY", "NAIL", + "NAIR", "NAME", "NARY", "NASH", "NAVE", "NAVY", "NEAL", "NEAR", + "NEAT", "NECK", "NEED", "NEIL", "NELL", "NEON", "NERO", "NESS", + "NEST", "NEWS", "NEWT", "NIBS", "NICE", "NICK", "NILE", "NINA", + "NINE", "NOAH", "NODE", "NOEL", "NOLL", "NONE", "NOOK", "NOON", + "NORM", "NOSE", "NOTE", "NOUN", "NOVA", "NUDE", "NULL", "NUMB", + "OATH", "OBEY", "OBOE", "ODIN", "OHIO", "OILY", "OINT", "OKAY", + "OLAF", "OLDY", "OLGA", "OLIN", "OMAN", "OMEN", "OMIT", "ONCE", + "ONES", "ONLY", "ONTO", "ONUS", "ORAL", "ORGY", "OSLO", "OTIS", + "OTTO", "OUCH", "OUST", "OUTS", "OVAL", "OVEN", "OVER", "OWLY", + "OWNS", "QUAD", "QUIT", "QUOD", "RACE", "RACK", "RACY", "RAFT", + "RAGE", "RAID", "RAIL", "RAIN", "RAKE", "RANK", "RANT", "RARE", + "RASH", "RATE", "RAVE", "RAYS", "READ", "REAL", "REAM", "REAR", + "RECK", "REED", "REEF", "REEK", "REEL", "REID", "REIN", "RENA", + "REND", "RENT", "REST", "RICE", "RICH", "RICK", "RIDE", "RIFT", + "RILL", "RIME", "RING", "RINK", "RISE", "RISK", "RITE", "ROAD", + "ROAM", "ROAR", "ROBE", "ROCK", "RODE", "ROIL", "ROLL", "ROME", + "ROOD", "ROOF", "ROOK", 
"ROOM", "ROOT", "ROSA", "ROSE", "ROSS", + "ROSY", "ROTH", "ROUT", "ROVE", "ROWE", "ROWS", "RUBE", "RUBY", + "RUDE", "RUDY", "RUIN", "RULE", "RUNG", "RUNS", "RUNT", "RUSE", + "RUSH", "RUSK", "RUSS", "RUST", "RUTH", "SACK", "SAFE", "SAGE", + "SAID", "SAIL", "SALE", "SALK", "SALT", "SAME", "SAND", "SANE", + "SANG", "SANK", "SARA", "SAUL", "SAVE", "SAYS", "SCAN", "SCAR", + "SCAT", "SCOT", "SEAL", "SEAM", "SEAR", "SEAT", "SEED", "SEEK", + "SEEM", "SEEN", "SEES", "SELF", "SELL", "SEND", "SENT", "SETS", + "SEWN", "SHAG", "SHAM", "SHAW", "SHAY", "SHED", "SHIM", "SHIN", + "SHOD", "SHOE", "SHOT", "SHOW", "SHUN", "SHUT", "SICK", "SIDE", + "SIFT", "SIGH", "SIGN", "SILK", "SILL", "SILO", "SILT", "SINE", + "SING", "SINK", "SIRE", "SITE", "SITS", "SITU", "SKAT", "SKEW", + "SKID", "SKIM", "SKIN", "SKIT", "SLAB", "SLAM", "SLAT", "SLAY", + "SLED", "SLEW", "SLID", "SLIM", "SLIT", "SLOB", "SLOG", "SLOT", + "SLOW", "SLUG", "SLUM", "SLUR", "SMOG", "SMUG", "SNAG", "SNOB", + "SNOW", "SNUB", "SNUG", "SOAK", "SOAR", "SOCK", "SODA", "SOFA", + "SOFT", "SOIL", "SOLD", "SOME", "SONG", "SOON", "SOOT", "SORE", + "SORT", "SOUL", "SOUR", "SOWN", "STAB", "STAG", "STAN", "STAR", + "STAY", "STEM", "STEW", "STIR", "STOW", "STUB", "STUN", "SUCH", + "SUDS", "SUIT", "SULK", "SUMS", "SUNG", "SUNK", "SURE", "SURF", + "SWAB", "SWAG", "SWAM", "SWAN", "SWAT", "SWAY", "SWIM", "SWUM", + "TACK", "TACT", "TAIL", "TAKE", "TALE", "TALK", "TALL", "TANK", + "TASK", "TATE", "TAUT", "TEAL", "TEAM", "TEAR", "TECH", "TEEM", + "TEEN", "TEET", "TELL", "TEND", "TENT", "TERM", "TERN", "TESS", + "TEST", "THAN", "THAT", "THEE", "THEM", "THEN", "THEY", "THIN", + "THIS", "THUD", "THUG", "TICK", "TIDE", "TIDY", "TIED", "TIER", + "TILE", "TILL", "TILT", "TIME", "TINA", "TINE", "TINT", "TINY", + "TIRE", "TOAD", "TOGO", "TOIL", "TOLD", "TOLL", "TONE", "TONG", + "TONY", "TOOK", "TOOL", "TOOT", "TORE", "TORN", "TOTE", "TOUR", + "TOUT", "TOWN", "TRAG", "TRAM", "TRAY", "TREE", "TREK", "TRIG", + "TRIM", "TRIO", "TROD", "TROT", "TROY", "TRUE", "TUBA", "TUBE", + "TUCK", "TUFT", "TUNA", "TUNE", "TUNG", "TURF", "TURN", "TUSK", + "TWIG", "TWIN", "TWIT", "ULAN", "UNIT", "URGE", "USED", "USER", + "USES", "UTAH", "VAIL", "VAIN", "VALE", "VARY", "VASE", "VAST", + "VEAL", "VEDA", "VEIL", "VEIN", "VEND", "VENT", "VERB", "VERY", + "VETO", "VICE", "VIEW", "VINE", "VISE", "VOID", "VOLT", "VOTE", + "WACK", "WADE", "WAGE", "WAIL", "WAIT", "WAKE", "WALE", "WALK", + "WALL", "WALT", "WAND", "WANE", "WANG", "WANT", "WARD", "WARM", + "WARN", "WART", "WASH", "WAST", "WATS", "WATT", "WAVE", "WAVY", + "WAYS", "WEAK", "WEAL", "WEAN", "WEAR", "WEED", "WEEK", "WEIR", + "WELD", "WELL", "WELT", "WENT", "WERE", "WERT", "WEST", "WHAM", + "WHAT", "WHEE", "WHEN", "WHET", "WHOA", "WHOM", "WICK", "WIFE", + "WILD", "WILL", "WIND", "WINE", "WING", "WINK", "WINO", "WIRE", + "WISE", "WISH", "WITH", "WOLF", "WONT", "WOOD", "WOOL", "WORD", + "WORE", "WORK", "WORM", "WORN", "WOVE", "WRIT", "WYNN", "YALE", + "YANG", "YANK", "YARD", "YARN", "YAWL", "YAWN", "YEAH", "YEAR", + "YELL", "YOGA", "YOKE" ] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/RFC1751.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/RFC1751.pyi new file mode 100644 index 000000000..6ad07ff66 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/RFC1751.pyi @@ -0,0 +1,7 @@ +from typing import Dict, List + +binary: Dict[int, str] +wordlist: List[str] + +def key_to_english(key: bytes) -> str: ... +def english_to_key(s: str) -> bytes: ... 
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/__init__.py new file mode 100644 index 000000000..f12214d30 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Miscellaneous modules + +Contains useful modules that don't belong into any of the +other Crypto.* subpackages. + +======================== ============================================= +Module Description +======================== ============================================= +`Crypto.Util.number` Number-theoretic functions (primality testing, etc.) +`Crypto.Util.Counter` Fast counter functions for CTR cipher modes. +`Crypto.Util.RFC1751` Converts between 128-bit keys and human-readable + strings of words. +`Crypto.Util.asn1` Minimal support for ASN.1 DER encoding +`Crypto.Util.Padding` Set of functions for adding and removing padding. +======================== ============================================= + +:undocumented: _galois, _number_new, cpuid, py3compat, _raw_api +""" + +__all__ = ['RFC1751', 'number', 'strxor', 'asn1', 'Counter', 'Padding'] + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_cpu_features.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_cpu_features.py new file mode 100644 index 000000000..b3039b587 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_cpu_features.py @@ -0,0 +1,46 @@ +# =================================================================== +# +# Copyright (c) 2018, Helder Eijs +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util._raw_api import load_pycryptodome_raw_lib + + +_raw_cpuid_lib = load_pycryptodome_raw_lib("Crypto.Util._cpuid_c", + """ + int have_aes_ni(void); + int have_clmul(void); + """) + + +def have_aes_ni(): + return _raw_cpuid_lib.have_aes_ni() + + +def have_clmul(): + return _raw_cpuid_lib.have_clmul() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_cpu_features.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_cpu_features.pyi new file mode 100644 index 000000000..10e669e38 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_cpu_features.pyi @@ -0,0 +1,2 @@ +def have_aes_ni() -> int: ... +def have_clmul() -> int: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_cpuid_c.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_cpuid_c.abi3.so new file mode 100755 index 000000000..51e31b74f Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_cpuid_c.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_file_system.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_file_system.py new file mode 100644 index 000000000..1cb0c4bf4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_file_system.py @@ -0,0 +1,54 @@ +# =================================================================== +# +# Copyright (c) 2016, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import os + + +def pycryptodome_filename(dir_comps, filename): + """Return the complete file name for the module + + dir_comps : list of string + The list of directory names in the PyCryptodome package. + The first element must be "Crypto". + + filename : string + The filename (inclusing extension) in the target directory. + """ + + if dir_comps[0] != "Crypto": + raise ValueError("Only available for modules under 'Crypto'") + + dir_comps = list(dir_comps[1:]) + [filename] + + util_lib, _ = os.path.split(os.path.abspath(__file__)) + root_lib = os.path.join(util_lib, "..") + + return os.path.join(root_lib, *dir_comps) + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_file_system.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_file_system.pyi new file mode 100644 index 000000000..d54a1268f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_file_system.pyi @@ -0,0 +1,4 @@ +from typing import List + + +def pycryptodome_filename(dir_comps: List[str], filename: str) -> str: ... \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_raw_api.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_raw_api.py new file mode 100644 index 000000000..e0065c3d0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_raw_api.py @@ -0,0 +1,325 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# =================================================================== + +import os +import abc +import sys +from Crypto.Util.py3compat import byte_string +from Crypto.Util._file_system import pycryptodome_filename + +# +# List of file suffixes for Python extensions +# +if sys.version_info[0] < 3: + + import imp + extension_suffixes = [] + for ext, mod, typ in imp.get_suffixes(): + if typ == imp.C_EXTENSION: + extension_suffixes.append(ext) + +else: + + from importlib import machinery + extension_suffixes = machinery.EXTENSION_SUFFIXES + +# Which types with buffer interface we support (apart from byte strings) +_buffer_type = (bytearray, memoryview) + + +class _VoidPointer(object): + @abc.abstractmethod + def get(self): + """Return the memory location we point to""" + return + + @abc.abstractmethod + def address_of(self): + """Return a raw pointer to this pointer""" + return + + +try: + # Starting from v2.18, pycparser (used by cffi for in-line ABI mode) + # stops working correctly when PYOPTIMIZE==2 or the parameter -OO is + # passed. In that case, we fall back to ctypes. + # Note that PyPy ships with an old version of pycparser so we can keep + # using cffi there. + # See https://github.com/Legrandin/pycryptodome/issues/228 + if '__pypy__' not in sys.builtin_module_names and sys.flags.optimize == 2: + raise ImportError("CFFI with optimize=2 fails due to pycparser bug.") + + # cffi still uses PyUnicode_GetSize, which was removed in Python 3.12 + # thus leading to a crash on cffi.dlopen() + # See https://groups.google.com/u/1/g/python-cffi/c/oZkOIZ_zi5k + if sys.version_info >= (3, 12) and os.name == "nt": + raise ImportError("CFFI is not compatible with Python 3.12 on Windows") + + from cffi import FFI + + ffi = FFI() + null_pointer = ffi.NULL + uint8_t_type = ffi.typeof(ffi.new("const uint8_t*")) + + _Array = ffi.new("uint8_t[1]").__class__.__bases__ + + def load_lib(name, cdecl): + """Load a shared library and return a handle to it. + + @name, either an absolute path or the name of a library + in the system search path. + + @cdecl, the C function declarations. 
+ """ + + if hasattr(ffi, "RTLD_DEEPBIND") and not os.getenv('PYCRYPTODOME_DISABLE_DEEPBIND'): + lib = ffi.dlopen(name, ffi.RTLD_DEEPBIND) + else: + lib = ffi.dlopen(name) + ffi.cdef(cdecl) + return lib + + def c_ulong(x): + """Convert a Python integer to unsigned long""" + return x + + c_ulonglong = c_ulong + c_uint = c_ulong + c_ubyte = c_ulong + + def c_size_t(x): + """Convert a Python integer to size_t""" + return x + + def create_string_buffer(init_or_size, size=None): + """Allocate the given amount of bytes (initially set to 0)""" + + if isinstance(init_or_size, bytes): + size = max(len(init_or_size) + 1, size) + result = ffi.new("uint8_t[]", size) + result[:] = init_or_size + else: + if size: + raise ValueError("Size must be specified once only") + result = ffi.new("uint8_t[]", init_or_size) + return result + + def get_c_string(c_string): + """Convert a C string into a Python byte sequence""" + return ffi.string(c_string) + + def get_raw_buffer(buf): + """Convert a C buffer into a Python byte sequence""" + return ffi.buffer(buf)[:] + + def c_uint8_ptr(data): + if isinstance(data, _buffer_type): + # This only works for cffi >= 1.7 + return ffi.cast(uint8_t_type, ffi.from_buffer(data)) + elif byte_string(data) or isinstance(data, _Array): + return data + else: + raise TypeError("Object type %s cannot be passed to C code" % type(data)) + + class VoidPointer_cffi(_VoidPointer): + """Model a newly allocated pointer to void""" + + def __init__(self): + self._pp = ffi.new("void *[1]") + + def get(self): + return self._pp[0] + + def address_of(self): + return self._pp + + def VoidPointer(): + return VoidPointer_cffi() + + backend = "cffi" + +except ImportError: + + import ctypes + from ctypes import (CDLL, c_void_p, byref, c_ulong, c_ulonglong, c_size_t, + create_string_buffer, c_ubyte, c_uint) + from ctypes.util import find_library + from ctypes import Array as _Array + + null_pointer = None + cached_architecture = [] + + def c_ubyte(c): + if not (0 <= c < 256): + raise OverflowError() + return ctypes.c_ubyte(c) + + def load_lib(name, cdecl): + if not cached_architecture: + # platform.architecture() creates a subprocess, so caching the + # result makes successive imports faster. + import platform + cached_architecture[:] = platform.architecture() + bits, linkage = cached_architecture + if "." 
not in name and not linkage.startswith("Win"): + full_name = find_library(name) + if full_name is None: + raise OSError("Cannot load library '%s'" % name) + name = full_name + return CDLL(name) + + def get_c_string(c_string): + return c_string.value + + def get_raw_buffer(buf): + return buf.raw + + # ---- Get raw pointer --- + + _c_ssize_t = ctypes.c_ssize_t + + _PyBUF_SIMPLE = 0 + _PyObject_GetBuffer = ctypes.pythonapi.PyObject_GetBuffer + _PyBuffer_Release = ctypes.pythonapi.PyBuffer_Release + _py_object = ctypes.py_object + _c_ssize_p = ctypes.POINTER(_c_ssize_t) + + # See Include/object.h for CPython + # and https://github.com/pallets/click/blob/master/src/click/_winconsole.py + class _Py_buffer(ctypes.Structure): + _fields_ = [ + ('buf', c_void_p), + ('obj', ctypes.py_object), + ('len', _c_ssize_t), + ('itemsize', _c_ssize_t), + ('readonly', ctypes.c_int), + ('ndim', ctypes.c_int), + ('format', ctypes.c_char_p), + ('shape', _c_ssize_p), + ('strides', _c_ssize_p), + ('suboffsets', _c_ssize_p), + ('internal', c_void_p) + ] + + # Extra field for CPython 2.6/2.7 + if sys.version_info[0] == 2: + _fields_.insert(-1, ('smalltable', _c_ssize_t * 2)) + + def c_uint8_ptr(data): + if byte_string(data) or isinstance(data, _Array): + return data + elif isinstance(data, _buffer_type): + obj = _py_object(data) + buf = _Py_buffer() + _PyObject_GetBuffer(obj, byref(buf), _PyBUF_SIMPLE) + try: + buffer_type = ctypes.c_ubyte * buf.len + return buffer_type.from_address(buf.buf) + finally: + _PyBuffer_Release(byref(buf)) + else: + raise TypeError("Object type %s cannot be passed to C code" % type(data)) + + # --- + + class VoidPointer_ctypes(_VoidPointer): + """Model a newly allocated pointer to void""" + + def __init__(self): + self._p = c_void_p() + + def get(self): + return self._p + + def address_of(self): + return byref(self._p) + + def VoidPointer(): + return VoidPointer_ctypes() + + backend = "ctypes" + + +class SmartPointer(object): + """Class to hold a non-managed piece of memory""" + + def __init__(self, raw_pointer, destructor): + self._raw_pointer = raw_pointer + self._destructor = destructor + + def get(self): + return self._raw_pointer + + def release(self): + rp, self._raw_pointer = self._raw_pointer, None + return rp + + def __del__(self): + try: + if self._raw_pointer is not None: + self._destructor(self._raw_pointer) + self._raw_pointer = None + except AttributeError: + pass + + +def load_pycryptodome_raw_lib(name, cdecl): + """Load a shared library and return a handle to it. + + @name, the name of the library expressed as a PyCryptodome module, + for instance Crypto.Cipher._raw_cbc. + + @cdecl, the C function declarations. 
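    A typical call mirrors what ``Crypto.Util._cpu_features`` does with the
    ``_cpuid_c`` helper (shown here only as an illustrative sketch)::

        >>> from Crypto.Util._raw_api import load_pycryptodome_raw_lib
        >>> lib = load_pycryptodome_raw_lib("Crypto.Util._cpuid_c", "int have_aes_ni(void); int have_clmul(void);")
        >>> isinstance(lib.have_aes_ni(), int)
        True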
+ """ + + split = name.split(".") + dir_comps, basename = split[:-1], split[-1] + attempts = [] + for ext in extension_suffixes: + try: + filename = basename + ext + full_name = pycryptodome_filename(dir_comps, filename) + if not os.path.isfile(full_name): + attempts.append("Not found '%s'" % filename) + continue + return load_lib(full_name, cdecl) + except OSError as exp: + attempts.append("Cannot load '%s': %s" % (filename, str(exp))) + raise OSError("Cannot load native module '%s': %s" % (name, ", ".join(attempts))) + + +def is_buffer(x): + """Return True if object x supports the buffer interface""" + return isinstance(x, (bytes, bytearray, memoryview)) + + +def is_writeable_buffer(x): + return (isinstance(x, bytearray) or + (isinstance(x, memoryview) and not x.readonly)) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_raw_api.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_raw_api.pyi new file mode 100644 index 000000000..2bc530163 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_raw_api.pyi @@ -0,0 +1,27 @@ +from typing import Any, Optional, Union + +def load_lib(name: str, cdecl: str) -> Any : ... +def c_ulong(x: int ) -> Any : ... +def c_ulonglong(x: int ) -> Any : ... +def c_size_t(x: int) -> Any : ... +def create_string_buffer(init_or_size: Union[bytes,int], size: Optional[int]) -> Any : ... +def get_c_string(c_string: Any) -> bytes : ... +def get_raw_buffer(buf: Any) -> bytes : ... +def c_uint8_ptr(data: Union[bytes, memoryview, bytearray]) -> Any : ... + +class VoidPointer(object): + def get(self) -> Any : ... + def address_of(self) -> Any : ... + +class SmartPointer(object): + def __init__(self, raw_pointer: Any, destructor: Any) -> None : ... + def get(self) -> Any : ... + def release(self) -> Any : ... + +backend : str +null_pointer : Any +ffi: Any + +def load_pycryptodome_raw_lib(name: str, cdecl: str) -> Any : ... +def is_buffer(x: Any) -> bool : ... +def is_writeable_buffer(x: Any) -> bool : ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_strxor.abi3.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_strxor.abi3.so new file mode 100755 index 000000000..f0f3784db Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/_strxor.abi3.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/asn1.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/asn1.py new file mode 100644 index 000000000..a646eacb4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/asn1.py @@ -0,0 +1,1064 @@ +# -*- coding: ascii -*- +# +# Util/asn1.py : Minimal support for ASN.1 DER binary encoding. +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +import struct + +from Crypto.Util.py3compat import byte_string, bchr, bord + +from Crypto.Util.number import long_to_bytes, bytes_to_long + +__all__ = ['DerObject', 'DerInteger', 'DerBoolean', 'DerOctetString', + 'DerNull', 'DerSequence', 'DerObjectId', 'DerBitString', 'DerSetOf'] + +# Useful references: +# - https://luca.ntop.org/Teaching/Appunti/asn1.html +# - https://letsencrypt.org/docs/a-warm-welcome-to-asn1-and-der/ +# - https://www.zytrax.com/tech/survival/asn1.html +# - https://www.oss.com/asn1/resources/books-whitepapers-pubs/larmouth-asn1-book.pdf +# - https://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf +# - https://misc.daniel-marschall.de/asn.1/oid-converter/online.php + +def _is_number(x, only_non_negative=False): + test = 0 + try: + test = x + test + except TypeError: + return False + return not only_non_negative or x >= 0 + + +class BytesIO_EOF(object): + """This class differs from BytesIO in that a ValueError exception is + raised whenever EOF is reached.""" + + def __init__(self, initial_bytes): + self._buffer = initial_bytes + self._index = 0 + self._bookmark = None + + def set_bookmark(self): + self._bookmark = self._index + + def data_since_bookmark(self): + assert self._bookmark is not None + return self._buffer[self._bookmark:self._index] + + def remaining_data(self): + return len(self._buffer) - self._index + + def read(self, length): + new_index = self._index + length + if new_index > len(self._buffer): + raise ValueError("Not enough data for DER decoding: expected %d bytes and found %d" % (new_index, len(self._buffer))) + + result = self._buffer[self._index:new_index] + self._index = new_index + return result + + def read_byte(self): + return bord(self.read(1)[0]) + + +class DerObject(object): + """Base class for defining a single DER object. + + This class should never be directly instantiated. + """ + + def __init__(self, asn1Id=None, payload=b'', implicit=None, + constructed=False, explicit=None): + """Initialize the DER object according to a specific ASN.1 type. + + :Parameters: + asn1Id : integer or byte + The universal DER tag number for this object + (e.g. 0x10 for a SEQUENCE). + If None, the tag is not known yet. + + payload : byte string + The initial payload of the object (that it, + the content octets). + If not specified, the payload is empty. + + implicit : integer or byte + The IMPLICIT tag number (< 0x1F) to use for the encoded object. + It overrides the universal tag *asn1Id*. + It cannot be combined with the ``explicit`` parameter. + By default, there is no IMPLICIT tag. + + constructed : bool + True when the ASN.1 type is *constructed*. + False when it is *primitive* (default). + + explicit : integer or byte + The EXPLICIT tag number (< 0x1F) to use for the encoded object. + It cannot be combined with the ``implicit`` parameter. + By default, there is no EXPLICIT tag. 
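        As a quick sketch of how these parameters shape the identifier octet:
        the universal tag keeps the class bits at 00, while an IMPLICIT tag
        moves the object into the context-specific class (bit 7 set)::

            >>> from Crypto.Util.asn1 import DerObject
            >>> DerObject(0x04, b'\\x00\\x01').encode().hex()
            '04020001'
            >>> DerObject(0x04, b'\\x00\\x01', implicit=0).encode().hex()
            '80020001'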
+ """ + + if asn1Id is None: + # The tag octet will be read in with ``decode`` + self._tag_octet = None + return + asn1Id = self._convertTag(asn1Id) + + self.payload = payload + + # In a BER/DER identifier octet: + # * bits 4-0 contain the tag value + # * bit 5 is set if the type is 'constructed' + # and unset if 'primitive' + # * bits 7-6 depend on the encoding class + # + # Class | Bit 7, Bit 6 + # ---------------------------------- + # universal | 0 0 + # application | 0 1 + # context-spec | 1 0 (default for IMPLICIT/EXPLICIT) + # private | 1 1 + # + + constructed_bit = 0x20 if constructed else 0x00 + + if None not in (explicit, implicit): + raise ValueError("Explicit and implicit tags are" + " mutually exclusive") + + if implicit is not None: + # IMPLICIT tag overrides asn1Id + self._tag_octet = 0x80 | constructed_bit | self._convertTag(implicit) + elif explicit is not None: + # 'constructed bit' is always asserted for an EXPLICIT tag + self._tag_octet = 0x80 | 0x20 | self._convertTag(explicit) + self._inner_tag_octet = constructed_bit | asn1Id + else: + # Neither IMPLICIT nor EXPLICIT + self._tag_octet = constructed_bit | asn1Id + + def _convertTag(self, tag): + """Check if *tag* is a real DER tag (5 bits). + Convert it from a character to number if necessary. + """ + if not _is_number(tag): + if len(tag) == 1: + tag = bord(tag[0]) + # Ensure that tag is a low tag + if not (_is_number(tag) and 0 <= tag < 0x1F): + raise ValueError("Wrong DER tag") + return tag + + @staticmethod + def _definite_form(length): + """Build length octets according to BER/DER + definite form. + """ + if length > 127: + encoding = long_to_bytes(length) + return bchr(len(encoding) + 128) + encoding + return bchr(length) + + def encode(self): + """Return this DER element, fully encoded as a binary byte string.""" + + # Concatenate identifier octets, length octets, + # and contents octets + + output_payload = self.payload + + # In case of an EXTERNAL tag, first encode the inner + # element. + if hasattr(self, "_inner_tag_octet"): + output_payload = (bchr(self._inner_tag_octet) + + self._definite_form(len(self.payload)) + + self.payload) + + return (bchr(self._tag_octet) + + self._definite_form(len(output_payload)) + + output_payload) + + def _decodeLen(self, s): + """Decode DER length octets from a file.""" + + length = s.read_byte() + + if length > 127: + encoded_length = s.read(length & 0x7F) + if bord(encoded_length[0]) == 0: + raise ValueError("Invalid DER: length has leading zero") + length = bytes_to_long(encoded_length) + if length <= 127: + raise ValueError("Invalid DER: length in long form but smaller than 128") + + return length + + def decode(self, der_encoded, strict=False): + """Decode a complete DER element, and re-initializes this + object with it. + + Args: + der_encoded (byte string): A complete DER element. + + Raises: + ValueError: in case of parsing errors. 
+ """ + + if not byte_string(der_encoded): + raise ValueError("Input is not a byte string") + + s = BytesIO_EOF(der_encoded) + self._decodeFromStream(s, strict) + + # There shouldn't be other bytes left + if s.remaining_data() > 0: + raise ValueError("Unexpected extra data after the DER structure") + + return self + + def _decodeFromStream(self, s, strict): + """Decode a complete DER element from a file.""" + + idOctet = s.read_byte() + if self._tag_octet is not None: + if idOctet != self._tag_octet: + raise ValueError("Unexpected DER tag") + else: + self._tag_octet = idOctet + length = self._decodeLen(s) + self.payload = s.read(length) + + # In case of an EXTERNAL tag, further decode the inner + # element. + if hasattr(self, "_inner_tag_octet"): + p = BytesIO_EOF(self.payload) + inner_octet = p.read_byte() + if inner_octet != self._inner_tag_octet: + raise ValueError("Unexpected internal DER tag") + length = self._decodeLen(p) + self.payload = p.read(length) + + # There shouldn't be other bytes left + if p.remaining_data() > 0: + raise ValueError("Unexpected extra data after the DER structure") + + +class DerInteger(DerObject): + """Class to model a DER INTEGER. + + An example of encoding is:: + + >>> from Crypto.Util.asn1 import DerInteger + >>> from binascii import hexlify, unhexlify + >>> int_der = DerInteger(9) + >>> print hexlify(int_der.encode()) + + which will show ``020109``, the DER encoding of 9. + + And for decoding:: + + >>> s = unhexlify(b'020109') + >>> try: + >>> int_der = DerInteger() + >>> int_der.decode(s) + >>> print int_der.value + >>> except ValueError: + >>> print "Not a valid DER INTEGER" + + the output will be ``9``. + + :ivar value: The integer value + :vartype value: integer + """ + + def __init__(self, value=0, implicit=None, explicit=None): + """Initialize the DER object as an INTEGER. + + :Parameters: + value : integer + The value of the integer. + + implicit : integer + The IMPLICIT tag to use for the encoded object. + It overrides the universal tag for INTEGER (2). + """ + + DerObject.__init__(self, 0x02, b'', implicit, + False, explicit) + self.value = value # The integer value + + def encode(self): + """Return the DER INTEGER, fully encoded as a + binary string.""" + + number = self.value + self.payload = b'' + while True: + self.payload = bchr(int(number & 255)) + self.payload + if 128 <= number <= 255: + self.payload = bchr(0x00) + self.payload + if -128 <= number <= 255: + break + number >>= 8 + return DerObject.encode(self) + + def decode(self, der_encoded, strict=False): + """Decode a DER-encoded INTEGER, and re-initializes this + object with it. + + Args: + der_encoded (byte string): A complete INTEGER DER element. + + Raises: + ValueError: in case of parsing errors. 
+ """ + + return DerObject.decode(self, der_encoded, strict=strict) + + def _decodeFromStream(self, s, strict): + """Decode a complete DER INTEGER from a file.""" + + # Fill up self.payload + DerObject._decodeFromStream(self, s, strict) + + if strict: + if len(self.payload) == 0: + raise ValueError("Invalid encoding for DER INTEGER: empty payload") + if len(self.payload) >= 2 and struct.unpack('>H', self.payload[:2])[0] < 0x80: + raise ValueError("Invalid encoding for DER INTEGER: leading zero") + + # Derive self.value from self.payload + self.value = 0 + bits = 1 + for i in self.payload: + self.value *= 256 + self.value += bord(i) + bits <<= 8 + if self.payload and bord(self.payload[0]) & 0x80: + self.value -= bits + + +class DerBoolean(DerObject): + """Class to model a DER-encoded BOOLEAN. + + An example of encoding is:: + + >>> from Crypto.Util.asn1 import DerBoolean + >>> bool_der = DerBoolean(True) + >>> print(bool_der.encode().hex()) + + which will show ``0101ff``, the DER encoding of True. + + And for decoding:: + + >>> s = bytes.fromhex('0101ff') + >>> try: + >>> bool_der = DerBoolean() + >>> bool_der.decode(s) + >>> print(bool_der.value) + >>> except ValueError: + >>> print "Not a valid DER BOOLEAN" + + the output will be ``True``. + + :ivar value: The boolean value + :vartype value: boolean + """ + def __init__(self, value=False, implicit=None, explicit=None): + """Initialize the DER object as a BOOLEAN. + + Args: + value (boolean): + The value of the boolean. Default is False. + + implicit (integer or byte): + The IMPLICIT tag number (< 0x1F) to use for the encoded object. + It overrides the universal tag for BOOLEAN (1). + It cannot be combined with the ``explicit`` parameter. + By default, there is no IMPLICIT tag. + + explicit (integer or byte): + The EXPLICIT tag number (< 0x1F) to use for the encoded object. + It cannot be combined with the ``implicit`` parameter. + By default, there is no EXPLICIT tag. + """ + + DerObject.__init__(self, 0x01, b'', implicit, False, explicit) + self.value = value # The boolean value + + def encode(self): + """Return the DER BOOLEAN, fully encoded as a binary string.""" + + self.payload = b'\xFF' if self.value else b'\x00' + return DerObject.encode(self) + + def decode(self, der_encoded, strict=False): + """Decode a DER-encoded BOOLEAN, and re-initializes this object with it. + + Args: + der_encoded (byte string): A DER-encoded BOOLEAN. + + Raises: + ValueError: in case of parsing errors. + """ + + return DerObject.decode(self, der_encoded, strict) + + def _decodeFromStream(self, s, strict): + """Decode a DER-encoded BOOLEAN from a file.""" + + # Fill up self.payload + DerObject._decodeFromStream(self, s, strict) + + if len(self.payload) != 1: + raise ValueError("Invalid encoding for DER BOOLEAN: payload is not 1 byte") + + if bord(self.payload[0]) == 0: + self.value = False + elif bord(self.payload[0]) == 0xFF: + self.value = True + else: + raise ValueError("Invalid payload for DER BOOLEAN") + + +class DerSequence(DerObject): + """Class to model a DER SEQUENCE. + + This object behaves like a dynamic Python sequence. + + Sub-elements that are INTEGERs behave like Python integers. + + Any other sub-element is a binary string encoded as a complete DER + sub-element (TLV). 
+ + An example of encoding is: + + >>> from Crypto.Util.asn1 import DerSequence, DerInteger + >>> from binascii import hexlify, unhexlify + >>> obj_der = unhexlify('070102') + >>> seq_der = DerSequence([4]) + >>> seq_der.append(9) + >>> seq_der.append(obj_der.encode()) + >>> print hexlify(seq_der.encode()) + + which will show ``3009020104020109070102``, the DER encoding of the + sequence containing ``4``, ``9``, and the object with payload ``02``. + + For decoding: + + >>> s = unhexlify(b'3009020104020109070102') + >>> try: + >>> seq_der = DerSequence() + >>> seq_der.decode(s) + >>> print len(seq_der) + >>> print seq_der[0] + >>> print seq_der[:] + >>> except ValueError: + >>> print "Not a valid DER SEQUENCE" + + the output will be:: + + 3 + 4 + [4, 9, b'\x07\x01\x02'] + + """ + + def __init__(self, startSeq=None, implicit=None, explicit=None): + """Initialize the DER object as a SEQUENCE. + + :Parameters: + startSeq : Python sequence + A sequence whose element are either integers or + other DER objects. + + implicit : integer or byte + The IMPLICIT tag number (< 0x1F) to use for the encoded object. + It overrides the universal tag for SEQUENCE (16). + It cannot be combined with the ``explicit`` parameter. + By default, there is no IMPLICIT tag. + + explicit : integer or byte + The EXPLICIT tag number (< 0x1F) to use for the encoded object. + It cannot be combined with the ``implicit`` parameter. + By default, there is no EXPLICIT tag. + """ + + DerObject.__init__(self, 0x10, b'', implicit, True, explicit) + if startSeq is None: + self._seq = [] + else: + self._seq = startSeq + + # A few methods to make it behave like a python sequence + + def __delitem__(self, n): + del self._seq[n] + + def __getitem__(self, n): + return self._seq[n] + + def __setitem__(self, key, value): + self._seq[key] = value + + def __setslice__(self, i, j, sequence): + self._seq[i:j] = sequence + + def __delslice__(self, i, j): + del self._seq[i:j] + + def __getslice__(self, i, j): + return self._seq[max(0, i):max(0, j)] + + def __len__(self): + return len(self._seq) + + def __iadd__(self, item): + self._seq.append(item) + return self + + def append(self, item): + self._seq.append(item) + return self + + def insert(self, index, item): + self._seq.insert(index, item) + return self + + def hasInts(self, only_non_negative=True): + """Return the number of items in this sequence that are + integers. + + Args: + only_non_negative (boolean): + If ``True``, negative integers are not counted in. + """ + + items = [x for x in self._seq if _is_number(x, only_non_negative)] + return len(items) + + def hasOnlyInts(self, only_non_negative=True): + """Return ``True`` if all items in this sequence are integers + or non-negative integers. + + This function returns False is the sequence is empty, + or at least one member is not an integer. + + Args: + only_non_negative (boolean): + If ``True``, the presence of negative integers + causes the method to return ``False``.""" + return self._seq and self.hasInts(only_non_negative) == len(self._seq) + + def encode(self): + """Return this DER SEQUENCE, fully encoded as a + binary string. + + Raises: + ValueError: if some elements in the sequence are neither integers + nor byte strings. 
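        For instance, the DER encoding of the common *rsaEncryption*
        AlgorithmIdentifier can be assembled from pre-encoded members
        (a minimal sketch)::

            >>> from Crypto.Util.asn1 import DerSequence, DerObjectId, DerNull
            >>> seq = DerSequence([DerObjectId('1.2.840.113549.1.1.1').encode(), DerNull().encode()])
            >>> seq.encode().hex()
            '300d06092a864886f70d0101010500'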
+ """ + self.payload = b'' + for item in self._seq: + if byte_string(item): + self.payload += item + elif _is_number(item): + self.payload += DerInteger(item).encode() + else: + self.payload += item.encode() + return DerObject.encode(self) + + def decode(self, der_encoded, strict=False, nr_elements=None, only_ints_expected=False): + """Decode a complete DER SEQUENCE, and re-initializes this + object with it. + + Args: + der_encoded (byte string): + A complete SEQUENCE DER element. + nr_elements (None or integer or list of integers): + The number of members the SEQUENCE can have + only_ints_expected (boolean): + Whether the SEQUENCE is expected to contain only integers. + strict (boolean): + Whether decoding must check for strict DER compliancy. + + Raises: + ValueError: in case of parsing errors. + + DER INTEGERs are decoded into Python integers. Any other DER + element is not decoded. Its validity is not checked. + """ + + self._nr_elements = nr_elements + result = DerObject.decode(self, der_encoded, strict=strict) + + if only_ints_expected and not self.hasOnlyInts(): + raise ValueError("Some members are not INTEGERs") + + return result + + def _decodeFromStream(self, s, strict): + """Decode a complete DER SEQUENCE from a file.""" + + self._seq = [] + + # Fill up self.payload + DerObject._decodeFromStream(self, s, strict) + + # Add one item at a time to self.seq, by scanning self.payload + p = BytesIO_EOF(self.payload) + while p.remaining_data() > 0: + p.set_bookmark() + + der = DerObject() + der._decodeFromStream(p, strict) + + # Parse INTEGERs differently + if der._tag_octet != 0x02: + self._seq.append(p.data_since_bookmark()) + else: + derInt = DerInteger() + data = p.data_since_bookmark() + derInt.decode(data, strict=strict) + self._seq.append(derInt.value) + + ok = True + if self._nr_elements is not None: + try: + ok = len(self._seq) in self._nr_elements + except TypeError: + ok = len(self._seq) == self._nr_elements + + if not ok: + raise ValueError("Unexpected number of members (%d)" + " in the sequence" % len(self._seq)) + + +class DerOctetString(DerObject): + """Class to model a DER OCTET STRING. + + An example of encoding is: + + >>> from Crypto.Util.asn1 import DerOctetString + >>> from binascii import hexlify, unhexlify + >>> os_der = DerOctetString(b'\\xaa') + >>> os_der.payload += b'\\xbb' + >>> print hexlify(os_der.encode()) + + which will show ``0402aabb``, the DER encoding for the byte string + ``b'\\xAA\\xBB'``. + + For decoding: + + >>> s = unhexlify(b'0402aabb') + >>> try: + >>> os_der = DerOctetString() + >>> os_der.decode(s) + >>> print hexlify(os_der.payload) + >>> except ValueError: + >>> print "Not a valid DER OCTET STRING" + + the output will be ``aabb``. + + :ivar payload: The content of the string + :vartype payload: byte string + """ + + def __init__(self, value=b'', implicit=None): + """Initialize the DER object as an OCTET STRING. + + :Parameters: + value : byte string + The initial payload of the object. + If not specified, the payload is empty. + + implicit : integer + The IMPLICIT tag to use for the encoded object. + It overrides the universal tag for OCTET STRING (4). + """ + DerObject.__init__(self, 0x04, value, implicit, False) + + +class DerNull(DerObject): + """Class to model a DER NULL element.""" + + def __init__(self): + """Initialize the DER object as a NULL.""" + + DerObject.__init__(self, 0x05, b'', None, False) + + +class DerObjectId(DerObject): + """Class to model a DER OBJECT ID. 
+ + An example of encoding is: + + >>> from Crypto.Util.asn1 import DerObjectId + >>> from binascii import hexlify, unhexlify + >>> oid_der = DerObjectId("1.2") + >>> oid_der.value += ".840.113549.1.1.1" + >>> print hexlify(oid_der.encode()) + + which will show ``06092a864886f70d010101``, the DER encoding for the + RSA Object Identifier ``1.2.840.113549.1.1.1``. + + For decoding: + + >>> s = unhexlify(b'06092a864886f70d010101') + >>> try: + >>> oid_der = DerObjectId() + >>> oid_der.decode(s) + >>> print oid_der.value + >>> except ValueError: + >>> print "Not a valid DER OBJECT ID" + + the output will be ``1.2.840.113549.1.1.1``. + + :ivar value: The Object ID (OID), a dot separated list of integers + :vartype value: string + """ + + def __init__(self, value='', implicit=None, explicit=None): + """Initialize the DER object as an OBJECT ID. + + :Parameters: + value : string + The initial Object Identifier (e.g. "1.2.0.0.6.2"). + implicit : integer + The IMPLICIT tag to use for the encoded object. + It overrides the universal tag for OBJECT ID (6). + explicit : integer + The EXPLICIT tag to use for the encoded object. + """ + DerObject.__init__(self, 0x06, b'', implicit, False, explicit) + self.value = value + + def encode(self): + """Return the DER OBJECT ID, fully encoded as a + binary string.""" + + comps = [int(x) for x in self.value.split(".")] + + if len(comps) < 2: + raise ValueError("Not a valid Object Identifier string") + if comps[0] > 2: + raise ValueError("First component must be 0, 1 or 2") + if comps[0] < 2 and comps[1] > 39: + raise ValueError("Second component must be 39 at most") + + subcomps = [40 * comps[0] + comps[1]] + comps[2:] + + encoding = [] + for v in reversed(subcomps): + encoding.append(v & 0x7F) + v >>= 7 + while v: + encoding.append((v & 0x7F) | 0x80) + v >>= 7 + + self.payload = b''.join([bchr(x) for x in reversed(encoding)]) + return DerObject.encode(self) + + def decode(self, der_encoded, strict=False): + """Decode a complete DER OBJECT ID, and re-initializes this + object with it. + + Args: + der_encoded (byte string): + A complete DER OBJECT ID. + strict (boolean): + Whether decoding must check for strict DER compliancy. + + Raises: + ValueError: in case of parsing errors. + """ + + return DerObject.decode(self, der_encoded, strict) + + def _decodeFromStream(self, s, strict): + """Decode a complete DER OBJECT ID from a file.""" + + # Fill up self.payload + DerObject._decodeFromStream(self, s, strict) + + # Derive self.value from self.payload + p = BytesIO_EOF(self.payload) + + subcomps = [] + v = 0 + while p.remaining_data(): + c = p.read_byte() + v = (v << 7) + (c & 0x7F) + if not (c & 0x80): + subcomps.append(v) + v = 0 + + if len(subcomps) == 0: + raise ValueError("Empty payload") + + if subcomps[0] < 40: + subcomps[:1] = [0, subcomps[0]] + elif subcomps[0] < 80: + subcomps[:1] = [1, subcomps[0] - 40] + else: + subcomps[:1] = [2, subcomps[0] - 80] + + self.value = ".".join([str(x) for x in subcomps]) + + +class DerBitString(DerObject): + """Class to model a DER BIT STRING. + + An example of encoding is: + + >>> from Crypto.Util.asn1 import DerBitString + >>> bs_der = DerBitString(b'\\xAA') + >>> bs_der.value += b'\\xBB' + >>> print(bs_der.encode().hex()) + + which will show ``030300aabb``, the DER encoding for the bit string + ``b'\\xAA\\xBB'``. 
+ + For decoding: + + >>> s = bytes.fromhex('030300aabb') + >>> try: + >>> bs_der = DerBitString() + >>> bs_der.decode(s) + >>> print(bs_der.value.hex()) + >>> except ValueError: + >>> print "Not a valid DER BIT STRING" + + the output will be ``aabb``. + + :ivar value: The content of the string + :vartype value: byte string + """ + + def __init__(self, value=b'', implicit=None, explicit=None): + """Initialize the DER object as a BIT STRING. + + :Parameters: + value : byte string or DER object + The initial, packed bit string. + If not specified, the bit string is empty. + implicit : integer + The IMPLICIT tag to use for the encoded object. + It overrides the universal tag for BIT STRING (3). + explicit : integer + The EXPLICIT tag to use for the encoded object. + """ + DerObject.__init__(self, 0x03, b'', implicit, False, explicit) + + # The bitstring value (packed) + if isinstance(value, DerObject): + self.value = value.encode() + else: + self.value = value + + def encode(self): + """Return the DER BIT STRING, fully encoded as a + byte string.""" + + # Add padding count byte + self.payload = b'\x00' + self.value + return DerObject.encode(self) + + def decode(self, der_encoded, strict=False): + """Decode a complete DER BIT STRING, and re-initializes this + object with it. + + Args: + der_encoded (byte string): a complete DER BIT STRING. + strict (boolean): + Whether decoding must check for strict DER compliancy. + + Raises: + ValueError: in case of parsing errors. + """ + + return DerObject.decode(self, der_encoded, strict) + + def _decodeFromStream(self, s, strict): + """Decode a complete DER BIT STRING DER from a file.""" + + # Fill-up self.payload + DerObject._decodeFromStream(self, s, strict) + + if self.payload and bord(self.payload[0]) != 0: + raise ValueError("Not a valid BIT STRING") + + # Fill-up self.value + self.value = b'' + # Remove padding count byte + if self.payload: + self.value = self.payload[1:] + + +class DerSetOf(DerObject): + """Class to model a DER SET OF. + + An example of encoding is: + + >>> from Crypto.Util.asn1 import DerBitString + >>> from binascii import hexlify, unhexlify + >>> so_der = DerSetOf([4,5]) + >>> so_der.add(6) + >>> print hexlify(so_der.encode()) + + which will show ``3109020104020105020106``, the DER encoding + of a SET OF with items 4,5, and 6. + + For decoding: + + >>> s = unhexlify(b'3109020104020105020106') + >>> try: + >>> so_der = DerSetOf() + >>> so_der.decode(s) + >>> print [x for x in so_der] + >>> except ValueError: + >>> print "Not a valid DER SET OF" + + the output will be ``[4, 5, 6]``. + """ + + def __init__(self, startSet=None, implicit=None): + """Initialize the DER object as a SET OF. + + :Parameters: + startSet : container + The initial set of integers or DER encoded objects. + implicit : integer + The IMPLICIT tag to use for the encoded object. + It overrides the universal tag for SET OF (17). + """ + DerObject.__init__(self, 0x11, b'', implicit, True) + self._seq = [] + + # All elements must be of the same type (and therefore have the + # same leading octet) + self._elemOctet = None + + if startSet: + for e in startSet: + self.add(e) + + def __getitem__(self, n): + return self._seq[n] + + def __iter__(self): + return iter(self._seq) + + def __len__(self): + return len(self._seq) + + def add(self, elem): + """Add an element to the set. + + Args: + elem (byte string or integer): + An element of the same type of objects already in the set. + It can be an integer or a DER encoded object. 
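        For illustration, a plain integer and a pre-encoded INTEGER can live
        in the same set, since both carry element tag 0x02::

            >>> from Crypto.Util.asn1 import DerSetOf, DerInteger
            >>> so = DerSetOf([4])
            >>> so.add(DerInteger(5).encode())
            >>> so.encode().hex()
            '3106020104020105'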
+ """ + + if _is_number(elem): + eo = 0x02 + elif isinstance(elem, DerObject): + eo = self._tag_octet + else: + eo = bord(elem[0]) + + if self._elemOctet != eo: + if self._elemOctet is not None: + raise ValueError("New element does not belong to the set") + self._elemOctet = eo + + if elem not in self._seq: + self._seq.append(elem) + + def decode(self, der_encoded, strict=False): + """Decode a complete SET OF DER element, and re-initializes this + object with it. + + DER INTEGERs are decoded into Python integers. Any other DER + element is left undecoded; its validity is not checked. + + Args: + der_encoded (byte string): a complete DER BIT SET OF. + strict (boolean): + Whether decoding must check for strict DER compliancy. + + Raises: + ValueError: in case of parsing errors. + """ + + return DerObject.decode(self, der_encoded, strict) + + def _decodeFromStream(self, s, strict): + """Decode a complete DER SET OF from a file.""" + + self._seq = [] + + # Fill up self.payload + DerObject._decodeFromStream(self, s, strict) + + # Add one item at a time to self.seq, by scanning self.payload + p = BytesIO_EOF(self.payload) + setIdOctet = -1 + while p.remaining_data() > 0: + p.set_bookmark() + + der = DerObject() + der._decodeFromStream(p, strict) + + # Verify that all members are of the same type + if setIdOctet < 0: + setIdOctet = der._tag_octet + else: + if setIdOctet != der._tag_octet: + raise ValueError("Not all elements are of the same DER type") + + # Parse INTEGERs differently + if setIdOctet != 0x02: + self._seq.append(p.data_since_bookmark()) + else: + derInt = DerInteger() + derInt.decode(p.data_since_bookmark(), strict) + self._seq.append(derInt.value) + # end + + def encode(self): + """Return this SET OF DER element, fully encoded as a + binary string. + """ + + # Elements in the set must be ordered in lexicographic order + ordered = [] + for item in self._seq: + if _is_number(item): + bys = DerInteger(item).encode() + elif isinstance(item, DerObject): + bys = item.encode() + else: + bys = item + ordered.append(bys) + ordered.sort() + self.payload = b''.join(ordered) + return DerObject.encode(self) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/asn1.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/asn1.pyi new file mode 100644 index 000000000..ee4891c5d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/asn1.pyi @@ -0,0 +1,80 @@ +from typing import Optional, Sequence, Union, Set, Iterable + +__all__ = ['DerObject', 'DerInteger', 'DerOctetString', 'DerNull', + 'DerSequence', 'DerObjectId', 'DerBitString', 'DerSetOf'] + +# TODO: Make the encoded DerObjects their own type, so that DerSequence and +# DerSetOf can check their contents better + +class BytesIO_EOF: + def __init__(self, initial_bytes: bytes) -> None: ... + def set_bookmark(self) -> None: ... + def data_since_bookmark(self) -> bytes: ... + def remaining_data(self) -> int: ... + def read(self, length: int) -> bytes: ... + def read_byte(self) -> bytes: ... + +class DerObject: + payload: bytes + def __init__(self, asn1Id: Optional[int]=None, payload: Optional[bytes]=..., implicit: Optional[int]=None, + constructed: Optional[bool]=False, explicit: Optional[int]=None) -> None: ... + def encode(self) -> bytes: ... + def decode(self, der_encoded: bytes, strict: bool=...) -> DerObject: ... + +class DerInteger(DerObject): + value: int + def __init__(self, value: Optional[int]= 0, implicit: Optional[int]=None, explicit: Optional[int]=None) -> None: ... 
+ def encode(self) -> bytes: ... + def decode(self, der_encoded: bytes, strict: bool=...) -> DerInteger: ... + +class DerBoolean(DerObject): + value: bool + def __init__(self, value: bool=..., implicit: Optional[Union[int, bytes]]=..., explicit: Optional[Union[int, bytes]]=...) -> None: ... + def encode(self) -> bytes: ... + def decode(self, der_encoded: bytes, strict: bool=...) -> DerBoolean: ... + +class DerSequence(DerObject): + def __init__(self, startSeq: Optional[Sequence[Union[int, DerInteger, DerObject]]]=None, implicit: Optional[int]=None) -> None: ... + def __delitem__(self, n: int) -> None: ... + def __getitem__(self, n: int) -> None: ... + def __setitem__(self, key: int, value: DerObject) -> None: ... + def __setslice__(self, i: int, j: int, sequence: Sequence) -> None: ... + def __delslice__(self, i: int, j: int) -> None: ... + def __getslice__(self, i: int, j: int) -> DerSequence: ... + def __len__(self) -> int: ... + def __iadd__(self, item: DerObject) -> DerSequence: ... + def append(self, item: DerObject) -> DerSequence: ... + def hasInts(self, only_non_negative: Optional[bool]=True) -> int: ... + def hasOnlyInts(self, only_non_negative: Optional[bool]=True) -> bool: ... + def encode(self) -> bytes: ... + def decode(self, der_encoded: bytes, strict: bool=..., nr_elements: Optional[int]=None, only_ints_expected: Optional[bool]=False) -> DerSequence: ... + +class DerOctetString(DerObject): + payload: bytes + def __init__(self, value: Optional[bytes]=..., implicit: Optional[int]=None) -> None: ... + +class DerNull(DerObject): + def __init__(self) -> None: ... + +class DerObjectId(DerObject): + value: str + def __init__(self, value: Optional[str]=..., implicit: Optional[int]=None, explicit: Optional[int]=None) -> None: ... + def encode(self) -> bytes: ... + def decode(self, der_encoded: bytes, strict: bool=...) -> DerObjectId: ... + +class DerBitString(DerObject): + value: bytes + def __init__(self, value: Optional[bytes]=..., implicit: Optional[int]=None, explicit: Optional[int]=None) -> None: ... + def encode(self) -> bytes: ... + def decode(self, der_encoded: bytes, strict: bool=...) -> DerBitString: ... + +DerSetElement = Union[bytes, int] + +class DerSetOf(DerObject): + def __init__(self, startSet: Optional[Set[DerSetElement]]=None, implicit: Optional[int]=None) -> None: ... + def __getitem__(self, n: int) -> DerSetElement: ... + def __iter__(self) -> Iterable: ... + def __len__(self) -> int: ... + def add(self, elem: DerSetElement) -> None: ... + def decode(self, der_encoded: bytes, strict: bool=...) -> DerObject: ... + def encode(self) -> bytes: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/number.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/number.py new file mode 100644 index 000000000..82652b26e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/number.py @@ -0,0 +1,1525 @@ +# +# number.py : Number-theoretic functions +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew M. Kuchling, Barry A. Warsaw, and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +# + +import math +import sys +import struct +from Crypto import Random +from Crypto.Util.py3compat import iter_range + +# Backward compatibility +_fastmath = None + + +def ceil_div(n, d): + """Return ceil(n/d), that is, the smallest integer r such that r*d >= n""" + + if d == 0: + raise ZeroDivisionError() + if (n < 0) or (d < 0): + raise ValueError("Non positive values") + r, q = divmod(n, d) + if (n != 0) and (q != 0): + r += 1 + return r + + +def size (N): + """Returns the size of the number N in bits.""" + + if N < 0: + raise ValueError("Size in bits only available for non-negative numbers") + return N.bit_length() + + +def getRandomInteger(N, randfunc=None): + """Return a random number at most N bits long. + + If :data:`randfunc` is omitted, then :meth:`Random.get_random_bytes` is used. + + .. deprecated:: 3.0 + This function is for internal use only and may be renamed or removed in + the future. Use :func:`Crypto.Random.random.getrandbits` instead. + """ + + if randfunc is None: + randfunc = Random.get_random_bytes + + S = randfunc(N>>3) + odd_bits = N % 8 + if odd_bits != 0: + rand_bits = ord(randfunc(1)) >> (8-odd_bits) + S = struct.pack('B', rand_bits) + S + value = bytes_to_long(S) + return value + +def getRandomRange(a, b, randfunc=None): + """Return a random number *n* so that *a <= n < b*. + + If :data:`randfunc` is omitted, then :meth:`Random.get_random_bytes` is used. + + .. deprecated:: 3.0 + This function is for internal use only and may be renamed or removed in + the future. Use :func:`Crypto.Random.random.randrange` instead. + """ + + range_ = b - a - 1 + bits = size(range_) + value = getRandomInteger(bits, randfunc) + while value > range_: + value = getRandomInteger(bits, randfunc) + return a + value + +def getRandomNBitInteger(N, randfunc=None): + """Return a random number with exactly N-bits, + i.e. a random number between 2**(N-1) and (2**N)-1. + + If :data:`randfunc` is omitted, then :meth:`Random.get_random_bytes` is used. + + .. deprecated:: 3.0 + This function is for internal use only and may be renamed or removed in + the future. + """ + + value = getRandomInteger (N-1, randfunc) + value |= 2 ** (N-1) # Ensure high bit is set + assert size(value) >= N + return value + + +if sys.version_info[:2] >= (3, 5): + + GCD = math.gcd + +else: + + def GCD(x,y): + """Greatest Common Denominator of :data:`x` and :data:`y`. 
+ """ + + x = abs(x) ; y = abs(y) + while x > 0: + x, y = y % x, x + return y + + +if sys.version_info[:2] >= (3, 8): + + def inverse(u, v): + """The inverse of :data:`u` *mod* :data:`v`.""" + + if v == 0: + raise ZeroDivisionError("Modulus cannot be zero") + if v < 0: + raise ValueError("Modulus cannot be negative") + + return pow(u, -1, v) + +else: + + def inverse(u, v): + """The inverse of :data:`u` *mod* :data:`v`.""" + + if v == 0: + raise ZeroDivisionError("Modulus cannot be zero") + if v < 0: + raise ValueError("Modulus cannot be negative") + + u3, v3 = u, v + u1, v1 = 1, 0 + while v3 > 0: + q = u3 // v3 + u1, v1 = v1, u1 - v1*q + u3, v3 = v3, u3 - v3*q + if u3 != 1: + raise ValueError("No inverse value can be computed") + while u1<0: + u1 = u1 + v + return u1 + +# Given a number of bits to generate and a random generation function, +# find a prime number of the appropriate size. + +def getPrime(N, randfunc=None): + """Return a random N-bit prime number. + + N must be an integer larger than 1. + If randfunc is omitted, then :meth:`Random.get_random_bytes` is used. + """ + if randfunc is None: + randfunc = Random.get_random_bytes + + if N < 2: + raise ValueError("N must be larger than 1") + + while True: + number = getRandomNBitInteger(N, randfunc) | 1 + if isPrime(number, randfunc=randfunc): + break + return number + + +def _rabinMillerTest(n, rounds, randfunc=None): + """_rabinMillerTest(n:long, rounds:int, randfunc:callable):int + Tests if n is prime. + Returns 0 when n is definitely composite. + Returns 1 when n is probably prime. + Returns 2 when n is definitely prime. + + If randfunc is omitted, then Random.new().read is used. + + This function is for internal use only and may be renamed or removed in + the future. + """ + # check special cases (n==2, n even, n < 2) + if n < 3 or (n & 1) == 0: + return n == 2 + # n might be very large so it might be beneficial to precalculate n-1 + n_1 = n - 1 + # determine m and b so that 2**b * m = n - 1 and b maximal + b = 0 + m = n_1 + while (m & 1) == 0: + b += 1 + m >>= 1 + + tested = [] + # we need to do at most n-2 rounds. + for i in iter_range (min (rounds, n-2)): + # randomly choose a < n and make sure it hasn't been tested yet + a = getRandomRange (2, n, randfunc) + while a in tested: + a = getRandomRange (2, n, randfunc) + tested.append (a) + # do the rabin-miller test + z = pow (a, m, n) # (a**m) % n + if z == 1 or z == n_1: + continue + composite = 1 + for r in iter_range(b): + z = (z * z) % n + if z == 1: + return 0 + elif z == n_1: + composite = 0 + break + if composite: + return 0 + return 1 + +def getStrongPrime(N, e=0, false_positive_prob=1e-6, randfunc=None): + r""" + Return a random strong *N*-bit prime number. + In this context, *p* is a strong prime if *p-1* and *p+1* have at + least one large prime factor. + + Args: + N (integer): the exact length of the strong prime. + It must be a multiple of 128 and > 512. + e (integer): if provided, the returned prime (minus 1) + will be coprime to *e* and thus suitable for RSA where + *e* is the public exponent. + false_positive_prob (float): + The statistical probability for the result not to be actually a + prime. It defaults to 10\ :sup:`-6`. + Note that the real probability of a false-positive is far less. This is + just the mathematically provable limit. + randfunc (callable): + A function that takes a parameter *N* and that returns + a random byte string of such length. + If omitted, :func:`Crypto.Random.get_random_bytes` is used. + Return: + The new strong prime. + + .. 
deprecated:: 3.0
+        This function is for internal use only and may be renamed or removed in
+        the future.
+    """
+
+    # This function was implemented following the
+    # instructions found in the paper:
+    #   "FAST GENERATION OF RANDOM, STRONG RSA PRIMES"
+    #   by Robert D. Silverman
+    #   RSA Laboratories
+    #   May 17, 1997
+    # which by the time of writing could be freely downloaded here:
+    # http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.17.2713&rep=rep1&type=pdf
+
+    if randfunc is None:
+        randfunc = Random.get_random_bytes
+
+    # Use the accelerator if available
+    if _fastmath is not None:
+        return _fastmath.getStrongPrime(long(N), long(e), false_positive_prob,
+                                        randfunc)
+
+    if (N < 512) or ((N % 128) != 0):
+        raise ValueError ("bits must be multiple of 128 and > 512")
+
+    rabin_miller_rounds = int(math.ceil(-math.log(false_positive_prob)/math.log(4)))
+
+    # calculate range for X
+    #   lower_bound = sqrt(2) * 2^{511 + 128*x}
+    #   upper_bound = 2^{512 + 128*x} - 1
+    x = (N - 512) >> 7
+    # We need to approximate the sqrt(2) in the lower_bound by an integer
+    # expression because floating point math overflows with these numbers
+    lower_bound = (14142135623730950489 * (2 ** (511 + 128*x))) // 10000000000000000000
+    upper_bound = (1 << (512 + 128*x)) - 1
+    # Randomly choose X in calculated range
+    X = getRandomRange (lower_bound, upper_bound, randfunc)
+
+    # generate p1 and p2
+    p = [0, 0]
+    for i in (0, 1):
+        # randomly choose 101-bit y
+        y = getRandomNBitInteger (101, randfunc)
+        # initialize the field for sieving
+        field = [0] * 5 * len (sieve_base)
+        # sieve the field
+        for prime in sieve_base:
+            offset = y % prime
+            for j in iter_range((prime - offset) % prime, len (field), prime):
+                field[j] = 1
+
+        # look for suitable p[i] starting at y
+        result = 0
+        for j in range(len(field)):
+            composite = field[j]
+            # look for next candidate
+            if composite:
+                continue
+            tmp = y + j
+            result = _rabinMillerTest (tmp, rabin_miller_rounds)
+            if result > 0:
+                p[i] = tmp
+                break
+        if result == 0:
+            raise RuntimeError ("Couldn't find prime in field. "
+                                "Developer: Increase field_size")
+
+    # Calculate R
+    #     R = (p2^{-1} mod p1) * p2 - (p1^{-1} mod p2) * p1
+    tmp1 = inverse (p[1], p[0]) * p[1]    # (p2^-1 mod p1)*p2
+    tmp2 = inverse (p[0], p[1]) * p[0]    # (p1^-1 mod p2)*p1
+    R = tmp1 - tmp2                       # (p2^-1 mod p1)*p2 - (p1^-1 mod p2)*p1
+
+    # search for final prime number starting by Y0
+    #    Y0 = X + (R - X mod p1p2)
+    increment = p[0] * p[1]
+    X = X + (R - (X % increment))
+    while 1:
+        is_possible_prime = 1
+        # first check candidate against sieve_base
+        for prime in sieve_base:
+            if (X % prime) == 0:
+                is_possible_prime = 0
+                break
+        # if e is given make sure that e and X-1 are coprime
+        # this is not necessarily a strong prime criterion but useful when
+        # creating them for RSA where the p-1 and q-1 should be coprime to
+        # the public exponent e
+        if e and is_possible_prime:
+            if e & 1:
+                if GCD(e, X-1) != 1:
+                    is_possible_prime = 0
+            else:
+                if GCD(e, (X-1) // 2) != 1:
+                    is_possible_prime = 0
+
+        # do some Rabin-Miller-Tests
+        if is_possible_prime:
+            result = _rabinMillerTest (X, rabin_miller_rounds)
+            if result > 0:
+                break
+        X += increment
+        # abort when X has more bits than requested
+        # TODO: maybe we shouldn't abort but rather start over.
+        if X >= 1 << N:
+            raise RuntimeError ("Couldn't find prime in field. "
+                                "Developer: Increase field_size")
+    return X
+
+def isPrime(N, false_positive_prob=1e-6, randfunc=None):
+    r"""Test if a number *N* is a prime.
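+
+    Internally, the candidate is first checked by trial division against
+    the small primes in ``sieve_base`` and only then put through the
+    probabilistic Rabin-Miller rounds, for instance::
+
+        >>> isPrime(13)
+        True
+        >>> isPrime(15)
+        False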
+
+    Args:
+        false_positive_prob (float):
+          The statistical probability for the result not to be actually a
+          prime. It defaults to 10\ :sup:`-6`.
+          Note that the real probability of a false-positive is far less.
+          This is just the mathematically provable limit.
+        randfunc (callable):
+          A function that takes a parameter *N* and that returns
+          a random byte string of such length.
+          If omitted, :func:`Crypto.Random.get_random_bytes` is used.
+
+    Return:
+        `True` if the input is indeed prime.
+    """
+
+    if randfunc is None:
+        randfunc = Random.get_random_bytes
+
+    if _fastmath is not None:
+        return _fastmath.isPrime(long(N), false_positive_prob, randfunc)
+
+    if N < 3 or N & 1 == 0:
+        return N == 2
+    for p in sieve_base:
+        if N == p:
+            return True
+        if N % p == 0:
+            return False
+
+    rounds = int(math.ceil(-math.log(false_positive_prob)/math.log(4)))
+    return bool(_rabinMillerTest(N, rounds, randfunc))
+
+
+# Improved conversion functions contributed by Barry Warsaw, after
+# careful benchmarking
+
+import struct
+
+def long_to_bytes(n, blocksize=0):
+    """Convert a positive integer to a byte string using big endian encoding.
+
+    If :data:`blocksize` is absent or zero, the byte string will
+    be of minimal length.
+
+    Otherwise, the length of the byte string is guaranteed to be a multiple
+    of :data:`blocksize`. If necessary, zeroes (``\\x00``) are added at the left.
+
+    .. note::
+        In Python 3, if you are sure that :data:`n` can fit into
+        :data:`blocksize` bytes, you can simply use the native method instead::
+
+            >>> n.to_bytes(blocksize, 'big')
+
+        For instance::
+
+            >>> n = 80
+            >>> n.to_bytes(2, 'big')
+            b'\\x00P'
+
+        However, and unlike this ``long_to_bytes()`` function,
+        an ``OverflowError`` exception is raised if :data:`n` does not fit.
+    """
+
+    if n < 0 or blocksize < 0:
+        raise ValueError("Values must be non-negative")
+
+    result = []
+    pack = struct.pack
+
+    # Fill the first block independently from the value of n
+    bsr = blocksize
+    while bsr >= 8:
+        result.insert(0, pack('>Q', n & 0xFFFFFFFFFFFFFFFF))
+        n = n >> 64
+        bsr -= 8
+
+    while bsr >= 4:
+        result.insert(0, pack('>I', n & 0xFFFFFFFF))
+        n = n >> 32
+        bsr -= 4
+
+    while bsr > 0:
+        result.insert(0, pack('>B', n & 0xFF))
+        n = n >> 8
+        bsr -= 1
+
+    if n == 0:
+        if len(result) == 0:
+            bresult = b'\x00'
+        else:
+            bresult = b''.join(result)
+    else:
+        # The encoded number exceeds the block size
+        while n > 0:
+            result.insert(0, pack('>Q', n & 0xFFFFFFFFFFFFFFFF))
+            n = n >> 64
+        result[0] = result[0].lstrip(b'\x00')
+        bresult = b''.join(result)
+    # bresult has minimum length here
+    if blocksize > 0:
+        target_len = ((len(bresult) - 1) // blocksize + 1) * blocksize
+        bresult = b'\x00' * (target_len - len(bresult)) + bresult
+
+    return bresult
+
+
+def bytes_to_long(s):
+    """Convert a byte string to a long integer (big endian).
+
+    In Python 3.2+, use the native method instead::
+
+        >>> int.from_bytes(s, 'big')
+
+    For instance::
+
+        >>> int.from_bytes(b'\x00P', 'big')
+        80
+
+    This is (essentially) the inverse of :func:`long_to_bytes`.
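+
+    The two conversions round-trip, for instance::
+
+        >>> bytes_to_long(long_to_bytes(4294967297))
+        4294967297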
+ """ + acc = 0 + + unpack = struct.unpack + + # Up to Python 2.7.4, struct.unpack can't work with bytearrays nor + # memoryviews + if sys.version_info[0:3] < (2, 7, 4): + if isinstance(s, bytearray): + s = bytes(s) + elif isinstance(s, memoryview): + s = s.tobytes() + + length = len(s) + if length % 4: + extra = (4 - length % 4) + s = b'\x00' * extra + s + length = length + extra + for i in range(0, length, 4): + acc = (acc << 32) + unpack('>I', s[i:i+4])[0] + return acc + + +# For backwards compatibility... +import warnings +def long2str(n, blocksize=0): + warnings.warn("long2str() has been replaced by long_to_bytes()") + return long_to_bytes(n, blocksize) +def str2long(s): + warnings.warn("str2long() has been replaced by bytes_to_long()") + return bytes_to_long(s) + + +# The first 10000 primes used for checking primality. +# This should be enough to eliminate most of the odd +# numbers before needing to do a Rabin-Miller test at all. +sieve_base = ( + 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, + 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, + 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, + 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, + 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, + 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, + 283, 293, 307, 311, 313, 317, 331, 337, 347, 349, + 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, + 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, + 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, + 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, + 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, + 661, 673, 677, 683, 691, 701, 709, 719, 727, 733, + 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, + 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, + 877, 881, 883, 887, 907, 911, 919, 929, 937, 941, + 947, 953, 967, 971, 977, 983, 991, 997, 1009, 1013, + 1019, 1021, 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069, + 1087, 1091, 1093, 1097, 1103, 1109, 1117, 1123, 1129, 1151, + 1153, 1163, 1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, + 1229, 1231, 1237, 1249, 1259, 1277, 1279, 1283, 1289, 1291, + 1297, 1301, 1303, 1307, 1319, 1321, 1327, 1361, 1367, 1373, + 1381, 1399, 1409, 1423, 1427, 1429, 1433, 1439, 1447, 1451, + 1453, 1459, 1471, 1481, 1483, 1487, 1489, 1493, 1499, 1511, + 1523, 1531, 1543, 1549, 1553, 1559, 1567, 1571, 1579, 1583, + 1597, 1601, 1607, 1609, 1613, 1619, 1621, 1627, 1637, 1657, + 1663, 1667, 1669, 1693, 1697, 1699, 1709, 1721, 1723, 1733, + 1741, 1747, 1753, 1759, 1777, 1783, 1787, 1789, 1801, 1811, + 1823, 1831, 1847, 1861, 1867, 1871, 1873, 1877, 1879, 1889, + 1901, 1907, 1913, 1931, 1933, 1949, 1951, 1973, 1979, 1987, + 1993, 1997, 1999, 2003, 2011, 2017, 2027, 2029, 2039, 2053, + 2063, 2069, 2081, 2083, 2087, 2089, 2099, 2111, 2113, 2129, + 2131, 2137, 2141, 2143, 2153, 2161, 2179, 2203, 2207, 2213, + 2221, 2237, 2239, 2243, 2251, 2267, 2269, 2273, 2281, 2287, + 2293, 2297, 2309, 2311, 2333, 2339, 2341, 2347, 2351, 2357, + 2371, 2377, 2381, 2383, 2389, 2393, 2399, 2411, 2417, 2423, + 2437, 2441, 2447, 2459, 2467, 2473, 2477, 2503, 2521, 2531, + 2539, 2543, 2549, 2551, 2557, 2579, 2591, 2593, 2609, 2617, + 2621, 2633, 2647, 2657, 2659, 2663, 2671, 2677, 2683, 2687, + 2689, 2693, 2699, 2707, 2711, 2713, 2719, 2729, 2731, 2741, + 2749, 2753, 2767, 2777, 2789, 2791, 2797, 2801, 2803, 2819, + 2833, 2837, 2843, 2851, 2857, 2861, 2879, 2887, 2897, 2903, + 2909, 2917, 2927, 2939, 2953, 2957, 2963, 2969, 2971, 2999, + 3001, 3011, 3019, 3023, 3037, 3041, 3049, 3061, 3067, 3079, + 3083, 3089, 3109, 3119, 3121, 3137, 3163, 3167, 3169, 
3181, + 3187, 3191, 3203, 3209, 3217, 3221, 3229, 3251, 3253, 3257, + 3259, 3271, 3299, 3301, 3307, 3313, 3319, 3323, 3329, 3331, + 3343, 3347, 3359, 3361, 3371, 3373, 3389, 3391, 3407, 3413, + 3433, 3449, 3457, 3461, 3463, 3467, 3469, 3491, 3499, 3511, + 3517, 3527, 3529, 3533, 3539, 3541, 3547, 3557, 3559, 3571, + 3581, 3583, 3593, 3607, 3613, 3617, 3623, 3631, 3637, 3643, + 3659, 3671, 3673, 3677, 3691, 3697, 3701, 3709, 3719, 3727, + 3733, 3739, 3761, 3767, 3769, 3779, 3793, 3797, 3803, 3821, + 3823, 3833, 3847, 3851, 3853, 3863, 3877, 3881, 3889, 3907, + 3911, 3917, 3919, 3923, 3929, 3931, 3943, 3947, 3967, 3989, + 4001, 4003, 4007, 4013, 4019, 4021, 4027, 4049, 4051, 4057, + 4073, 4079, 4091, 4093, 4099, 4111, 4127, 4129, 4133, 4139, + 4153, 4157, 4159, 4177, 4201, 4211, 4217, 4219, 4229, 4231, + 4241, 4243, 4253, 4259, 4261, 4271, 4273, 4283, 4289, 4297, + 4327, 4337, 4339, 4349, 4357, 4363, 4373, 4391, 4397, 4409, + 4421, 4423, 4441, 4447, 4451, 4457, 4463, 4481, 4483, 4493, + 4507, 4513, 4517, 4519, 4523, 4547, 4549, 4561, 4567, 4583, + 4591, 4597, 4603, 4621, 4637, 4639, 4643, 4649, 4651, 4657, + 4663, 4673, 4679, 4691, 4703, 4721, 4723, 4729, 4733, 4751, + 4759, 4783, 4787, 4789, 4793, 4799, 4801, 4813, 4817, 4831, + 4861, 4871, 4877, 4889, 4903, 4909, 4919, 4931, 4933, 4937, + 4943, 4951, 4957, 4967, 4969, 4973, 4987, 4993, 4999, 5003, + 5009, 5011, 5021, 5023, 5039, 5051, 5059, 5077, 5081, 5087, + 5099, 5101, 5107, 5113, 5119, 5147, 5153, 5167, 5171, 5179, + 5189, 5197, 5209, 5227, 5231, 5233, 5237, 5261, 5273, 5279, + 5281, 5297, 5303, 5309, 5323, 5333, 5347, 5351, 5381, 5387, + 5393, 5399, 5407, 5413, 5417, 5419, 5431, 5437, 5441, 5443, + 5449, 5471, 5477, 5479, 5483, 5501, 5503, 5507, 5519, 5521, + 5527, 5531, 5557, 5563, 5569, 5573, 5581, 5591, 5623, 5639, + 5641, 5647, 5651, 5653, 5657, 5659, 5669, 5683, 5689, 5693, + 5701, 5711, 5717, 5737, 5741, 5743, 5749, 5779, 5783, 5791, + 5801, 5807, 5813, 5821, 5827, 5839, 5843, 5849, 5851, 5857, + 5861, 5867, 5869, 5879, 5881, 5897, 5903, 5923, 5927, 5939, + 5953, 5981, 5987, 6007, 6011, 6029, 6037, 6043, 6047, 6053, + 6067, 6073, 6079, 6089, 6091, 6101, 6113, 6121, 6131, 6133, + 6143, 6151, 6163, 6173, 6197, 6199, 6203, 6211, 6217, 6221, + 6229, 6247, 6257, 6263, 6269, 6271, 6277, 6287, 6299, 6301, + 6311, 6317, 6323, 6329, 6337, 6343, 6353, 6359, 6361, 6367, + 6373, 6379, 6389, 6397, 6421, 6427, 6449, 6451, 6469, 6473, + 6481, 6491, 6521, 6529, 6547, 6551, 6553, 6563, 6569, 6571, + 6577, 6581, 6599, 6607, 6619, 6637, 6653, 6659, 6661, 6673, + 6679, 6689, 6691, 6701, 6703, 6709, 6719, 6733, 6737, 6761, + 6763, 6779, 6781, 6791, 6793, 6803, 6823, 6827, 6829, 6833, + 6841, 6857, 6863, 6869, 6871, 6883, 6899, 6907, 6911, 6917, + 6947, 6949, 6959, 6961, 6967, 6971, 6977, 6983, 6991, 6997, + 7001, 7013, 7019, 7027, 7039, 7043, 7057, 7069, 7079, 7103, + 7109, 7121, 7127, 7129, 7151, 7159, 7177, 7187, 7193, 7207, + 7211, 7213, 7219, 7229, 7237, 7243, 7247, 7253, 7283, 7297, + 7307, 7309, 7321, 7331, 7333, 7349, 7351, 7369, 7393, 7411, + 7417, 7433, 7451, 7457, 7459, 7477, 7481, 7487, 7489, 7499, + 7507, 7517, 7523, 7529, 7537, 7541, 7547, 7549, 7559, 7561, + 7573, 7577, 7583, 7589, 7591, 7603, 7607, 7621, 7639, 7643, + 7649, 7669, 7673, 7681, 7687, 7691, 7699, 7703, 7717, 7723, + 7727, 7741, 7753, 7757, 7759, 7789, 7793, 7817, 7823, 7829, + 7841, 7853, 7867, 7873, 7877, 7879, 7883, 7901, 7907, 7919, + 7927, 7933, 7937, 7949, 7951, 7963, 7993, 8009, 8011, 8017, + 8039, 8053, 8059, 8069, 8081, 8087, 8089, 8093, 8101, 8111, + 8117, 8123, 
8147, 8161, 8167, 8171, 8179, 8191, 8209, 8219, + 8221, 8231, 8233, 8237, 8243, 8263, 8269, 8273, 8287, 8291, + 8293, 8297, 8311, 8317, 8329, 8353, 8363, 8369, 8377, 8387, + 8389, 8419, 8423, 8429, 8431, 8443, 8447, 8461, 8467, 8501, + 8513, 8521, 8527, 8537, 8539, 8543, 8563, 8573, 8581, 8597, + 8599, 8609, 8623, 8627, 8629, 8641, 8647, 8663, 8669, 8677, + 8681, 8689, 8693, 8699, 8707, 8713, 8719, 8731, 8737, 8741, + 8747, 8753, 8761, 8779, 8783, 8803, 8807, 8819, 8821, 8831, + 8837, 8839, 8849, 8861, 8863, 8867, 8887, 8893, 8923, 8929, + 8933, 8941, 8951, 8963, 8969, 8971, 8999, 9001, 9007, 9011, + 9013, 9029, 9041, 9043, 9049, 9059, 9067, 9091, 9103, 9109, + 9127, 9133, 9137, 9151, 9157, 9161, 9173, 9181, 9187, 9199, + 9203, 9209, 9221, 9227, 9239, 9241, 9257, 9277, 9281, 9283, + 9293, 9311, 9319, 9323, 9337, 9341, 9343, 9349, 9371, 9377, + 9391, 9397, 9403, 9413, 9419, 9421, 9431, 9433, 9437, 9439, + 9461, 9463, 9467, 9473, 9479, 9491, 9497, 9511, 9521, 9533, + 9539, 9547, 9551, 9587, 9601, 9613, 9619, 9623, 9629, 9631, + 9643, 9649, 9661, 9677, 9679, 9689, 9697, 9719, 9721, 9733, + 9739, 9743, 9749, 9767, 9769, 9781, 9787, 9791, 9803, 9811, + 9817, 9829, 9833, 9839, 9851, 9857, 9859, 9871, 9883, 9887, + 9901, 9907, 9923, 9929, 9931, 9941, 9949, 9967, 9973, 10007, + 10009, 10037, 10039, 10061, 10067, 10069, 10079, 10091, 10093, 10099, + 10103, 10111, 10133, 10139, 10141, 10151, 10159, 10163, 10169, 10177, + 10181, 10193, 10211, 10223, 10243, 10247, 10253, 10259, 10267, 10271, + 10273, 10289, 10301, 10303, 10313, 10321, 10331, 10333, 10337, 10343, + 10357, 10369, 10391, 10399, 10427, 10429, 10433, 10453, 10457, 10459, + 10463, 10477, 10487, 10499, 10501, 10513, 10529, 10531, 10559, 10567, + 10589, 10597, 10601, 10607, 10613, 10627, 10631, 10639, 10651, 10657, + 10663, 10667, 10687, 10691, 10709, 10711, 10723, 10729, 10733, 10739, + 10753, 10771, 10781, 10789, 10799, 10831, 10837, 10847, 10853, 10859, + 10861, 10867, 10883, 10889, 10891, 10903, 10909, 10937, 10939, 10949, + 10957, 10973, 10979, 10987, 10993, 11003, 11027, 11047, 11057, 11059, + 11069, 11071, 11083, 11087, 11093, 11113, 11117, 11119, 11131, 11149, + 11159, 11161, 11171, 11173, 11177, 11197, 11213, 11239, 11243, 11251, + 11257, 11261, 11273, 11279, 11287, 11299, 11311, 11317, 11321, 11329, + 11351, 11353, 11369, 11383, 11393, 11399, 11411, 11423, 11437, 11443, + 11447, 11467, 11471, 11483, 11489, 11491, 11497, 11503, 11519, 11527, + 11549, 11551, 11579, 11587, 11593, 11597, 11617, 11621, 11633, 11657, + 11677, 11681, 11689, 11699, 11701, 11717, 11719, 11731, 11743, 11777, + 11779, 11783, 11789, 11801, 11807, 11813, 11821, 11827, 11831, 11833, + 11839, 11863, 11867, 11887, 11897, 11903, 11909, 11923, 11927, 11933, + 11939, 11941, 11953, 11959, 11969, 11971, 11981, 11987, 12007, 12011, + 12037, 12041, 12043, 12049, 12071, 12073, 12097, 12101, 12107, 12109, + 12113, 12119, 12143, 12149, 12157, 12161, 12163, 12197, 12203, 12211, + 12227, 12239, 12241, 12251, 12253, 12263, 12269, 12277, 12281, 12289, + 12301, 12323, 12329, 12343, 12347, 12373, 12377, 12379, 12391, 12401, + 12409, 12413, 12421, 12433, 12437, 12451, 12457, 12473, 12479, 12487, + 12491, 12497, 12503, 12511, 12517, 12527, 12539, 12541, 12547, 12553, + 12569, 12577, 12583, 12589, 12601, 12611, 12613, 12619, 12637, 12641, + 12647, 12653, 12659, 12671, 12689, 12697, 12703, 12713, 12721, 12739, + 12743, 12757, 12763, 12781, 12791, 12799, 12809, 12821, 12823, 12829, + 12841, 12853, 12889, 12893, 12899, 12907, 12911, 12917, 12919, 12923, + 12941, 12953, 12959, 12967, 
12973, 12979, 12983, 13001, 13003, 13007, + 13009, 13033, 13037, 13043, 13049, 13063, 13093, 13099, 13103, 13109, + 13121, 13127, 13147, 13151, 13159, 13163, 13171, 13177, 13183, 13187, + 13217, 13219, 13229, 13241, 13249, 13259, 13267, 13291, 13297, 13309, + 13313, 13327, 13331, 13337, 13339, 13367, 13381, 13397, 13399, 13411, + 13417, 13421, 13441, 13451, 13457, 13463, 13469, 13477, 13487, 13499, + 13513, 13523, 13537, 13553, 13567, 13577, 13591, 13597, 13613, 13619, + 13627, 13633, 13649, 13669, 13679, 13681, 13687, 13691, 13693, 13697, + 13709, 13711, 13721, 13723, 13729, 13751, 13757, 13759, 13763, 13781, + 13789, 13799, 13807, 13829, 13831, 13841, 13859, 13873, 13877, 13879, + 13883, 13901, 13903, 13907, 13913, 13921, 13931, 13933, 13963, 13967, + 13997, 13999, 14009, 14011, 14029, 14033, 14051, 14057, 14071, 14081, + 14083, 14087, 14107, 14143, 14149, 14153, 14159, 14173, 14177, 14197, + 14207, 14221, 14243, 14249, 14251, 14281, 14293, 14303, 14321, 14323, + 14327, 14341, 14347, 14369, 14387, 14389, 14401, 14407, 14411, 14419, + 14423, 14431, 14437, 14447, 14449, 14461, 14479, 14489, 14503, 14519, + 14533, 14537, 14543, 14549, 14551, 14557, 14561, 14563, 14591, 14593, + 14621, 14627, 14629, 14633, 14639, 14653, 14657, 14669, 14683, 14699, + 14713, 14717, 14723, 14731, 14737, 14741, 14747, 14753, 14759, 14767, + 14771, 14779, 14783, 14797, 14813, 14821, 14827, 14831, 14843, 14851, + 14867, 14869, 14879, 14887, 14891, 14897, 14923, 14929, 14939, 14947, + 14951, 14957, 14969, 14983, 15013, 15017, 15031, 15053, 15061, 15073, + 15077, 15083, 15091, 15101, 15107, 15121, 15131, 15137, 15139, 15149, + 15161, 15173, 15187, 15193, 15199, 15217, 15227, 15233, 15241, 15259, + 15263, 15269, 15271, 15277, 15287, 15289, 15299, 15307, 15313, 15319, + 15329, 15331, 15349, 15359, 15361, 15373, 15377, 15383, 15391, 15401, + 15413, 15427, 15439, 15443, 15451, 15461, 15467, 15473, 15493, 15497, + 15511, 15527, 15541, 15551, 15559, 15569, 15581, 15583, 15601, 15607, + 15619, 15629, 15641, 15643, 15647, 15649, 15661, 15667, 15671, 15679, + 15683, 15727, 15731, 15733, 15737, 15739, 15749, 15761, 15767, 15773, + 15787, 15791, 15797, 15803, 15809, 15817, 15823, 15859, 15877, 15881, + 15887, 15889, 15901, 15907, 15913, 15919, 15923, 15937, 15959, 15971, + 15973, 15991, 16001, 16007, 16033, 16057, 16061, 16063, 16067, 16069, + 16073, 16087, 16091, 16097, 16103, 16111, 16127, 16139, 16141, 16183, + 16187, 16189, 16193, 16217, 16223, 16229, 16231, 16249, 16253, 16267, + 16273, 16301, 16319, 16333, 16339, 16349, 16361, 16363, 16369, 16381, + 16411, 16417, 16421, 16427, 16433, 16447, 16451, 16453, 16477, 16481, + 16487, 16493, 16519, 16529, 16547, 16553, 16561, 16567, 16573, 16603, + 16607, 16619, 16631, 16633, 16649, 16651, 16657, 16661, 16673, 16691, + 16693, 16699, 16703, 16729, 16741, 16747, 16759, 16763, 16787, 16811, + 16823, 16829, 16831, 16843, 16871, 16879, 16883, 16889, 16901, 16903, + 16921, 16927, 16931, 16937, 16943, 16963, 16979, 16981, 16987, 16993, + 17011, 17021, 17027, 17029, 17033, 17041, 17047, 17053, 17077, 17093, + 17099, 17107, 17117, 17123, 17137, 17159, 17167, 17183, 17189, 17191, + 17203, 17207, 17209, 17231, 17239, 17257, 17291, 17293, 17299, 17317, + 17321, 17327, 17333, 17341, 17351, 17359, 17377, 17383, 17387, 17389, + 17393, 17401, 17417, 17419, 17431, 17443, 17449, 17467, 17471, 17477, + 17483, 17489, 17491, 17497, 17509, 17519, 17539, 17551, 17569, 17573, + 17579, 17581, 17597, 17599, 17609, 17623, 17627, 17657, 17659, 17669, + 17681, 17683, 17707, 17713, 17729, 17737, 17747, 
17749, 17761, 17783, + 17789, 17791, 17807, 17827, 17837, 17839, 17851, 17863, 17881, 17891, + 17903, 17909, 17911, 17921, 17923, 17929, 17939, 17957, 17959, 17971, + 17977, 17981, 17987, 17989, 18013, 18041, 18043, 18047, 18049, 18059, + 18061, 18077, 18089, 18097, 18119, 18121, 18127, 18131, 18133, 18143, + 18149, 18169, 18181, 18191, 18199, 18211, 18217, 18223, 18229, 18233, + 18251, 18253, 18257, 18269, 18287, 18289, 18301, 18307, 18311, 18313, + 18329, 18341, 18353, 18367, 18371, 18379, 18397, 18401, 18413, 18427, + 18433, 18439, 18443, 18451, 18457, 18461, 18481, 18493, 18503, 18517, + 18521, 18523, 18539, 18541, 18553, 18583, 18587, 18593, 18617, 18637, + 18661, 18671, 18679, 18691, 18701, 18713, 18719, 18731, 18743, 18749, + 18757, 18773, 18787, 18793, 18797, 18803, 18839, 18859, 18869, 18899, + 18911, 18913, 18917, 18919, 18947, 18959, 18973, 18979, 19001, 19009, + 19013, 19031, 19037, 19051, 19069, 19073, 19079, 19081, 19087, 19121, + 19139, 19141, 19157, 19163, 19181, 19183, 19207, 19211, 19213, 19219, + 19231, 19237, 19249, 19259, 19267, 19273, 19289, 19301, 19309, 19319, + 19333, 19373, 19379, 19381, 19387, 19391, 19403, 19417, 19421, 19423, + 19427, 19429, 19433, 19441, 19447, 19457, 19463, 19469, 19471, 19477, + 19483, 19489, 19501, 19507, 19531, 19541, 19543, 19553, 19559, 19571, + 19577, 19583, 19597, 19603, 19609, 19661, 19681, 19687, 19697, 19699, + 19709, 19717, 19727, 19739, 19751, 19753, 19759, 19763, 19777, 19793, + 19801, 19813, 19819, 19841, 19843, 19853, 19861, 19867, 19889, 19891, + 19913, 19919, 19927, 19937, 19949, 19961, 19963, 19973, 19979, 19991, + 19993, 19997, 20011, 20021, 20023, 20029, 20047, 20051, 20063, 20071, + 20089, 20101, 20107, 20113, 20117, 20123, 20129, 20143, 20147, 20149, + 20161, 20173, 20177, 20183, 20201, 20219, 20231, 20233, 20249, 20261, + 20269, 20287, 20297, 20323, 20327, 20333, 20341, 20347, 20353, 20357, + 20359, 20369, 20389, 20393, 20399, 20407, 20411, 20431, 20441, 20443, + 20477, 20479, 20483, 20507, 20509, 20521, 20533, 20543, 20549, 20551, + 20563, 20593, 20599, 20611, 20627, 20639, 20641, 20663, 20681, 20693, + 20707, 20717, 20719, 20731, 20743, 20747, 20749, 20753, 20759, 20771, + 20773, 20789, 20807, 20809, 20849, 20857, 20873, 20879, 20887, 20897, + 20899, 20903, 20921, 20929, 20939, 20947, 20959, 20963, 20981, 20983, + 21001, 21011, 21013, 21017, 21019, 21023, 21031, 21059, 21061, 21067, + 21089, 21101, 21107, 21121, 21139, 21143, 21149, 21157, 21163, 21169, + 21179, 21187, 21191, 21193, 21211, 21221, 21227, 21247, 21269, 21277, + 21283, 21313, 21317, 21319, 21323, 21341, 21347, 21377, 21379, 21383, + 21391, 21397, 21401, 21407, 21419, 21433, 21467, 21481, 21487, 21491, + 21493, 21499, 21503, 21517, 21521, 21523, 21529, 21557, 21559, 21563, + 21569, 21577, 21587, 21589, 21599, 21601, 21611, 21613, 21617, 21647, + 21649, 21661, 21673, 21683, 21701, 21713, 21727, 21737, 21739, 21751, + 21757, 21767, 21773, 21787, 21799, 21803, 21817, 21821, 21839, 21841, + 21851, 21859, 21863, 21871, 21881, 21893, 21911, 21929, 21937, 21943, + 21961, 21977, 21991, 21997, 22003, 22013, 22027, 22031, 22037, 22039, + 22051, 22063, 22067, 22073, 22079, 22091, 22093, 22109, 22111, 22123, + 22129, 22133, 22147, 22153, 22157, 22159, 22171, 22189, 22193, 22229, + 22247, 22259, 22271, 22273, 22277, 22279, 22283, 22291, 22303, 22307, + 22343, 22349, 22367, 22369, 22381, 22391, 22397, 22409, 22433, 22441, + 22447, 22453, 22469, 22481, 22483, 22501, 22511, 22531, 22541, 22543, + 22549, 22567, 22571, 22573, 22613, 22619, 22621, 22637, 22639, 22643, + 
22651, 22669, 22679, 22691, 22697, 22699, 22709, 22717, 22721, 22727, + 22739, 22741, 22751, 22769, 22777, 22783, 22787, 22807, 22811, 22817, + 22853, 22859, 22861, 22871, 22877, 22901, 22907, 22921, 22937, 22943, + 22961, 22963, 22973, 22993, 23003, 23011, 23017, 23021, 23027, 23029, + 23039, 23041, 23053, 23057, 23059, 23063, 23071, 23081, 23087, 23099, + 23117, 23131, 23143, 23159, 23167, 23173, 23189, 23197, 23201, 23203, + 23209, 23227, 23251, 23269, 23279, 23291, 23293, 23297, 23311, 23321, + 23327, 23333, 23339, 23357, 23369, 23371, 23399, 23417, 23431, 23447, + 23459, 23473, 23497, 23509, 23531, 23537, 23539, 23549, 23557, 23561, + 23563, 23567, 23581, 23593, 23599, 23603, 23609, 23623, 23627, 23629, + 23633, 23663, 23669, 23671, 23677, 23687, 23689, 23719, 23741, 23743, + 23747, 23753, 23761, 23767, 23773, 23789, 23801, 23813, 23819, 23827, + 23831, 23833, 23857, 23869, 23873, 23879, 23887, 23893, 23899, 23909, + 23911, 23917, 23929, 23957, 23971, 23977, 23981, 23993, 24001, 24007, + 24019, 24023, 24029, 24043, 24049, 24061, 24071, 24077, 24083, 24091, + 24097, 24103, 24107, 24109, 24113, 24121, 24133, 24137, 24151, 24169, + 24179, 24181, 24197, 24203, 24223, 24229, 24239, 24247, 24251, 24281, + 24317, 24329, 24337, 24359, 24371, 24373, 24379, 24391, 24407, 24413, + 24419, 24421, 24439, 24443, 24469, 24473, 24481, 24499, 24509, 24517, + 24527, 24533, 24547, 24551, 24571, 24593, 24611, 24623, 24631, 24659, + 24671, 24677, 24683, 24691, 24697, 24709, 24733, 24749, 24763, 24767, + 24781, 24793, 24799, 24809, 24821, 24841, 24847, 24851, 24859, 24877, + 24889, 24907, 24917, 24919, 24923, 24943, 24953, 24967, 24971, 24977, + 24979, 24989, 25013, 25031, 25033, 25037, 25057, 25073, 25087, 25097, + 25111, 25117, 25121, 25127, 25147, 25153, 25163, 25169, 25171, 25183, + 25189, 25219, 25229, 25237, 25243, 25247, 25253, 25261, 25301, 25303, + 25307, 25309, 25321, 25339, 25343, 25349, 25357, 25367, 25373, 25391, + 25409, 25411, 25423, 25439, 25447, 25453, 25457, 25463, 25469, 25471, + 25523, 25537, 25541, 25561, 25577, 25579, 25583, 25589, 25601, 25603, + 25609, 25621, 25633, 25639, 25643, 25657, 25667, 25673, 25679, 25693, + 25703, 25717, 25733, 25741, 25747, 25759, 25763, 25771, 25793, 25799, + 25801, 25819, 25841, 25847, 25849, 25867, 25873, 25889, 25903, 25913, + 25919, 25931, 25933, 25939, 25943, 25951, 25969, 25981, 25997, 25999, + 26003, 26017, 26021, 26029, 26041, 26053, 26083, 26099, 26107, 26111, + 26113, 26119, 26141, 26153, 26161, 26171, 26177, 26183, 26189, 26203, + 26209, 26227, 26237, 26249, 26251, 26261, 26263, 26267, 26293, 26297, + 26309, 26317, 26321, 26339, 26347, 26357, 26371, 26387, 26393, 26399, + 26407, 26417, 26423, 26431, 26437, 26449, 26459, 26479, 26489, 26497, + 26501, 26513, 26539, 26557, 26561, 26573, 26591, 26597, 26627, 26633, + 26641, 26647, 26669, 26681, 26683, 26687, 26693, 26699, 26701, 26711, + 26713, 26717, 26723, 26729, 26731, 26737, 26759, 26777, 26783, 26801, + 26813, 26821, 26833, 26839, 26849, 26861, 26863, 26879, 26881, 26891, + 26893, 26903, 26921, 26927, 26947, 26951, 26953, 26959, 26981, 26987, + 26993, 27011, 27017, 27031, 27043, 27059, 27061, 27067, 27073, 27077, + 27091, 27103, 27107, 27109, 27127, 27143, 27179, 27191, 27197, 27211, + 27239, 27241, 27253, 27259, 27271, 27277, 27281, 27283, 27299, 27329, + 27337, 27361, 27367, 27397, 27407, 27409, 27427, 27431, 27437, 27449, + 27457, 27479, 27481, 27487, 27509, 27527, 27529, 27539, 27541, 27551, + 27581, 27583, 27611, 27617, 27631, 27647, 27653, 27673, 27689, 27691, + 27697, 27701, 27733, 
27737, 27739, 27743, 27749, 27751, 27763, 27767, + 27773, 27779, 27791, 27793, 27799, 27803, 27809, 27817, 27823, 27827, + 27847, 27851, 27883, 27893, 27901, 27917, 27919, 27941, 27943, 27947, + 27953, 27961, 27967, 27983, 27997, 28001, 28019, 28027, 28031, 28051, + 28057, 28069, 28081, 28087, 28097, 28099, 28109, 28111, 28123, 28151, + 28163, 28181, 28183, 28201, 28211, 28219, 28229, 28277, 28279, 28283, + 28289, 28297, 28307, 28309, 28319, 28349, 28351, 28387, 28393, 28403, + 28409, 28411, 28429, 28433, 28439, 28447, 28463, 28477, 28493, 28499, + 28513, 28517, 28537, 28541, 28547, 28549, 28559, 28571, 28573, 28579, + 28591, 28597, 28603, 28607, 28619, 28621, 28627, 28631, 28643, 28649, + 28657, 28661, 28663, 28669, 28687, 28697, 28703, 28711, 28723, 28729, + 28751, 28753, 28759, 28771, 28789, 28793, 28807, 28813, 28817, 28837, + 28843, 28859, 28867, 28871, 28879, 28901, 28909, 28921, 28927, 28933, + 28949, 28961, 28979, 29009, 29017, 29021, 29023, 29027, 29033, 29059, + 29063, 29077, 29101, 29123, 29129, 29131, 29137, 29147, 29153, 29167, + 29173, 29179, 29191, 29201, 29207, 29209, 29221, 29231, 29243, 29251, + 29269, 29287, 29297, 29303, 29311, 29327, 29333, 29339, 29347, 29363, + 29383, 29387, 29389, 29399, 29401, 29411, 29423, 29429, 29437, 29443, + 29453, 29473, 29483, 29501, 29527, 29531, 29537, 29567, 29569, 29573, + 29581, 29587, 29599, 29611, 29629, 29633, 29641, 29663, 29669, 29671, + 29683, 29717, 29723, 29741, 29753, 29759, 29761, 29789, 29803, 29819, + 29833, 29837, 29851, 29863, 29867, 29873, 29879, 29881, 29917, 29921, + 29927, 29947, 29959, 29983, 29989, 30011, 30013, 30029, 30047, 30059, + 30071, 30089, 30091, 30097, 30103, 30109, 30113, 30119, 30133, 30137, + 30139, 30161, 30169, 30181, 30187, 30197, 30203, 30211, 30223, 30241, + 30253, 30259, 30269, 30271, 30293, 30307, 30313, 30319, 30323, 30341, + 30347, 30367, 30389, 30391, 30403, 30427, 30431, 30449, 30467, 30469, + 30491, 30493, 30497, 30509, 30517, 30529, 30539, 30553, 30557, 30559, + 30577, 30593, 30631, 30637, 30643, 30649, 30661, 30671, 30677, 30689, + 30697, 30703, 30707, 30713, 30727, 30757, 30763, 30773, 30781, 30803, + 30809, 30817, 30829, 30839, 30841, 30851, 30853, 30859, 30869, 30871, + 30881, 30893, 30911, 30931, 30937, 30941, 30949, 30971, 30977, 30983, + 31013, 31019, 31033, 31039, 31051, 31063, 31069, 31079, 31081, 31091, + 31121, 31123, 31139, 31147, 31151, 31153, 31159, 31177, 31181, 31183, + 31189, 31193, 31219, 31223, 31231, 31237, 31247, 31249, 31253, 31259, + 31267, 31271, 31277, 31307, 31319, 31321, 31327, 31333, 31337, 31357, + 31379, 31387, 31391, 31393, 31397, 31469, 31477, 31481, 31489, 31511, + 31513, 31517, 31531, 31541, 31543, 31547, 31567, 31573, 31583, 31601, + 31607, 31627, 31643, 31649, 31657, 31663, 31667, 31687, 31699, 31721, + 31723, 31727, 31729, 31741, 31751, 31769, 31771, 31793, 31799, 31817, + 31847, 31849, 31859, 31873, 31883, 31891, 31907, 31957, 31963, 31973, + 31981, 31991, 32003, 32009, 32027, 32029, 32051, 32057, 32059, 32063, + 32069, 32077, 32083, 32089, 32099, 32117, 32119, 32141, 32143, 32159, + 32173, 32183, 32189, 32191, 32203, 32213, 32233, 32237, 32251, 32257, + 32261, 32297, 32299, 32303, 32309, 32321, 32323, 32327, 32341, 32353, + 32359, 32363, 32369, 32371, 32377, 32381, 32401, 32411, 32413, 32423, + 32429, 32441, 32443, 32467, 32479, 32491, 32497, 32503, 32507, 32531, + 32533, 32537, 32561, 32563, 32569, 32573, 32579, 32587, 32603, 32609, + 32611, 32621, 32633, 32647, 32653, 32687, 32693, 32707, 32713, 32717, + 32719, 32749, 32771, 32779, 32783, 32789, 
32797, 32801, 32803, 32831, + 32833, 32839, 32843, 32869, 32887, 32909, 32911, 32917, 32933, 32939, + 32941, 32957, 32969, 32971, 32983, 32987, 32993, 32999, 33013, 33023, + 33029, 33037, 33049, 33053, 33071, 33073, 33083, 33091, 33107, 33113, + 33119, 33149, 33151, 33161, 33179, 33181, 33191, 33199, 33203, 33211, + 33223, 33247, 33287, 33289, 33301, 33311, 33317, 33329, 33331, 33343, + 33347, 33349, 33353, 33359, 33377, 33391, 33403, 33409, 33413, 33427, + 33457, 33461, 33469, 33479, 33487, 33493, 33503, 33521, 33529, 33533, + 33547, 33563, 33569, 33577, 33581, 33587, 33589, 33599, 33601, 33613, + 33617, 33619, 33623, 33629, 33637, 33641, 33647, 33679, 33703, 33713, + 33721, 33739, 33749, 33751, 33757, 33767, 33769, 33773, 33791, 33797, + 33809, 33811, 33827, 33829, 33851, 33857, 33863, 33871, 33889, 33893, + 33911, 33923, 33931, 33937, 33941, 33961, 33967, 33997, 34019, 34031, + 34033, 34039, 34057, 34061, 34123, 34127, 34129, 34141, 34147, 34157, + 34159, 34171, 34183, 34211, 34213, 34217, 34231, 34253, 34259, 34261, + 34267, 34273, 34283, 34297, 34301, 34303, 34313, 34319, 34327, 34337, + 34351, 34361, 34367, 34369, 34381, 34403, 34421, 34429, 34439, 34457, + 34469, 34471, 34483, 34487, 34499, 34501, 34511, 34513, 34519, 34537, + 34543, 34549, 34583, 34589, 34591, 34603, 34607, 34613, 34631, 34649, + 34651, 34667, 34673, 34679, 34687, 34693, 34703, 34721, 34729, 34739, + 34747, 34757, 34759, 34763, 34781, 34807, 34819, 34841, 34843, 34847, + 34849, 34871, 34877, 34883, 34897, 34913, 34919, 34939, 34949, 34961, + 34963, 34981, 35023, 35027, 35051, 35053, 35059, 35069, 35081, 35083, + 35089, 35099, 35107, 35111, 35117, 35129, 35141, 35149, 35153, 35159, + 35171, 35201, 35221, 35227, 35251, 35257, 35267, 35279, 35281, 35291, + 35311, 35317, 35323, 35327, 35339, 35353, 35363, 35381, 35393, 35401, + 35407, 35419, 35423, 35437, 35447, 35449, 35461, 35491, 35507, 35509, + 35521, 35527, 35531, 35533, 35537, 35543, 35569, 35573, 35591, 35593, + 35597, 35603, 35617, 35671, 35677, 35729, 35731, 35747, 35753, 35759, + 35771, 35797, 35801, 35803, 35809, 35831, 35837, 35839, 35851, 35863, + 35869, 35879, 35897, 35899, 35911, 35923, 35933, 35951, 35963, 35969, + 35977, 35983, 35993, 35999, 36007, 36011, 36013, 36017, 36037, 36061, + 36067, 36073, 36083, 36097, 36107, 36109, 36131, 36137, 36151, 36161, + 36187, 36191, 36209, 36217, 36229, 36241, 36251, 36263, 36269, 36277, + 36293, 36299, 36307, 36313, 36319, 36341, 36343, 36353, 36373, 36383, + 36389, 36433, 36451, 36457, 36467, 36469, 36473, 36479, 36493, 36497, + 36523, 36527, 36529, 36541, 36551, 36559, 36563, 36571, 36583, 36587, + 36599, 36607, 36629, 36637, 36643, 36653, 36671, 36677, 36683, 36691, + 36697, 36709, 36713, 36721, 36739, 36749, 36761, 36767, 36779, 36781, + 36787, 36791, 36793, 36809, 36821, 36833, 36847, 36857, 36871, 36877, + 36887, 36899, 36901, 36913, 36919, 36923, 36929, 36931, 36943, 36947, + 36973, 36979, 36997, 37003, 37013, 37019, 37021, 37039, 37049, 37057, + 37061, 37087, 37097, 37117, 37123, 37139, 37159, 37171, 37181, 37189, + 37199, 37201, 37217, 37223, 37243, 37253, 37273, 37277, 37307, 37309, + 37313, 37321, 37337, 37339, 37357, 37361, 37363, 37369, 37379, 37397, + 37409, 37423, 37441, 37447, 37463, 37483, 37489, 37493, 37501, 37507, + 37511, 37517, 37529, 37537, 37547, 37549, 37561, 37567, 37571, 37573, + 37579, 37589, 37591, 37607, 37619, 37633, 37643, 37649, 37657, 37663, + 37691, 37693, 37699, 37717, 37747, 37781, 37783, 37799, 37811, 37813, + 37831, 37847, 37853, 37861, 37871, 37879, 37889, 37897, 37907, 
37951, + 37957, 37963, 37967, 37987, 37991, 37993, 37997, 38011, 38039, 38047, + 38053, 38069, 38083, 38113, 38119, 38149, 38153, 38167, 38177, 38183, + 38189, 38197, 38201, 38219, 38231, 38237, 38239, 38261, 38273, 38281, + 38287, 38299, 38303, 38317, 38321, 38327, 38329, 38333, 38351, 38371, + 38377, 38393, 38431, 38447, 38449, 38453, 38459, 38461, 38501, 38543, + 38557, 38561, 38567, 38569, 38593, 38603, 38609, 38611, 38629, 38639, + 38651, 38653, 38669, 38671, 38677, 38693, 38699, 38707, 38711, 38713, + 38723, 38729, 38737, 38747, 38749, 38767, 38783, 38791, 38803, 38821, + 38833, 38839, 38851, 38861, 38867, 38873, 38891, 38903, 38917, 38921, + 38923, 38933, 38953, 38959, 38971, 38977, 38993, 39019, 39023, 39041, + 39043, 39047, 39079, 39089, 39097, 39103, 39107, 39113, 39119, 39133, + 39139, 39157, 39161, 39163, 39181, 39191, 39199, 39209, 39217, 39227, + 39229, 39233, 39239, 39241, 39251, 39293, 39301, 39313, 39317, 39323, + 39341, 39343, 39359, 39367, 39371, 39373, 39383, 39397, 39409, 39419, + 39439, 39443, 39451, 39461, 39499, 39503, 39509, 39511, 39521, 39541, + 39551, 39563, 39569, 39581, 39607, 39619, 39623, 39631, 39659, 39667, + 39671, 39679, 39703, 39709, 39719, 39727, 39733, 39749, 39761, 39769, + 39779, 39791, 39799, 39821, 39827, 39829, 39839, 39841, 39847, 39857, + 39863, 39869, 39877, 39883, 39887, 39901, 39929, 39937, 39953, 39971, + 39979, 39983, 39989, 40009, 40013, 40031, 40037, 40039, 40063, 40087, + 40093, 40099, 40111, 40123, 40127, 40129, 40151, 40153, 40163, 40169, + 40177, 40189, 40193, 40213, 40231, 40237, 40241, 40253, 40277, 40283, + 40289, 40343, 40351, 40357, 40361, 40387, 40423, 40427, 40429, 40433, + 40459, 40471, 40483, 40487, 40493, 40499, 40507, 40519, 40529, 40531, + 40543, 40559, 40577, 40583, 40591, 40597, 40609, 40627, 40637, 40639, + 40693, 40697, 40699, 40709, 40739, 40751, 40759, 40763, 40771, 40787, + 40801, 40813, 40819, 40823, 40829, 40841, 40847, 40849, 40853, 40867, + 40879, 40883, 40897, 40903, 40927, 40933, 40939, 40949, 40961, 40973, + 40993, 41011, 41017, 41023, 41039, 41047, 41051, 41057, 41077, 41081, + 41113, 41117, 41131, 41141, 41143, 41149, 41161, 41177, 41179, 41183, + 41189, 41201, 41203, 41213, 41221, 41227, 41231, 41233, 41243, 41257, + 41263, 41269, 41281, 41299, 41333, 41341, 41351, 41357, 41381, 41387, + 41389, 41399, 41411, 41413, 41443, 41453, 41467, 41479, 41491, 41507, + 41513, 41519, 41521, 41539, 41543, 41549, 41579, 41593, 41597, 41603, + 41609, 41611, 41617, 41621, 41627, 41641, 41647, 41651, 41659, 41669, + 41681, 41687, 41719, 41729, 41737, 41759, 41761, 41771, 41777, 41801, + 41809, 41813, 41843, 41849, 41851, 41863, 41879, 41887, 41893, 41897, + 41903, 41911, 41927, 41941, 41947, 41953, 41957, 41959, 41969, 41981, + 41983, 41999, 42013, 42017, 42019, 42023, 42043, 42061, 42071, 42073, + 42083, 42089, 42101, 42131, 42139, 42157, 42169, 42179, 42181, 42187, + 42193, 42197, 42209, 42221, 42223, 42227, 42239, 42257, 42281, 42283, + 42293, 42299, 42307, 42323, 42331, 42337, 42349, 42359, 42373, 42379, + 42391, 42397, 42403, 42407, 42409, 42433, 42437, 42443, 42451, 42457, + 42461, 42463, 42467, 42473, 42487, 42491, 42499, 42509, 42533, 42557, + 42569, 42571, 42577, 42589, 42611, 42641, 42643, 42649, 42667, 42677, + 42683, 42689, 42697, 42701, 42703, 42709, 42719, 42727, 42737, 42743, + 42751, 42767, 42773, 42787, 42793, 42797, 42821, 42829, 42839, 42841, + 42853, 42859, 42863, 42899, 42901, 42923, 42929, 42937, 42943, 42953, + 42961, 42967, 42979, 42989, 43003, 43013, 43019, 43037, 43049, 43051, + 43063, 43067, 
43093, 43103, 43117, 43133, 43151, 43159, 43177, 43189, + 43201, 43207, 43223, 43237, 43261, 43271, 43283, 43291, 43313, 43319, + 43321, 43331, 43391, 43397, 43399, 43403, 43411, 43427, 43441, 43451, + 43457, 43481, 43487, 43499, 43517, 43541, 43543, 43573, 43577, 43579, + 43591, 43597, 43607, 43609, 43613, 43627, 43633, 43649, 43651, 43661, + 43669, 43691, 43711, 43717, 43721, 43753, 43759, 43777, 43781, 43783, + 43787, 43789, 43793, 43801, 43853, 43867, 43889, 43891, 43913, 43933, + 43943, 43951, 43961, 43963, 43969, 43973, 43987, 43991, 43997, 44017, + 44021, 44027, 44029, 44041, 44053, 44059, 44071, 44087, 44089, 44101, + 44111, 44119, 44123, 44129, 44131, 44159, 44171, 44179, 44189, 44201, + 44203, 44207, 44221, 44249, 44257, 44263, 44267, 44269, 44273, 44279, + 44281, 44293, 44351, 44357, 44371, 44381, 44383, 44389, 44417, 44449, + 44453, 44483, 44491, 44497, 44501, 44507, 44519, 44531, 44533, 44537, + 44543, 44549, 44563, 44579, 44587, 44617, 44621, 44623, 44633, 44641, + 44647, 44651, 44657, 44683, 44687, 44699, 44701, 44711, 44729, 44741, + 44753, 44771, 44773, 44777, 44789, 44797, 44809, 44819, 44839, 44843, + 44851, 44867, 44879, 44887, 44893, 44909, 44917, 44927, 44939, 44953, + 44959, 44963, 44971, 44983, 44987, 45007, 45013, 45053, 45061, 45077, + 45083, 45119, 45121, 45127, 45131, 45137, 45139, 45161, 45179, 45181, + 45191, 45197, 45233, 45247, 45259, 45263, 45281, 45289, 45293, 45307, + 45317, 45319, 45329, 45337, 45341, 45343, 45361, 45377, 45389, 45403, + 45413, 45427, 45433, 45439, 45481, 45491, 45497, 45503, 45523, 45533, + 45541, 45553, 45557, 45569, 45587, 45589, 45599, 45613, 45631, 45641, + 45659, 45667, 45673, 45677, 45691, 45697, 45707, 45737, 45751, 45757, + 45763, 45767, 45779, 45817, 45821, 45823, 45827, 45833, 45841, 45853, + 45863, 45869, 45887, 45893, 45943, 45949, 45953, 45959, 45971, 45979, + 45989, 46021, 46027, 46049, 46051, 46061, 46073, 46091, 46093, 46099, + 46103, 46133, 46141, 46147, 46153, 46171, 46181, 46183, 46187, 46199, + 46219, 46229, 46237, 46261, 46271, 46273, 46279, 46301, 46307, 46309, + 46327, 46337, 46349, 46351, 46381, 46399, 46411, 46439, 46441, 46447, + 46451, 46457, 46471, 46477, 46489, 46499, 46507, 46511, 46523, 46549, + 46559, 46567, 46573, 46589, 46591, 46601, 46619, 46633, 46639, 46643, + 46649, 46663, 46679, 46681, 46687, 46691, 46703, 46723, 46727, 46747, + 46751, 46757, 46769, 46771, 46807, 46811, 46817, 46819, 46829, 46831, + 46853, 46861, 46867, 46877, 46889, 46901, 46919, 46933, 46957, 46993, + 46997, 47017, 47041, 47051, 47057, 47059, 47087, 47093, 47111, 47119, + 47123, 47129, 47137, 47143, 47147, 47149, 47161, 47189, 47207, 47221, + 47237, 47251, 47269, 47279, 47287, 47293, 47297, 47303, 47309, 47317, + 47339, 47351, 47353, 47363, 47381, 47387, 47389, 47407, 47417, 47419, + 47431, 47441, 47459, 47491, 47497, 47501, 47507, 47513, 47521, 47527, + 47533, 47543, 47563, 47569, 47581, 47591, 47599, 47609, 47623, 47629, + 47639, 47653, 47657, 47659, 47681, 47699, 47701, 47711, 47713, 47717, + 47737, 47741, 47743, 47777, 47779, 47791, 47797, 47807, 47809, 47819, + 47837, 47843, 47857, 47869, 47881, 47903, 47911, 47917, 47933, 47939, + 47947, 47951, 47963, 47969, 47977, 47981, 48017, 48023, 48029, 48049, + 48073, 48079, 48091, 48109, 48119, 48121, 48131, 48157, 48163, 48179, + 48187, 48193, 48197, 48221, 48239, 48247, 48259, 48271, 48281, 48299, + 48311, 48313, 48337, 48341, 48353, 48371, 48383, 48397, 48407, 48409, + 48413, 48437, 48449, 48463, 48473, 48479, 48481, 48487, 48491, 48497, + 48523, 48527, 48533, 48539, 48541, 
48563, 48571, 48589, 48593, 48611, + 48619, 48623, 48647, 48649, 48661, 48673, 48677, 48679, 48731, 48733, + 48751, 48757, 48761, 48767, 48779, 48781, 48787, 48799, 48809, 48817, + 48821, 48823, 48847, 48857, 48859, 48869, 48871, 48883, 48889, 48907, + 48947, 48953, 48973, 48989, 48991, 49003, 49009, 49019, 49031, 49033, + 49037, 49043, 49057, 49069, 49081, 49103, 49109, 49117, 49121, 49123, + 49139, 49157, 49169, 49171, 49177, 49193, 49199, 49201, 49207, 49211, + 49223, 49253, 49261, 49277, 49279, 49297, 49307, 49331, 49333, 49339, + 49363, 49367, 49369, 49391, 49393, 49409, 49411, 49417, 49429, 49433, + 49451, 49459, 49463, 49477, 49481, 49499, 49523, 49529, 49531, 49537, + 49547, 49549, 49559, 49597, 49603, 49613, 49627, 49633, 49639, 49663, + 49667, 49669, 49681, 49697, 49711, 49727, 49739, 49741, 49747, 49757, + 49783, 49787, 49789, 49801, 49807, 49811, 49823, 49831, 49843, 49853, + 49871, 49877, 49891, 49919, 49921, 49927, 49937, 49939, 49943, 49957, + 49991, 49993, 49999, 50021, 50023, 50033, 50047, 50051, 50053, 50069, + 50077, 50087, 50093, 50101, 50111, 50119, 50123, 50129, 50131, 50147, + 50153, 50159, 50177, 50207, 50221, 50227, 50231, 50261, 50263, 50273, + 50287, 50291, 50311, 50321, 50329, 50333, 50341, 50359, 50363, 50377, + 50383, 50387, 50411, 50417, 50423, 50441, 50459, 50461, 50497, 50503, + 50513, 50527, 50539, 50543, 50549, 50551, 50581, 50587, 50591, 50593, + 50599, 50627, 50647, 50651, 50671, 50683, 50707, 50723, 50741, 50753, + 50767, 50773, 50777, 50789, 50821, 50833, 50839, 50849, 50857, 50867, + 50873, 50891, 50893, 50909, 50923, 50929, 50951, 50957, 50969, 50971, + 50989, 50993, 51001, 51031, 51043, 51047, 51059, 51061, 51071, 51109, + 51131, 51133, 51137, 51151, 51157, 51169, 51193, 51197, 51199, 51203, + 51217, 51229, 51239, 51241, 51257, 51263, 51283, 51287, 51307, 51329, + 51341, 51343, 51347, 51349, 51361, 51383, 51407, 51413, 51419, 51421, + 51427, 51431, 51437, 51439, 51449, 51461, 51473, 51479, 51481, 51487, + 51503, 51511, 51517, 51521, 51539, 51551, 51563, 51577, 51581, 51593, + 51599, 51607, 51613, 51631, 51637, 51647, 51659, 51673, 51679, 51683, + 51691, 51713, 51719, 51721, 51749, 51767, 51769, 51787, 51797, 51803, + 51817, 51827, 51829, 51839, 51853, 51859, 51869, 51871, 51893, 51899, + 51907, 51913, 51929, 51941, 51949, 51971, 51973, 51977, 51991, 52009, + 52021, 52027, 52051, 52057, 52067, 52069, 52081, 52103, 52121, 52127, + 52147, 52153, 52163, 52177, 52181, 52183, 52189, 52201, 52223, 52237, + 52249, 52253, 52259, 52267, 52289, 52291, 52301, 52313, 52321, 52361, + 52363, 52369, 52379, 52387, 52391, 52433, 52453, 52457, 52489, 52501, + 52511, 52517, 52529, 52541, 52543, 52553, 52561, 52567, 52571, 52579, + 52583, 52609, 52627, 52631, 52639, 52667, 52673, 52691, 52697, 52709, + 52711, 52721, 52727, 52733, 52747, 52757, 52769, 52783, 52807, 52813, + 52817, 52837, 52859, 52861, 52879, 52883, 52889, 52901, 52903, 52919, + 52937, 52951, 52957, 52963, 52967, 52973, 52981, 52999, 53003, 53017, + 53047, 53051, 53069, 53077, 53087, 53089, 53093, 53101, 53113, 53117, + 53129, 53147, 53149, 53161, 53171, 53173, 53189, 53197, 53201, 53231, + 53233, 53239, 53267, 53269, 53279, 53281, 53299, 53309, 53323, 53327, + 53353, 53359, 53377, 53381, 53401, 53407, 53411, 53419, 53437, 53441, + 53453, 53479, 53503, 53507, 53527, 53549, 53551, 53569, 53591, 53593, + 53597, 53609, 53611, 53617, 53623, 53629, 53633, 53639, 53653, 53657, + 53681, 53693, 53699, 53717, 53719, 53731, 53759, 53773, 53777, 53783, + 53791, 53813, 53819, 53831, 53849, 53857, 53861, 53881, 
53887, 53891, + 53897, 53899, 53917, 53923, 53927, 53939, 53951, 53959, 53987, 53993, + 54001, 54011, 54013, 54037, 54049, 54059, 54083, 54091, 54101, 54121, + 54133, 54139, 54151, 54163, 54167, 54181, 54193, 54217, 54251, 54269, + 54277, 54287, 54293, 54311, 54319, 54323, 54331, 54347, 54361, 54367, + 54371, 54377, 54401, 54403, 54409, 54413, 54419, 54421, 54437, 54443, + 54449, 54469, 54493, 54497, 54499, 54503, 54517, 54521, 54539, 54541, + 54547, 54559, 54563, 54577, 54581, 54583, 54601, 54617, 54623, 54629, + 54631, 54647, 54667, 54673, 54679, 54709, 54713, 54721, 54727, 54751, + 54767, 54773, 54779, 54787, 54799, 54829, 54833, 54851, 54869, 54877, + 54881, 54907, 54917, 54919, 54941, 54949, 54959, 54973, 54979, 54983, + 55001, 55009, 55021, 55049, 55051, 55057, 55061, 55073, 55079, 55103, + 55109, 55117, 55127, 55147, 55163, 55171, 55201, 55207, 55213, 55217, + 55219, 55229, 55243, 55249, 55259, 55291, 55313, 55331, 55333, 55337, + 55339, 55343, 55351, 55373, 55381, 55399, 55411, 55439, 55441, 55457, + 55469, 55487, 55501, 55511, 55529, 55541, 55547, 55579, 55589, 55603, + 55609, 55619, 55621, 55631, 55633, 55639, 55661, 55663, 55667, 55673, + 55681, 55691, 55697, 55711, 55717, 55721, 55733, 55763, 55787, 55793, + 55799, 55807, 55813, 55817, 55819, 55823, 55829, 55837, 55843, 55849, + 55871, 55889, 55897, 55901, 55903, 55921, 55927, 55931, 55933, 55949, + 55967, 55987, 55997, 56003, 56009, 56039, 56041, 56053, 56081, 56087, + 56093, 56099, 56101, 56113, 56123, 56131, 56149, 56167, 56171, 56179, + 56197, 56207, 56209, 56237, 56239, 56249, 56263, 56267, 56269, 56299, + 56311, 56333, 56359, 56369, 56377, 56383, 56393, 56401, 56417, 56431, + 56437, 56443, 56453, 56467, 56473, 56477, 56479, 56489, 56501, 56503, + 56509, 56519, 56527, 56531, 56533, 56543, 56569, 56591, 56597, 56599, + 56611, 56629, 56633, 56659, 56663, 56671, 56681, 56687, 56701, 56711, + 56713, 56731, 56737, 56747, 56767, 56773, 56779, 56783, 56807, 56809, + 56813, 56821, 56827, 56843, 56857, 56873, 56891, 56893, 56897, 56909, + 56911, 56921, 56923, 56929, 56941, 56951, 56957, 56963, 56983, 56989, + 56993, 56999, 57037, 57041, 57047, 57059, 57073, 57077, 57089, 57097, + 57107, 57119, 57131, 57139, 57143, 57149, 57163, 57173, 57179, 57191, + 57193, 57203, 57221, 57223, 57241, 57251, 57259, 57269, 57271, 57283, + 57287, 57301, 57329, 57331, 57347, 57349, 57367, 57373, 57383, 57389, + 57397, 57413, 57427, 57457, 57467, 57487, 57493, 57503, 57527, 57529, + 57557, 57559, 57571, 57587, 57593, 57601, 57637, 57641, 57649, 57653, + 57667, 57679, 57689, 57697, 57709, 57713, 57719, 57727, 57731, 57737, + 57751, 57773, 57781, 57787, 57791, 57793, 57803, 57809, 57829, 57839, + 57847, 57853, 57859, 57881, 57899, 57901, 57917, 57923, 57943, 57947, + 57973, 57977, 57991, 58013, 58027, 58031, 58043, 58049, 58057, 58061, + 58067, 58073, 58099, 58109, 58111, 58129, 58147, 58151, 58153, 58169, + 58171, 58189, 58193, 58199, 58207, 58211, 58217, 58229, 58231, 58237, + 58243, 58271, 58309, 58313, 58321, 58337, 58363, 58367, 58369, 58379, + 58391, 58393, 58403, 58411, 58417, 58427, 58439, 58441, 58451, 58453, + 58477, 58481, 58511, 58537, 58543, 58549, 58567, 58573, 58579, 58601, + 58603, 58613, 58631, 58657, 58661, 58679, 58687, 58693, 58699, 58711, + 58727, 58733, 58741, 58757, 58763, 58771, 58787, 58789, 58831, 58889, + 58897, 58901, 58907, 58909, 58913, 58921, 58937, 58943, 58963, 58967, + 58979, 58991, 58997, 59009, 59011, 59021, 59023, 59029, 59051, 59053, + 59063, 59069, 59077, 59083, 59093, 59107, 59113, 59119, 59123, 59141, + 59149, 
59159, 59167, 59183, 59197, 59207, 59209, 59219, 59221, 59233, + 59239, 59243, 59263, 59273, 59281, 59333, 59341, 59351, 59357, 59359, + 59369, 59377, 59387, 59393, 59399, 59407, 59417, 59419, 59441, 59443, + 59447, 59453, 59467, 59471, 59473, 59497, 59509, 59513, 59539, 59557, + 59561, 59567, 59581, 59611, 59617, 59621, 59627, 59629, 59651, 59659, + 59663, 59669, 59671, 59693, 59699, 59707, 59723, 59729, 59743, 59747, + 59753, 59771, 59779, 59791, 59797, 59809, 59833, 59863, 59879, 59887, + 59921, 59929, 59951, 59957, 59971, 59981, 59999, 60013, 60017, 60029, + 60037, 60041, 60077, 60083, 60089, 60091, 60101, 60103, 60107, 60127, + 60133, 60139, 60149, 60161, 60167, 60169, 60209, 60217, 60223, 60251, + 60257, 60259, 60271, 60289, 60293, 60317, 60331, 60337, 60343, 60353, + 60373, 60383, 60397, 60413, 60427, 60443, 60449, 60457, 60493, 60497, + 60509, 60521, 60527, 60539, 60589, 60601, 60607, 60611, 60617, 60623, + 60631, 60637, 60647, 60649, 60659, 60661, 60679, 60689, 60703, 60719, + 60727, 60733, 60737, 60757, 60761, 60763, 60773, 60779, 60793, 60811, + 60821, 60859, 60869, 60887, 60889, 60899, 60901, 60913, 60917, 60919, + 60923, 60937, 60943, 60953, 60961, 61001, 61007, 61027, 61031, 61043, + 61051, 61057, 61091, 61099, 61121, 61129, 61141, 61151, 61153, 61169, + 61211, 61223, 61231, 61253, 61261, 61283, 61291, 61297, 61331, 61333, + 61339, 61343, 61357, 61363, 61379, 61381, 61403, 61409, 61417, 61441, + 61463, 61469, 61471, 61483, 61487, 61493, 61507, 61511, 61519, 61543, + 61547, 61553, 61559, 61561, 61583, 61603, 61609, 61613, 61627, 61631, + 61637, 61643, 61651, 61657, 61667, 61673, 61681, 61687, 61703, 61717, + 61723, 61729, 61751, 61757, 61781, 61813, 61819, 61837, 61843, 61861, + 61871, 61879, 61909, 61927, 61933, 61949, 61961, 61967, 61979, 61981, + 61987, 61991, 62003, 62011, 62017, 62039, 62047, 62053, 62057, 62071, + 62081, 62099, 62119, 62129, 62131, 62137, 62141, 62143, 62171, 62189, + 62191, 62201, 62207, 62213, 62219, 62233, 62273, 62297, 62299, 62303, + 62311, 62323, 62327, 62347, 62351, 62383, 62401, 62417, 62423, 62459, + 62467, 62473, 62477, 62483, 62497, 62501, 62507, 62533, 62539, 62549, + 62563, 62581, 62591, 62597, 62603, 62617, 62627, 62633, 62639, 62653, + 62659, 62683, 62687, 62701, 62723, 62731, 62743, 62753, 62761, 62773, + 62791, 62801, 62819, 62827, 62851, 62861, 62869, 62873, 62897, 62903, + 62921, 62927, 62929, 62939, 62969, 62971, 62981, 62983, 62987, 62989, + 63029, 63031, 63059, 63067, 63073, 63079, 63097, 63103, 63113, 63127, + 63131, 63149, 63179, 63197, 63199, 63211, 63241, 63247, 63277, 63281, + 63299, 63311, 63313, 63317, 63331, 63337, 63347, 63353, 63361, 63367, + 63377, 63389, 63391, 63397, 63409, 63419, 63421, 63439, 63443, 63463, + 63467, 63473, 63487, 63493, 63499, 63521, 63527, 63533, 63541, 63559, + 63577, 63587, 63589, 63599, 63601, 63607, 63611, 63617, 63629, 63647, + 63649, 63659, 63667, 63671, 63689, 63691, 63697, 63703, 63709, 63719, + 63727, 63737, 63743, 63761, 63773, 63781, 63793, 63799, 63803, 63809, + 63823, 63839, 63841, 63853, 63857, 63863, 63901, 63907, 63913, 63929, + 63949, 63977, 63997, 64007, 64013, 64019, 64033, 64037, 64063, 64067, + 64081, 64091, 64109, 64123, 64151, 64153, 64157, 64171, 64187, 64189, + 64217, 64223, 64231, 64237, 64271, 64279, 64283, 64301, 64303, 64319, + 64327, 64333, 64373, 64381, 64399, 64403, 64433, 64439, 64451, 64453, + 64483, 64489, 64499, 64513, 64553, 64567, 64577, 64579, 64591, 64601, + 64609, 64613, 64621, 64627, 64633, 64661, 64663, 64667, 64679, 64693, + 64709, 64717, 64747, 64763, 
64781, 64783, 64793, 64811, 64817, 64849, + 64853, 64871, 64877, 64879, 64891, 64901, 64919, 64921, 64927, 64937, + 64951, 64969, 64997, 65003, 65011, 65027, 65029, 65033, 65053, 65063, + 65071, 65089, 65099, 65101, 65111, 65119, 65123, 65129, 65141, 65147, + 65167, 65171, 65173, 65179, 65183, 65203, 65213, 65239, 65257, 65267, + 65269, 65287, 65293, 65309, 65323, 65327, 65353, 65357, 65371, 65381, + 65393, 65407, 65413, 65419, 65423, 65437, 65447, 65449, 65479, 65497, + 65519, 65521, 65537, 65539, 65543, 65551, 65557, 65563, 65579, 65581, + 65587, 65599, 65609, 65617, 65629, 65633, 65647, 65651, 65657, 65677, + 65687, 65699, 65701, 65707, 65713, 65717, 65719, 65729, 65731, 65761, + 65777, 65789, 65809, 65827, 65831, 65837, 65839, 65843, 65851, 65867, + 65881, 65899, 65921, 65927, 65929, 65951, 65957, 65963, 65981, 65983, + 65993, 66029, 66037, 66041, 66047, 66067, 66071, 66083, 66089, 66103, + 66107, 66109, 66137, 66161, 66169, 66173, 66179, 66191, 66221, 66239, + 66271, 66293, 66301, 66337, 66343, 66347, 66359, 66361, 66373, 66377, + 66383, 66403, 66413, 66431, 66449, 66457, 66463, 66467, 66491, 66499, + 66509, 66523, 66529, 66533, 66541, 66553, 66569, 66571, 66587, 66593, + 66601, 66617, 66629, 66643, 66653, 66683, 66697, 66701, 66713, 66721, + 66733, 66739, 66749, 66751, 66763, 66791, 66797, 66809, 66821, 66841, + 66851, 66853, 66863, 66877, 66883, 66889, 66919, 66923, 66931, 66943, + 66947, 66949, 66959, 66973, 66977, 67003, 67021, 67033, 67043, 67049, + 67057, 67061, 67073, 67079, 67103, 67121, 67129, 67139, 67141, 67153, + 67157, 67169, 67181, 67187, 67189, 67211, 67213, 67217, 67219, 67231, + 67247, 67261, 67271, 67273, 67289, 67307, 67339, 67343, 67349, 67369, + 67391, 67399, 67409, 67411, 67421, 67427, 67429, 67433, 67447, 67453, + 67477, 67481, 67489, 67493, 67499, 67511, 67523, 67531, 67537, 67547, + 67559, 67567, 67577, 67579, 67589, 67601, 67607, 67619, 67631, 67651, + 67679, 67699, 67709, 67723, 67733, 67741, 67751, 67757, 67759, 67763, + 67777, 67783, 67789, 67801, 67807, 67819, 67829, 67843, 67853, 67867, + 67883, 67891, 67901, 67927, 67931, 67933, 67939, 67943, 67957, 67961, + 67967, 67979, 67987, 67993, 68023, 68041, 68053, 68059, 68071, 68087, + 68099, 68111, 68113, 68141, 68147, 68161, 68171, 68207, 68209, 68213, + 68219, 68227, 68239, 68261, 68279, 68281, 68311, 68329, 68351, 68371, + 68389, 68399, 68437, 68443, 68447, 68449, 68473, 68477, 68483, 68489, + 68491, 68501, 68507, 68521, 68531, 68539, 68543, 68567, 68581, 68597, + 68611, 68633, 68639, 68659, 68669, 68683, 68687, 68699, 68711, 68713, + 68729, 68737, 68743, 68749, 68767, 68771, 68777, 68791, 68813, 68819, + 68821, 68863, 68879, 68881, 68891, 68897, 68899, 68903, 68909, 68917, + 68927, 68947, 68963, 68993, 69001, 69011, 69019, 69029, 69031, 69061, + 69067, 69073, 69109, 69119, 69127, 69143, 69149, 69151, 69163, 69191, + 69193, 69197, 69203, 69221, 69233, 69239, 69247, 69257, 69259, 69263, + 69313, 69317, 69337, 69341, 69371, 69379, 69383, 69389, 69401, 69403, + 69427, 69431, 69439, 69457, 69463, 69467, 69473, 69481, 69491, 69493, + 69497, 69499, 69539, 69557, 69593, 69623, 69653, 69661, 69677, 69691, + 69697, 69709, 69737, 69739, 69761, 69763, 69767, 69779, 69809, 69821, + 69827, 69829, 69833, 69847, 69857, 69859, 69877, 69899, 69911, 69929, + 69931, 69941, 69959, 69991, 69997, 70001, 70003, 70009, 70019, 70039, + 70051, 70061, 70067, 70079, 70099, 70111, 70117, 70121, 70123, 70139, + 70141, 70157, 70163, 70177, 70181, 70183, 70199, 70201, 70207, 70223, + 70229, 70237, 70241, 70249, 70271, 70289, 70297, 
70309, 70313, 70321, + 70327, 70351, 70373, 70379, 70381, 70393, 70423, 70429, 70439, 70451, + 70457, 70459, 70481, 70487, 70489, 70501, 70507, 70529, 70537, 70549, + 70571, 70573, 70583, 70589, 70607, 70619, 70621, 70627, 70639, 70657, + 70663, 70667, 70687, 70709, 70717, 70729, 70753, 70769, 70783, 70793, + 70823, 70841, 70843, 70849, 70853, 70867, 70877, 70879, 70891, 70901, + 70913, 70919, 70921, 70937, 70949, 70951, 70957, 70969, 70979, 70981, + 70991, 70997, 70999, 71011, 71023, 71039, 71059, 71069, 71081, 71089, + 71119, 71129, 71143, 71147, 71153, 71161, 71167, 71171, 71191, 71209, + 71233, 71237, 71249, 71257, 71261, 71263, 71287, 71293, 71317, 71327, + 71329, 71333, 71339, 71341, 71347, 71353, 71359, 71363, 71387, 71389, + 71399, 71411, 71413, 71419, 71429, 71437, 71443, 71453, 71471, 71473, + 71479, 71483, 71503, 71527, 71537, 71549, 71551, 71563, 71569, 71593, + 71597, 71633, 71647, 71663, 71671, 71693, 71699, 71707, 71711, 71713, + 71719, 71741, 71761, 71777, 71789, 71807, 71809, 71821, 71837, 71843, + 71849, 71861, 71867, 71879, 71881, 71887, 71899, 71909, 71917, 71933, + 71941, 71947, 71963, 71971, 71983, 71987, 71993, 71999, 72019, 72031, + 72043, 72047, 72053, 72073, 72077, 72089, 72091, 72101, 72103, 72109, + 72139, 72161, 72167, 72169, 72173, 72211, 72221, 72223, 72227, 72229, + 72251, 72253, 72269, 72271, 72277, 72287, 72307, 72313, 72337, 72341, + 72353, 72367, 72379, 72383, 72421, 72431, 72461, 72467, 72469, 72481, + 72493, 72497, 72503, 72533, 72547, 72551, 72559, 72577, 72613, 72617, + 72623, 72643, 72647, 72649, 72661, 72671, 72673, 72679, 72689, 72701, + 72707, 72719, 72727, 72733, 72739, 72763, 72767, 72797, 72817, 72823, + 72859, 72869, 72871, 72883, 72889, 72893, 72901, 72907, 72911, 72923, + 72931, 72937, 72949, 72953, 72959, 72973, 72977, 72997, 73009, 73013, + 73019, 73037, 73039, 73043, 73061, 73063, 73079, 73091, 73121, 73127, + 73133, 73141, 73181, 73189, 73237, 73243, 73259, 73277, 73291, 73303, + 73309, 73327, 73331, 73351, 73361, 73363, 73369, 73379, 73387, 73417, + 73421, 73433, 73453, 73459, 73471, 73477, 73483, 73517, 73523, 73529, + 73547, 73553, 73561, 73571, 73583, 73589, 73597, 73607, 73609, 73613, + 73637, 73643, 73651, 73673, 73679, 73681, 73693, 73699, 73709, 73721, + 73727, 73751, 73757, 73771, 73783, 73819, 73823, 73847, 73849, 73859, + 73867, 73877, 73883, 73897, 73907, 73939, 73943, 73951, 73961, 73973, + 73999, 74017, 74021, 74027, 74047, 74051, 74071, 74077, 74093, 74099, + 74101, 74131, 74143, 74149, 74159, 74161, 74167, 74177, 74189, 74197, + 74201, 74203, 74209, 74219, 74231, 74257, 74279, 74287, 74293, 74297, + 74311, 74317, 74323, 74353, 74357, 74363, 74377, 74381, 74383, 74411, + 74413, 74419, 74441, 74449, 74453, 74471, 74489, 74507, 74509, 74521, + 74527, 74531, 74551, 74561, 74567, 74573, 74587, 74597, 74609, 74611, + 74623, 74653, 74687, 74699, 74707, 74713, 74717, 74719, 74729, 74731, + 74747, 74759, 74761, 74771, 74779, 74797, 74821, 74827, 74831, 74843, + 74857, 74861, 74869, 74873, 74887, 74891, 74897, 74903, 74923, 74929, + 74933, 74941, 74959, 75011, 75013, 75017, 75029, 75037, 75041, 75079, + 75083, 75109, 75133, 75149, 75161, 75167, 75169, 75181, 75193, 75209, + 75211, 75217, 75223, 75227, 75239, 75253, 75269, 75277, 75289, 75307, + 75323, 75329, 75337, 75347, 75353, 75367, 75377, 75389, 75391, 75401, + 75403, 75407, 75431, 75437, 75479, 75503, 75511, 75521, 75527, 75533, + 75539, 75541, 75553, 75557, 75571, 75577, 75583, 75611, 75617, 75619, + 75629, 75641, 75653, 75659, 75679, 75683, 75689, 75703, 75707, 75709, + 
75721, 75731, 75743, 75767, 75773, 75781, 75787, 75793, 75797, 75821, + 75833, 75853, 75869, 75883, 75913, 75931, 75937, 75941, 75967, 75979, + 75983, 75989, 75991, 75997, 76001, 76003, 76031, 76039, 76079, 76081, + 76091, 76099, 76103, 76123, 76129, 76147, 76157, 76159, 76163, 76207, + 76213, 76231, 76243, 76249, 76253, 76259, 76261, 76283, 76289, 76303, + 76333, 76343, 76367, 76369, 76379, 76387, 76403, 76421, 76423, 76441, + 76463, 76471, 76481, 76487, 76493, 76507, 76511, 76519, 76537, 76541, + 76543, 76561, 76579, 76597, 76603, 76607, 76631, 76649, 76651, 76667, + 76673, 76679, 76697, 76717, 76733, 76753, 76757, 76771, 76777, 76781, + 76801, 76819, 76829, 76831, 76837, 76847, 76871, 76873, 76883, 76907, + 76913, 76919, 76943, 76949, 76961, 76963, 76991, 77003, 77017, 77023, + 77029, 77041, 77047, 77069, 77081, 77093, 77101, 77137, 77141, 77153, + 77167, 77171, 77191, 77201, 77213, 77237, 77239, 77243, 77249, 77261, + 77263, 77267, 77269, 77279, 77291, 77317, 77323, 77339, 77347, 77351, + 77359, 77369, 77377, 77383, 77417, 77419, 77431, 77447, 77471, 77477, + 77479, 77489, 77491, 77509, 77513, 77521, 77527, 77543, 77549, 77551, + 77557, 77563, 77569, 77573, 77587, 77591, 77611, 77617, 77621, 77641, + 77647, 77659, 77681, 77687, 77689, 77699, 77711, 77713, 77719, 77723, + 77731, 77743, 77747, 77761, 77773, 77783, 77797, 77801, 77813, 77839, + 77849, 77863, 77867, 77893, 77899, 77929, 77933, 77951, 77969, 77977, + 77983, 77999, 78007, 78017, 78031, 78041, 78049, 78059, 78079, 78101, + 78121, 78137, 78139, 78157, 78163, 78167, 78173, 78179, 78191, 78193, + 78203, 78229, 78233, 78241, 78259, 78277, 78283, 78301, 78307, 78311, + 78317, 78341, 78347, 78367, 78401, 78427, 78437, 78439, 78467, 78479, + 78487, 78497, 78509, 78511, 78517, 78539, 78541, 78553, 78569, 78571, + 78577, 78583, 78593, 78607, 78623, 78643, 78649, 78653, 78691, 78697, + 78707, 78713, 78721, 78737, 78779, 78781, 78787, 78791, 78797, 78803, + 78809, 78823, 78839, 78853, 78857, 78877, 78887, 78889, 78893, 78901, + 78919, 78929, 78941, 78977, 78979, 78989, 79031, 79039, 79043, 79063, + 79087, 79103, 79111, 79133, 79139, 79147, 79151, 79153, 79159, 79181, + 79187, 79193, 79201, 79229, 79231, 79241, 79259, 79273, 79279, 79283, + 79301, 79309, 79319, 79333, 79337, 79349, 79357, 79367, 79379, 79393, + 79397, 79399, 79411, 79423, 79427, 79433, 79451, 79481, 79493, 79531, + 79537, 79549, 79559, 79561, 79579, 79589, 79601, 79609, 79613, 79621, + 79627, 79631, 79633, 79657, 79669, 79687, 79691, 79693, 79697, 79699, + 79757, 79769, 79777, 79801, 79811, 79813, 79817, 79823, 79829, 79841, + 79843, 79847, 79861, 79867, 79873, 79889, 79901, 79903, 79907, 79939, + 79943, 79967, 79973, 79979, 79987, 79997, 79999, 80021, 80039, 80051, + 80071, 80077, 80107, 80111, 80141, 80147, 80149, 80153, 80167, 80173, + 80177, 80191, 80207, 80209, 80221, 80231, 80233, 80239, 80251, 80263, + 80273, 80279, 80287, 80309, 80317, 80329, 80341, 80347, 80363, 80369, + 80387, 80407, 80429, 80447, 80449, 80471, 80473, 80489, 80491, 80513, + 80527, 80537, 80557, 80567, 80599, 80603, 80611, 80621, 80627, 80629, + 80651, 80657, 80669, 80671, 80677, 80681, 80683, 80687, 80701, 80713, + 80737, 80747, 80749, 80761, 80777, 80779, 80783, 80789, 80803, 80809, + 80819, 80831, 80833, 80849, 80863, 80897, 80909, 80911, 80917, 80923, + 80929, 80933, 80953, 80963, 80989, 81001, 81013, 81017, 81019, 81023, + 81031, 81041, 81043, 81047, 81049, 81071, 81077, 81083, 81097, 81101, + 81119, 81131, 81157, 81163, 81173, 81181, 81197, 81199, 81203, 81223, + 81233, 81239, 81281, 
81283, 81293, 81299, 81307, 81331, 81343, 81349, + 81353, 81359, 81371, 81373, 81401, 81409, 81421, 81439, 81457, 81463, + 81509, 81517, 81527, 81533, 81547, 81551, 81553, 81559, 81563, 81569, + 81611, 81619, 81629, 81637, 81647, 81649, 81667, 81671, 81677, 81689, + 81701, 81703, 81707, 81727, 81737, 81749, 81761, 81769, 81773, 81799, + 81817, 81839, 81847, 81853, 81869, 81883, 81899, 81901, 81919, 81929, + 81931, 81937, 81943, 81953, 81967, 81971, 81973, 82003, 82007, 82009, + 82013, 82021, 82031, 82037, 82039, 82051, 82067, 82073, 82129, 82139, + 82141, 82153, 82163, 82171, 82183, 82189, 82193, 82207, 82217, 82219, + 82223, 82231, 82237, 82241, 82261, 82267, 82279, 82301, 82307, 82339, + 82349, 82351, 82361, 82373, 82387, 82393, 82421, 82457, 82463, 82469, + 82471, 82483, 82487, 82493, 82499, 82507, 82529, 82531, 82549, 82559, + 82561, 82567, 82571, 82591, 82601, 82609, 82613, 82619, 82633, 82651, + 82657, 82699, 82721, 82723, 82727, 82729, 82757, 82759, 82763, 82781, + 82787, 82793, 82799, 82811, 82813, 82837, 82847, 82883, 82889, 82891, + 82903, 82913, 82939, 82963, 82981, 82997, 83003, 83009, 83023, 83047, + 83059, 83063, 83071, 83077, 83089, 83093, 83101, 83117, 83137, 83177, + 83203, 83207, 83219, 83221, 83227, 83231, 83233, 83243, 83257, 83267, + 83269, 83273, 83299, 83311, 83339, 83341, 83357, 83383, 83389, 83399, + 83401, 83407, 83417, 83423, 83431, 83437, 83443, 83449, 83459, 83471, + 83477, 83497, 83537, 83557, 83561, 83563, 83579, 83591, 83597, 83609, + 83617, 83621, 83639, 83641, 83653, 83663, 83689, 83701, 83717, 83719, + 83737, 83761, 83773, 83777, 83791, 83813, 83833, 83843, 83857, 83869, + 83873, 83891, 83903, 83911, 83921, 83933, 83939, 83969, 83983, 83987, + 84011, 84017, 84047, 84053, 84059, 84061, 84067, 84089, 84121, 84127, + 84131, 84137, 84143, 84163, 84179, 84181, 84191, 84199, 84211, 84221, + 84223, 84229, 84239, 84247, 84263, 84299, 84307, 84313, 84317, 84319, + 84347, 84349, 84377, 84389, 84391, 84401, 84407, 84421, 84431, 84437, + 84443, 84449, 84457, 84463, 84467, 84481, 84499, 84503, 84509, 84521, + 84523, 84533, 84551, 84559, 84589, 84629, 84631, 84649, 84653, 84659, + 84673, 84691, 84697, 84701, 84713, 84719, 84731, 84737, 84751, 84761, + 84787, 84793, 84809, 84811, 84827, 84857, 84859, 84869, 84871, 84913, + 84919, 84947, 84961, 84967, 84977, 84979, 84991, 85009, 85021, 85027, + 85037, 85049, 85061, 85081, 85087, 85091, 85093, 85103, 85109, 85121, + 85133, 85147, 85159, 85193, 85199, 85201, 85213, 85223, 85229, 85237, + 85243, 85247, 85259, 85297, 85303, 85313, 85331, 85333, 85361, 85363, + 85369, 85381, 85411, 85427, 85429, 85439, 85447, 85451, 85453, 85469, + 85487, 85513, 85517, 85523, 85531, 85549, 85571, 85577, 85597, 85601, + 85607, 85619, 85621, 85627, 85639, 85643, 85661, 85667, 85669, 85691, + 85703, 85711, 85717, 85733, 85751, 85781, 85793, 85817, 85819, 85829, + 85831, 85837, 85843, 85847, 85853, 85889, 85903, 85909, 85931, 85933, + 85991, 85999, 86011, 86017, 86027, 86029, 86069, 86077, 86083, 86111, + 86113, 86117, 86131, 86137, 86143, 86161, 86171, 86179, 86183, 86197, + 86201, 86209, 86239, 86243, 86249, 86257, 86263, 86269, 86287, 86291, + 86293, 86297, 86311, 86323, 86341, 86351, 86353, 86357, 86369, 86371, + 86381, 86389, 86399, 86413, 86423, 86441, 86453, 86461, 86467, 86477, + 86491, 86501, 86509, 86531, 86533, 86539, 86561, 86573, 86579, 86587, + 86599, 86627, 86629, 86677, 86689, 86693, 86711, 86719, 86729, 86743, + 86753, 86767, 86771, 86783, 86813, 86837, 86843, 86851, 86857, 86861, + 86869, 86923, 86927, 86929, 86939, 86951, 
86959, 86969, 86981, 86993, + 87011, 87013, 87037, 87041, 87049, 87071, 87083, 87103, 87107, 87119, + 87121, 87133, 87149, 87151, 87179, 87181, 87187, 87211, 87221, 87223, + 87251, 87253, 87257, 87277, 87281, 87293, 87299, 87313, 87317, 87323, + 87337, 87359, 87383, 87403, 87407, 87421, 87427, 87433, 87443, 87473, + 87481, 87491, 87509, 87511, 87517, 87523, 87539, 87541, 87547, 87553, + 87557, 87559, 87583, 87587, 87589, 87613, 87623, 87629, 87631, 87641, + 87643, 87649, 87671, 87679, 87683, 87691, 87697, 87701, 87719, 87721, + 87739, 87743, 87751, 87767, 87793, 87797, 87803, 87811, 87833, 87853, + 87869, 87877, 87881, 87887, 87911, 87917, 87931, 87943, 87959, 87961, + 87973, 87977, 87991, 88001, 88003, 88007, 88019, 88037, 88069, 88079, + 88093, 88117, 88129, 88169, 88177, 88211, 88223, 88237, 88241, 88259, + 88261, 88289, 88301, 88321, 88327, 88337, 88339, 88379, 88397, 88411, + 88423, 88427, 88463, 88469, 88471, 88493, 88499, 88513, 88523, 88547, + 88589, 88591, 88607, 88609, 88643, 88651, 88657, 88661, 88663, 88667, + 88681, 88721, 88729, 88741, 88747, 88771, 88789, 88793, 88799, 88801, + 88807, 88811, 88813, 88817, 88819, 88843, 88853, 88861, 88867, 88873, + 88883, 88897, 88903, 88919, 88937, 88951, 88969, 88993, 88997, 89003, + 89009, 89017, 89021, 89041, 89051, 89057, 89069, 89071, 89083, 89087, + 89101, 89107, 89113, 89119, 89123, 89137, 89153, 89189, 89203, 89209, + 89213, 89227, 89231, 89237, 89261, 89269, 89273, 89293, 89303, 89317, + 89329, 89363, 89371, 89381, 89387, 89393, 89399, 89413, 89417, 89431, + 89443, 89449, 89459, 89477, 89491, 89501, 89513, 89519, 89521, 89527, + 89533, 89561, 89563, 89567, 89591, 89597, 89599, 89603, 89611, 89627, + 89633, 89653, 89657, 89659, 89669, 89671, 89681, 89689, 89753, 89759, + 89767, 89779, 89783, 89797, 89809, 89819, 89821, 89833, 89839, 89849, + 89867, 89891, 89897, 89899, 89909, 89917, 89923, 89939, 89959, 89963, + 89977, 89983, 89989, 90001, 90007, 90011, 90017, 90019, 90023, 90031, + 90053, 90059, 90067, 90071, 90073, 90089, 90107, 90121, 90127, 90149, + 90163, 90173, 90187, 90191, 90197, 90199, 90203, 90217, 90227, 90239, + 90247, 90263, 90271, 90281, 90289, 90313, 90353, 90359, 90371, 90373, + 90379, 90397, 90401, 90403, 90407, 90437, 90439, 90469, 90473, 90481, + 90499, 90511, 90523, 90527, 90529, 90533, 90547, 90583, 90599, 90617, + 90619, 90631, 90641, 90647, 90659, 90677, 90679, 90697, 90703, 90709, + 90731, 90749, 90787, 90793, 90803, 90821, 90823, 90833, 90841, 90847, + 90863, 90887, 90901, 90907, 90911, 90917, 90931, 90947, 90971, 90977, + 90989, 90997, 91009, 91019, 91033, 91079, 91081, 91097, 91099, 91121, + 91127, 91129, 91139, 91141, 91151, 91153, 91159, 91163, 91183, 91193, + 91199, 91229, 91237, 91243, 91249, 91253, 91283, 91291, 91297, 91303, + 91309, 91331, 91367, 91369, 91373, 91381, 91387, 91393, 91397, 91411, + 91423, 91433, 91453, 91457, 91459, 91463, 91493, 91499, 91513, 91529, + 91541, 91571, 91573, 91577, 91583, 91591, 91621, 91631, 91639, 91673, + 91691, 91703, 91711, 91733, 91753, 91757, 91771, 91781, 91801, 91807, + 91811, 91813, 91823, 91837, 91841, 91867, 91873, 91909, 91921, 91939, + 91943, 91951, 91957, 91961, 91967, 91969, 91997, 92003, 92009, 92033, + 92041, 92051, 92077, 92083, 92107, 92111, 92119, 92143, 92153, 92173, + 92177, 92179, 92189, 92203, 92219, 92221, 92227, 92233, 92237, 92243, + 92251, 92269, 92297, 92311, 92317, 92333, 92347, 92353, 92357, 92363, + 92369, 92377, 92381, 92383, 92387, 92399, 92401, 92413, 92419, 92431, + 92459, 92461, 92467, 92479, 92489, 92503, 92507, 92551, 92557, 
92567, + 92569, 92581, 92593, 92623, 92627, 92639, 92641, 92647, 92657, 92669, + 92671, 92681, 92683, 92693, 92699, 92707, 92717, 92723, 92737, 92753, + 92761, 92767, 92779, 92789, 92791, 92801, 92809, 92821, 92831, 92849, + 92857, 92861, 92863, 92867, 92893, 92899, 92921, 92927, 92941, 92951, + 92957, 92959, 92987, 92993, 93001, 93047, 93053, 93059, 93077, 93083, + 93089, 93097, 93103, 93113, 93131, 93133, 93139, 93151, 93169, 93179, + 93187, 93199, 93229, 93239, 93241, 93251, 93253, 93257, 93263, 93281, + 93283, 93287, 93307, 93319, 93323, 93329, 93337, 93371, 93377, 93383, + 93407, 93419, 93427, 93463, 93479, 93481, 93487, 93491, 93493, 93497, + 93503, 93523, 93529, 93553, 93557, 93559, 93563, 93581, 93601, 93607, + 93629, 93637, 93683, 93701, 93703, 93719, 93739, 93761, 93763, 93787, + 93809, 93811, 93827, 93851, 93871, 93887, 93889, 93893, 93901, 93911, + 93913, 93923, 93937, 93941, 93949, 93967, 93971, 93979, 93983, 93997, + 94007, 94009, 94033, 94049, 94057, 94063, 94079, 94099, 94109, 94111, + 94117, 94121, 94151, 94153, 94169, 94201, 94207, 94219, 94229, 94253, + 94261, 94273, 94291, 94307, 94309, 94321, 94327, 94331, 94343, 94349, + 94351, 94379, 94397, 94399, 94421, 94427, 94433, 94439, 94441, 94447, + 94463, 94477, 94483, 94513, 94529, 94531, 94541, 94543, 94547, 94559, + 94561, 94573, 94583, 94597, 94603, 94613, 94621, 94649, 94651, 94687, + 94693, 94709, 94723, 94727, 94747, 94771, 94777, 94781, 94789, 94793, + 94811, 94819, 94823, 94837, 94841, 94847, 94849, 94873, 94889, 94903, + 94907, 94933, 94949, 94951, 94961, 94993, 94999, 95003, 95009, 95021, + 95027, 95063, 95071, 95083, 95087, 95089, 95093, 95101, 95107, 95111, + 95131, 95143, 95153, 95177, 95189, 95191, 95203, 95213, 95219, 95231, + 95233, 95239, 95257, 95261, 95267, 95273, 95279, 95287, 95311, 95317, + 95327, 95339, 95369, 95383, 95393, 95401, 95413, 95419, 95429, 95441, + 95443, 95461, 95467, 95471, 95479, 95483, 95507, 95527, 95531, 95539, + 95549, 95561, 95569, 95581, 95597, 95603, 95617, 95621, 95629, 95633, + 95651, 95701, 95707, 95713, 95717, 95723, 95731, 95737, 95747, 95773, + 95783, 95789, 95791, 95801, 95803, 95813, 95819, 95857, 95869, 95873, + 95881, 95891, 95911, 95917, 95923, 95929, 95947, 95957, 95959, 95971, + 95987, 95989, 96001, 96013, 96017, 96043, 96053, 96059, 96079, 96097, + 96137, 96149, 96157, 96167, 96179, 96181, 96199, 96211, 96221, 96223, + 96233, 96259, 96263, 96269, 96281, 96289, 96293, 96323, 96329, 96331, + 96337, 96353, 96377, 96401, 96419, 96431, 96443, 96451, 96457, 96461, + 96469, 96479, 96487, 96493, 96497, 96517, 96527, 96553, 96557, 96581, + 96587, 96589, 96601, 96643, 96661, 96667, 96671, 96697, 96703, 96731, + 96737, 96739, 96749, 96757, 96763, 96769, 96779, 96787, 96797, 96799, + 96821, 96823, 96827, 96847, 96851, 96857, 96893, 96907, 96911, 96931, + 96953, 96959, 96973, 96979, 96989, 96997, 97001, 97003, 97007, 97021, + 97039, 97073, 97081, 97103, 97117, 97127, 97151, 97157, 97159, 97169, + 97171, 97177, 97187, 97213, 97231, 97241, 97259, 97283, 97301, 97303, + 97327, 97367, 97369, 97373, 97379, 97381, 97387, 97397, 97423, 97429, + 97441, 97453, 97459, 97463, 97499, 97501, 97511, 97523, 97547, 97549, + 97553, 97561, 97571, 97577, 97579, 97583, 97607, 97609, 97613, 97649, + 97651, 97673, 97687, 97711, 97729, 97771, 97777, 97787, 97789, 97813, + 97829, 97841, 97843, 97847, 97849, 97859, 97861, 97871, 97879, 97883, + 97919, 97927, 97931, 97943, 97961, 97967, 97973, 97987, 98009, 98011, + 98017, 98041, 98047, 98057, 98081, 98101, 98123, 98129, 98143, 98179, + 98207, 98213, 
98221, 98227, 98251, 98257, 98269, 98297, 98299, 98317, + 98321, 98323, 98327, 98347, 98369, 98377, 98387, 98389, 98407, 98411, + 98419, 98429, 98443, 98453, 98459, 98467, 98473, 98479, 98491, 98507, + 98519, 98533, 98543, 98561, 98563, 98573, 98597, 98621, 98627, 98639, + 98641, 98663, 98669, 98689, 98711, 98713, 98717, 98729, 98731, 98737, + 98773, 98779, 98801, 98807, 98809, 98837, 98849, 98867, 98869, 98873, + 98887, 98893, 98897, 98899, 98909, 98911, 98927, 98929, 98939, 98947, + 98953, 98963, 98981, 98993, 98999, 99013, 99017, 99023, 99041, 99053, + 99079, 99083, 99089, 99103, 99109, 99119, 99131, 99133, 99137, 99139, + 99149, 99173, 99181, 99191, 99223, 99233, 99241, 99251, 99257, 99259, + 99277, 99289, 99317, 99347, 99349, 99367, 99371, 99377, 99391, 99397, + 99401, 99409, 99431, 99439, 99469, 99487, 99497, 99523, 99527, 99529, + 99551, 99559, 99563, 99571, 99577, 99581, 99607, 99611, 99623, 99643, + 99661, 99667, 99679, 99689, 99707, 99709, 99713, 99719, 99721, 99733, + 99761, 99767, 99787, 99793, 99809, 99817, 99823, 99829, 99833, 99839, + 99859, 99871, 99877, 99881, 99901, 99907, 99923, 99929, 99961, 99971, + 99989, 99991, 100003, 100019, 100043, 100049, 100057, 100069, 100103, 100109, +100129, 100151, 100153, 100169, 100183, 100189, 100193, 100207, 100213, 100237, +100267, 100271, 100279, 100291, 100297, 100313, 100333, 100343, 100357, 100361, +100363, 100379, 100391, 100393, 100403, 100411, 100417, 100447, 100459, 100469, +100483, 100493, 100501, 100511, 100517, 100519, 100523, 100537, 100547, 100549, +100559, 100591, 100609, 100613, 100621, 100649, 100669, 100673, 100693, 100699, +100703, 100733, 100741, 100747, 100769, 100787, 100799, 100801, 100811, 100823, +100829, 100847, 100853, 100907, 100913, 100927, 100931, 100937, 100943, 100957, +100981, 100987, 100999, 101009, 101021, 101027, 101051, 101063, 101081, 101089, +101107, 101111, 101113, 101117, 101119, 101141, 101149, 101159, 101161, 101173, +101183, 101197, 101203, 101207, 101209, 101221, 101267, 101273, 101279, 101281, +101287, 101293, 101323, 101333, 101341, 101347, 101359, 101363, 101377, 101383, +101399, 101411, 101419, 101429, 101449, 101467, 101477, 101483, 101489, 101501, +101503, 101513, 101527, 101531, 101533, 101537, 101561, 101573, 101581, 101599, +101603, 101611, 101627, 101641, 101653, 101663, 101681, 101693, 101701, 101719, +101723, 101737, 101741, 101747, 101749, 101771, 101789, 101797, 101807, 101833, +101837, 101839, 101863, 101869, 101873, 101879, 101891, 101917, 101921, 101929, +101939, 101957, 101963, 101977, 101987, 101999, 102001, 102013, 102019, 102023, +102031, 102043, 102059, 102061, 102071, 102077, 102079, 102101, 102103, 102107, +102121, 102139, 102149, 102161, 102181, 102191, 102197, 102199, 102203, 102217, +102229, 102233, 102241, 102251, 102253, 102259, 102293, 102299, 102301, 102317, +102329, 102337, 102359, 102367, 102397, 102407, 102409, 102433, 102437, 102451, +102461, 102481, 102497, 102499, 102503, 102523, 102533, 102539, 102547, 102551, +102559, 102563, 102587, 102593, 102607, 102611, 102643, 102647, 102653, 102667, +102673, 102677, 102679, 102701, 102761, 102763, 102769, 102793, 102797, 102811, +102829, 102841, 102859, 102871, 102877, 102881, 102911, 102913, 102929, 102931, +102953, 102967, 102983, 103001, 103007, 103043, 103049, 103067, 103069, 103079, +103087, 103091, 103093, 103099, 103123, 103141, 103171, 103177, 103183, 103217, +103231, 103237, 103289, 103291, 103307, 103319, 103333, 103349, 103357, 103387, +103391, 103393, 103399, 103409, 103421, 103423, 103451, 103457, 
103471, 103483, +103511, 103529, 103549, 103553, 103561, 103567, 103573, 103577, 103583, 103591, +103613, 103619, 103643, 103651, 103657, 103669, 103681, 103687, 103699, 103703, +103723, 103769, 103787, 103801, 103811, 103813, 103837, 103841, 103843, 103867, +103889, 103903, 103913, 103919, 103951, 103963, 103967, 103969, 103979, 103981, +103991, 103993, 103997, 104003, 104009, 104021, 104033, 104047, 104053, 104059, +104087, 104089, 104107, 104113, 104119, 104123, 104147, 104149, 104161, 104173, +104179, 104183, 104207, 104231, 104233, 104239, 104243, 104281, 104287, 104297, +104309, 104311, 104323, 104327, 104347, 104369, 104381, 104383, 104393, 104399, +104417, 104459, 104471, 104473, 104479, 104491, 104513, 104527, 104537, 104543, +104549, 104551, 104561, 104579, 104593, 104597, 104623, 104639, 104651, 104659, +104677, 104681, 104683, 104693, 104701, 104707, 104711, 104717, 104723, 104729, +) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/number.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/number.pyi new file mode 100644 index 000000000..f8680bf84 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/number.pyi @@ -0,0 +1,19 @@ +from typing import List, Optional, Callable + + +def ceil_div(n: int, d: int) -> int: ... +def size (N: int) -> int: ... +def getRandomInteger(N: int, randfunc: Optional[Callable]=None) -> int: ... +def getRandomRange(a: int, b: int, randfunc: Optional[Callable]=None) -> int: ... +def getRandomNBitInteger(N: int, randfunc: Optional[Callable]=None) -> int: ... +def GCD(x: int,y: int) -> int: ... +def inverse(u: int, v: int) -> int: ... +def getPrime(N: int, randfunc: Optional[Callable]=None) -> int: ... +def getStrongPrime(N: int, e: Optional[int]=0, false_positive_prob: Optional[float]=1e-6, randfunc: Optional[Callable]=None) -> int: ... +def isPrime(N: int, false_positive_prob: Optional[float]=1e-6, randfunc: Optional[Callable]=None) -> bool: ... +def long_to_bytes(n: int, blocksize: Optional[int]=0) -> bytes: ... +def bytes_to_long(s: bytes) -> int: ... +def long2str(n: int, blocksize: Optional[int]=0) -> bytes: ... +def str2long(s: bytes) -> int: ... + +sieve_base: List[int] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/py3compat.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/py3compat.py new file mode 100644 index 000000000..087550d4b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/py3compat.py @@ -0,0 +1,185 @@ +# -*- coding: utf-8 -*- +# +# Util/py3compat.py : Compatibility code for handling Py3k / Python 2.x +# +# Written in 2010 by Thorsten Behrens +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Compatibility code for handling string/bytes changes from Python 2.x to Py3k + +In Python 2.x, strings (of type ''str'') contain binary data, including encoded +Unicode text (e.g. UTF-8). The separate type ''unicode'' holds Unicode text. +Unicode literals are specified via the u'...' prefix. Indexing or slicing +either type always produces a string of the same type as the original. +Data read from a file is always of '''str'' type. + +In Python 3.x, strings (type ''str'') may only contain Unicode text. The u'...' +prefix and the ''unicode'' type are now redundant. A new type (called +''bytes'') has to be used for binary data (including any particular +''encoding'' of a string). The b'...' prefix allows one to specify a binary +literal. Indexing or slicing a string produces another string. Slicing a byte +string produces another byte string, but the indexing operation produces an +integer. Data read from a file is of '''str'' type if the file was opened in +text mode, or of ''bytes'' type otherwise. + +Since PyCrypto aims at supporting both Python 2.x and 3.x, the following helper +functions are used to keep the rest of the library as independent as possible +from the actual Python version. + +In general, the code should always deal with binary strings, and use integers +instead of 1-byte character strings. + +b(s) + Take a text string literal (with no prefix or with u'...' prefix) and + make a byte string. +bchr(c) + Take an integer and make a 1-character byte string. +bord(c) + Take the result of indexing on a byte string and make an integer. +tobytes(s) + Take a text string, a byte string, or a sequence of character taken from + a byte string and make a byte string. 
+""" + +import sys +import abc + + +if sys.version_info[0] == 2: + def b(s): + return s + def bchr(s): + return chr(s) + def bstr(s): + return str(s) + def bord(s): + return ord(s) + def tobytes(s, encoding="latin-1"): + if isinstance(s, unicode): + return s.encode(encoding) + elif isinstance(s, str): + return s + elif isinstance(s, bytearray): + return bytes(s) + elif isinstance(s, memoryview): + return s.tobytes() + else: + return ''.join(s) + def tostr(bs): + return bs + def byte_string(s): + return isinstance(s, str) + + # In Python 2, a memoryview does not support concatenation + def concat_buffers(a, b): + if isinstance(a, memoryview): + a = a.tobytes() + if isinstance(b, memoryview): + b = b.tobytes() + return a + b + + from StringIO import StringIO + BytesIO = StringIO + + from sys import maxint + + iter_range = xrange + + def is_native_int(x): + return isinstance(x, (int, long)) + + def is_string(x): + return isinstance(x, basestring) + + def is_bytes(x): + return isinstance(x, str) or \ + isinstance(x, bytearray) or \ + isinstance(x, memoryview) + + ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()}) + + FileNotFoundError = IOError + +else: + def b(s): + return s.encode("latin-1") # utf-8 would cause some side-effects we don't want + def bchr(s): + return bytes([s]) + def bstr(s): + if isinstance(s,str): + return bytes(s,"latin-1") + else: + return bytes(s) + def bord(s): + return s + def tobytes(s, encoding="latin-1"): + if isinstance(s, bytes): + return s + elif isinstance(s, bytearray): + return bytes(s) + elif isinstance(s,str): + return s.encode(encoding) + elif isinstance(s, memoryview): + return s.tobytes() + else: + return bytes([s]) + def tostr(bs): + return bs.decode("latin-1") + def byte_string(s): + return isinstance(s, bytes) + + def concat_buffers(a, b): + return a + b + + from io import BytesIO + from io import StringIO + from sys import maxsize as maxint + + iter_range = range + + def is_native_int(x): + return isinstance(x, int) + + def is_string(x): + return isinstance(x, str) + + def is_bytes(x): + return isinstance(x, bytes) or \ + isinstance(x, bytearray) or \ + isinstance(x, memoryview) + + from abc import ABC + + FileNotFoundError = FileNotFoundError + + +def _copy_bytes(start, end, seq): + """Return an immutable copy of a sequence (byte string, byte array, memoryview) + in a certain interval [start:seq]""" + + if isinstance(seq, memoryview): + return seq[start:end].tobytes() + elif isinstance(seq, bytearray): + return bytes(seq[start:end]) + else: + return seq[start:end] + +del sys +del abc diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/py3compat.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/py3compat.pyi new file mode 100644 index 000000000..74e04a2bf --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/py3compat.pyi @@ -0,0 +1,33 @@ +from typing import Union, Any, Optional, IO + +Buffer = Union[bytes, bytearray, memoryview] + +import sys + +def b(s: str) -> bytes: ... +def bchr(s: int) -> bytes: ... +def bord(s: bytes) -> int: ... +def tobytes(s: Union[bytes, str]) -> bytes: ... +def tostr(b: bytes) -> str: ... +def bytestring(x: Any) -> bool: ... + +def is_native_int(s: Any) -> bool: ... +def is_string(x: Any) -> bool: ... +def is_bytes(x: Any) -> bool: ... + +def BytesIO(b: bytes) -> IO[bytes]: ... +def StringIO(s: str) -> IO[str]: ... 
+ +if sys.version_info[0] == 2: + from sys import maxint + iter_range = xrange + +else: + from sys import maxsize as maxint + iter_range = range + +class FileNotFoundError: + def __init__(self, err: int, msg: str, filename: str) -> None: + pass + +def _copy_bytes(start: Optional[int], end: Optional[int], seq: Buffer) -> bytes: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/strxor.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/strxor.py new file mode 100644 index 000000000..362db6e68 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/strxor.py @@ -0,0 +1,146 @@ +# =================================================================== +# +# Copyright (c) 2014, Legrandin +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# =================================================================== + +from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, c_size_t, + create_string_buffer, get_raw_buffer, + c_uint8_ptr, is_writeable_buffer) + +_raw_strxor = load_pycryptodome_raw_lib( + "Crypto.Util._strxor", + """ + void strxor(const uint8_t *in1, + const uint8_t *in2, + uint8_t *out, size_t len); + void strxor_c(const uint8_t *in, + uint8_t c, + uint8_t *out, + size_t len); + """) + + +def strxor(term1, term2, output=None): + """From two byte strings of equal length, + create a third one which is the byte-by-byte XOR of the two. + + Args: + term1 (bytes/bytearray/memoryview): + The first byte string to XOR. + term2 (bytes/bytearray/memoryview): + The second byte string to XOR. + output (bytearray/memoryview): + The location where the result will be written to. + It must have the same length as ``term1`` and ``term2``. + If ``None``, the result is returned. + :Return: + If ``output`` is ``None``, a new byte string with the result. + Otherwise ``None``. + + .. note:: + ``term1`` and ``term2`` must have the same length. 
+ """ + + if len(term1) != len(term2): + raise ValueError("Only byte strings of equal length can be xored") + + if output is None: + result = create_string_buffer(len(term1)) + else: + # Note: output may overlap with either input + result = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(term1) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(term1)) + + _raw_strxor.strxor(c_uint8_ptr(term1), + c_uint8_ptr(term2), + c_uint8_ptr(result), + c_size_t(len(term1))) + + if output is None: + return get_raw_buffer(result) + else: + return None + + +def strxor_c(term, c, output=None): + """From a byte string, create a second one of equal length + where each byte is XOR-red with the same value. + + Args: + term(bytes/bytearray/memoryview): + The byte string to XOR. + c (int): + Every byte in the string will be XOR-ed with this value. + It must be between 0 and 255 (included). + output (None or bytearray/memoryview): + The location where the result will be written to. + It must have the same length as ``term``. + If ``None``, the result is returned. + + Return: + If ``output`` is ``None``, a new ``bytes`` string with the result. + Otherwise ``None``. + """ + + if not 0 <= c < 256: + raise ValueError("c must be in range(256)") + + if output is None: + result = create_string_buffer(len(term)) + else: + # Note: output may overlap with either input + result = output + + if not is_writeable_buffer(output): + raise TypeError("output must be a bytearray or a writeable memoryview") + + if len(term) != len(output): + raise ValueError("output must have the same length as the input" + " (%d bytes)" % len(term)) + + _raw_strxor.strxor_c(c_uint8_ptr(term), + c, + c_uint8_ptr(result), + c_size_t(len(term)) + ) + + if output is None: + return get_raw_buffer(result) + else: + return None + + +def _strxor_direct(term1, term2, result): + """Very fast XOR - check conditions!""" + _raw_strxor.strxor(term1, term2, result, c_size_t(len(term1))) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/strxor.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/strxor.pyi new file mode 100644 index 000000000..ca896f3b1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/Util/strxor.pyi @@ -0,0 +1,6 @@ +from typing import Union, Optional + +Buffer = Union[bytes, bytearray, memoryview] + +def strxor(term1: bytes, term2: bytes, output: Optional[Buffer]=...) -> bytes: ... +def strxor_c(term: bytes, c: int, output: Optional[Buffer]=...) -> bytes: ... 
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/__init__.py new file mode 100644 index 000000000..c33481e26 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/__init__.py @@ -0,0 +1,6 @@ +__all__ = ['Cipher', 'Hash', 'Protocol', 'PublicKey', 'Util', 'Signature', + 'IO', 'Math'] + +version_info = (3, 20, '0') + +__version__ = ".".join([str(x) for x in version_info]) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/__init__.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/__init__.pyi new file mode 100644 index 000000000..bc73446dc --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/__init__.pyi @@ -0,0 +1,4 @@ +from typing import Tuple, Union + +version_info : Tuple[int, int, Union[int, str]] +__version__ : str diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/py.typed b/.direnv/python-3.8.20/lib/python3.8/site-packages/Crypto/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/AUTHORS b/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/AUTHORS new file mode 100644 index 000000000..9311b3962 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/AUTHORS @@ -0,0 +1,58 @@ +GitPython was originally written by Michael Trier. +GitPython 0.2 was partially (re)written by Sebastian Thiel, based on 0.1.6 and git-dulwich. + +Contributors are: + +-Michael Trier +-Alan Briolat +-Florian Apolloner +-David Aguilar +-Jelmer Vernooij +-Steve Frécinaux +-Kai Lautaportti +-Paul Sowden +-Sebastian Thiel +-Jonathan Chu +-Vincent Driessen +-Phil Elson +-Bernard `Guyzmo` Pratz +-Timothy B. Hartman +-Konstantin Popov +-Peter Jones +-Anson Mansfield +-Ken Odegard +-Alexis Horgix Chotard +-Piotr Babij +-Mikuláš Poul +-Charles Bouchard-Légaré +-Yaroslav Halchenko +-Tim Swast +-William Luc Ritchie +-David Host +-A. Jesse Jiryu Davis +-Steven Whitman +-Stefan Stancu +-César Izurieta +-Arthur Milchior +-Anil Khatri +-JJ Graham +-Ben Thayer +-Dries Kennes +-Pratik Anurag +-Harmon +-Liam Beguin +-Ram Rachum +-Alba Mendez +-Robert Westman +-Hugo van Kemenade +-Hiroki Tokunaga +-Julien Mauroy +-Patrick Gerard +-Luke Twist +-Joseph Hale +-Santos Gallegos +-Wenhan Zhu +-Eliah Kagan +-Ethan Lin + +Portions derived from other open source works and are clearly marked. diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/INSTALLER b/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/INSTALLER new file mode 100644 index 000000000..ce3133dc1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/INSTALLER @@ -0,0 +1 @@ +Poetry 2.2.1 \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/LICENSE b/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/LICENSE new file mode 100644 index 000000000..ba8a219fe --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/LICENSE @@ -0,0 +1,29 @@ +Copyright (C) 2008, 2009 Michael Trier and contributors +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +* Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution. + +* Neither the name of the GitPython project nor the names of +its contributors may be used to endorse or promote products derived +from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/METADATA b/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/METADATA new file mode 100644 index 000000000..be6281005 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/METADATA @@ -0,0 +1,297 @@ +Metadata-Version: 2.1 +Name: GitPython +Version: 3.1.43 +Summary: GitPython is a Python library used to interact with Git repositories +Home-page: https://github.com/gitpython-developers/GitPython +Author: Sebastian Thiel, Michael Trier +Author-email: byronimo@gmail.com, mtrier@gmail.com +License: BSD-3-Clause +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Operating System :: POSIX +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Typing :: Typed +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +License-File: LICENSE +License-File: AUTHORS +Requires-Dist: gitdb <5,>=4.0.1 +Requires-Dist: typing-extensions >=3.7.4.3 ; python_version < "3.8" +Provides-Extra: doc +Requires-Dist: sphinx ==4.3.2 ; extra == 'doc' +Requires-Dist: sphinx-rtd-theme ; extra == 'doc' +Requires-Dist: sphinxcontrib-applehelp <=1.0.4,>=1.0.2 ; extra == 'doc' +Requires-Dist: sphinxcontrib-devhelp ==1.0.2 ; extra == 'doc' +Requires-Dist: sphinxcontrib-htmlhelp <=2.0.1,>=2.0.0 ; extra == 'doc' +Requires-Dist: sphinxcontrib-qthelp ==1.0.3 ; extra == 'doc' +Requires-Dist: 
sphinxcontrib-serializinghtml ==1.1.5 ; extra == 'doc' +Requires-Dist: sphinx-autodoc-typehints ; extra == 'doc' +Provides-Extra: test +Requires-Dist: coverage[toml] ; extra == 'test' +Requires-Dist: ddt !=1.4.3,>=1.1.1 ; extra == 'test' +Requires-Dist: mypy ; extra == 'test' +Requires-Dist: pre-commit ; extra == 'test' +Requires-Dist: pytest >=7.3.1 ; extra == 'test' +Requires-Dist: pytest-cov ; extra == 'test' +Requires-Dist: pytest-instafail ; extra == 'test' +Requires-Dist: pytest-mock ; extra == 'test' +Requires-Dist: pytest-sugar ; extra == 'test' +Requires-Dist: typing-extensions ; (python_version < "3.11") and extra == 'test' +Requires-Dist: mock ; (python_version < "3.8") and extra == 'test' + +![Python package](https://github.com/gitpython-developers/GitPython/workflows/Python%20package/badge.svg) +[![Documentation Status](https://readthedocs.org/projects/gitpython/badge/?version=stable)](https://readthedocs.org/projects/gitpython/?badge=stable) +[![Packaging status](https://repology.org/badge/tiny-repos/python:gitpython.svg)](https://repology.org/metapackage/python:gitpython/versions) + +## [Gitoxide](https://github.com/Byron/gitoxide): A peek into the future… + +I started working on GitPython in 2009, back in the days when Python was 'my thing' and I had great plans with it. +Of course, back in the days, I didn't really know what I was doing and this shows in many places. Somewhat similar to +Python this happens to be 'good enough', but at the same time is deeply flawed and broken beyond repair. + +By now, GitPython is widely used and I am sure there is a good reason for that; it's something to be proud of and happy about. +The community is maintaining the software and is keeping it relevant, for which I am absolutely grateful. For the time to come I am happy to continue maintaining GitPython, remaining hopeful that one day it won't be needed anymore. + +More than 15 years after my first meeting with 'git' I am still excited about it, and am happy to finally have the tools and +probably the skills to scratch that itch of mine: implement `git` in a way that makes tool creation a piece of cake for most. + +If you like the idea and want to learn more, please head over to [gitoxide](https://github.com/Byron/gitoxide), an +implementation of 'git' in [Rust](https://www.rust-lang.org). + +*(Please note that `gitoxide` is not currently available for use in Python, and that Rust is required.)* + +## GitPython + +GitPython is a Python library used to interact with Git repositories, high-level like git-porcelain, +or low-level like git-plumbing. + +It provides abstractions of git objects for easy access to repository data, often backed by calling the `git` +command-line program. + +### DEVELOPMENT STATUS + +This project is in **maintenance mode**, which means that + +- …there will be no feature development, unless these are contributed +- …there will be no bug fixes, unless they are relevant to the safety of users, or contributed +- …issues will be responded to with waiting times of up to a month + +The project is open to contributions of all kinds, as well as new maintainers. + +### REQUIREMENTS + +GitPython needs the `git` executable to be installed on the system and available in your +`PATH` for most operations. If it is not in your `PATH`, you can help GitPython find it +by setting the `GIT_PYTHON_GIT_EXECUTABLE=` environment variable. + +- Git (1.7.x or newer) +- Python >= 3.7 + +The dependencies are listed in `./requirements.txt` and `./test-requirements.txt`.
+The installer takes care of installing them for you. + +### INSTALL + +GitPython and its required package dependencies can be installed in any of the following ways, all of which should typically be done in a [virtual environment](https://docs.python.org/3/tutorial/venv.html). + +#### From PyPI + +To obtain and install a copy [from PyPI](https://pypi.org/project/GitPython/), run: + +```sh +pip install GitPython +``` + +(A distribution package can also be downloaded for manual installation at [the PyPI page](https://pypi.org/project/GitPython/).) + +#### From downloaded source code + +If you have downloaded the source code, run this from inside the unpacked `GitPython` directory: + +```sh +pip install . +``` + +#### By cloning the source code repository + +To clone [the GitHub repository](https://github.com/gitpython-developers/GitPython) from source to work on the code, you can do it like so: + +```sh +git clone https://github.com/gitpython-developers/GitPython +cd GitPython +./init-tests-after-clone.sh +``` + +On Windows, `./init-tests-after-clone.sh` can be run in a Git Bash shell. + +If you are cloning [your own fork](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/about-forks), then replace the above `git clone` command with one that gives the URL of your fork. Or use this [`gh`](https://cli.github.com/) command (assuming you have `gh` and your fork is called `GitPython`): + +```sh +gh repo clone GitPython +``` + +Having cloned the repo, create and activate your [virtual environment](https://docs.python.org/3/tutorial/venv.html). + +Then make an [editable install](https://pip.pypa.io/en/stable/topics/local-project-installs/#editable-installs): + +```sh +pip install -e ".[test]" +``` + +In the less common case that you do not want to install test dependencies, `pip install -e .` can be used instead. + +#### With editable *dependencies* (not preferred, and rarely needed) + +In rare cases, you may want to work on GitPython and one or both of its [gitdb](https://github.com/gitpython-developers/gitdb) and [smmap](https://github.com/gitpython-developers/smmap) dependencies at the same time, with changes in your local working copy of gitdb or smmap immediately reflected in the behavior of your local working copy of GitPython. This can be done by making editable installations of those dependencies in the same virtual environment where you install GitPython. + +If you want to do that *and* you want the versions in GitPython's git submodules to be used, then pass `-e git/ext/gitdb` and/or `-e git/ext/gitdb/gitdb/ext/smmap` to `pip install`. This can be done in any order, and in separate `pip install` commands or the same one, so long as `-e` appears before *each* path. For example, you can install GitPython, gitdb, and smmap editably in the currently active virtual environment this way: + +```sh +pip install -e ".[test]" -e git/ext/gitdb -e git/ext/gitdb/gitdb/ext/smmap +``` + +The submodules must have been cloned for that to work, but that will already be the case if you have run `./init-tests-after-clone.sh`. You can use `pip list` to check which packages are installed editably and which are installed normally. + +To reiterate, this approach should only rarely be used. For most development it is preferable to allow the gitdb and smmap dependencies to be retrieved automatically from PyPI in their latest stable packaged versions.
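Once GitPython is installed, day-to-day use is only a few lines. The following is a minimal sketch rather than part of the upstream README; it assumes an existing repository with a checked-out branch in the current directory and the `git` executable on `PATH`:

```python
# Minimal GitPython sketch: open a repository and inspect its head commit.
from git import Repo

repo = Repo(".")                          # an existing repository in the CWD
print(repo.active_branch.name)            # current branch name
head = repo.head.commit
print(head.hexsha[:8], head.summary)      # abbreviated id and commit subject

# Cloning works much the same way; URL and target path are placeholders.
# Repo.clone_from("https://github.com/gitpython-developers/GitPython", "/tmp/GitPython")
```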
+ +### Limitations + +#### Leakage of System Resources + +GitPython is not suited for long-running processes (like daemons) as it tends to +leak system resources. It was written at a time when destructors (as implemented +in the `__del__` method) still ran deterministically. + +In case you still want to use it in such a context, you will want to search the +codebase for `__del__` implementations and call these yourself when you see fit. + +Another way to assure proper cleanup of resources is to factor out GitPython into a +separate process which can be dropped periodically. + +#### Windows support + +See [Issue #525](https://github.com/gitpython-developers/GitPython/issues/525). + +### RUNNING TESTS + +_Important_: Right after cloning this repository, please be sure to have executed +the `./init-tests-after-clone.sh` script in the repository root. Otherwise +you will encounter test failures. + +#### Install test dependencies + +Ensure testing libraries are installed. This is taken care of already if you installed with: + +```sh +pip install -e ".[test]" +``` + +If you had installed with a command like `pip install -e .` instead, you can still run +the above command to add the testing dependencies. + +#### Test commands + +To test, run: + +```sh +pytest +``` + +To lint, and apply some linting fixes as well as automatic code formatting, run: + +```sh +pre-commit run --all-files +``` + +This includes the linting and autoformatting done by Ruff, as well as some other checks. + +To typecheck, run: + +```sh +mypy +``` + +#### CI (and tox) + +Style and formatting checks, and running tests on all the different supported Python versions, will be performed: + +- Upon submitting a pull request. +- On each push, *if* you have a fork with GitHub Actions enabled. +- Locally, if you run [`tox`](https://tox.wiki/) (this skips any Python versions you don't have installed). + +#### Configuration files + +Specific tools are all configured in the `./pyproject.toml` file: + +- `pytest` (test runner) +- `coverage.py` (code coverage) +- `ruff` (linter and formatter) +- `mypy` (type checker) + +Orchestration tools: + +- Configuration for `pre-commit` is in the `./.pre-commit-config.yaml` file. +- Configuration for `tox` is in `./tox.ini`. +- Configuration for GitHub Actions (CI) is in files inside `./.github/workflows/`. + +### Contributions + +Please have a look at the [contributions file][contributing]. + +### INFRASTRUCTURE + +- [User Documentation](http://gitpython.readthedocs.org) +- [Questions and Answers](http://stackexchange.com/filters/167317/gitpython) +- Please post on Stack Overflow and use the `gitpython` tag +- [Issue Tracker](https://github.com/gitpython-developers/GitPython/issues) + - Post reproducible bugs and feature requests as a new issue. + Please be sure to provide the following information if posting bugs: + - GitPython version (e.g. `import git; git.__version__`) + - Python version (e.g. `python --version`) + - The encountered stack-trace, if applicable + - Enough information to allow reproducing the issue + +### How to make a new release + +1. Update/verify the **version** in the `VERSION` file. +2. Update/verify that the `doc/source/changes.rst` changelog file was updated. It should include a link to the forthcoming release page: `https://github.com/gitpython-developers/GitPython/releases/tag/` +3. Commit everything. +4. Run `git tag -s ` to tag the version in Git. +5.
_Optionally_ create and activate a [virtual environment](https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/#creating-a-virtual-environment). (Then the next step can install `build` and `twine`.) +6. Run `make release`. +7. Go to [GitHub Releases](https://github.com/gitpython-developers/GitPython/releases) and publish a new one with the recently pushed tag. Generate the changelog. + +### Projects using GitPython + +- [PyDriller](https://github.com/ishepard/pydriller) +- [Kivy Designer](https://github.com/kivy/kivy-designer) +- [Prowl](https://github.com/nettitude/Prowl) +- [Python Taint](https://github.com/python-security/pyt) +- [Buster](https://github.com/axitkhurana/buster) +- [git-ftp](https://github.com/ezyang/git-ftp) +- [Git-Pandas](https://github.com/wdm0006/git-pandas) +- [PyGitUp](https://github.com/msiemens/PyGitUp) +- [PyJFuzz](https://github.com/mseclab/PyJFuzz) +- [Loki](https://github.com/Neo23x0/Loki) +- [Omniwallet](https://github.com/OmniLayer/omniwallet) +- [GitViper](https://github.com/BeayemX/GitViper) +- [Git Gud](https://github.com/bthayer2365/git-gud) + +### LICENSE + +[3-Clause BSD License](https://opensource.org/license/bsd-3-clause/), also known as the New BSD License. See the [LICENSE file][license]. + +[contributing]: https://github.com/gitpython-developers/GitPython/blob/main/CONTRIBUTING.md +[license]: https://github.com/gitpython-developers/GitPython/blob/main/LICENSE diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/RECORD b/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/RECORD new file mode 100644 index 000000000..fbfa91927 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/RECORD @@ -0,0 +1,45 @@ +git/__init__.py,sha256=w6fnS0QmwTfEFUSL6rfnpP0lUId2goSguZFOvVX3N3U,8899 +git/cmd.py,sha256=qd-gIHSk4mfsYjd9YA08cPyO8TMxaibTXAbFnHK71uc,67659 +git/compat.py,sha256=y1E6y6O2q5r8clSlr8ZNmuIWG9nmHuehQEsVsmBffs8,4526 +git/config.py,sha256=Ald8Xc-G9Shcgx3QCISyXTkL4a6nbc3qll-xUw4YdyY,34924 +git/db.py,sha256=vIW9uWSbqu99zbuU2ZDmOhVOv1UPTmxrnqiCtRHCfjE,2368 +git/diff.py,sha256=IE5aeHL7aP9yxBluYj06IX8nZjoJ_TOM3gG31-Evf_8,27058 +git/exc.py,sha256=Gc7g1pHpn8OmTse30NHmJVsBJ2CYH8LxaR8y8UA3lIM,7119 +git/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +git/remote.py,sha256=IHQ3BvXgoIN1EvHlyH3vrSaQoDkLOE6nooSC0w183sU,46561 +git/types.py,sha256=xCwpp2Y01lhS0MapHhj04m0P_x34kwSD1Gsou_ZPWj8,10251 +git/util.py,sha256=1E883mnPAFLyFk7ivwnEremsp-uJOTc3ks_QypyLung,43651 +git/index/__init__.py,sha256=i-Nqb8Lufp9aFbmxpQBORmmQnjEVVM1Pn58fsQkyGgQ,406 +git/index/base.py,sha256=A4q4cN_Ifxi8CsAR-7h4KsQ2d3JazBNFZ1ltbAKttgs,60734 +git/index/fun.py,sha256=37cA3DBC9vpAnSVu5TGA072SnoF5XZOkOukExwlejHs,16736 +git/index/typ.py,sha256=uuKNwitUw83FhVaLSwo4pY7PHDQudtZTLJrLGym4jcI,6570 +git/index/util.py,sha256=fULi7GPG-MvprKrRCD5c15GNdzku_1E38We0d97WB3A,3659 +git/objects/__init__.py,sha256=O6ZL_olX7e5-8iIbKviRPkVSJxN37WA-EC0q9d48U5Y,637 +git/objects/base.py,sha256=0dqNkSRVH0mk0-7ZKIkGBK7iNYrzLTVxwQFUd6CagsE,10277 +git/objects/blob.py,sha256=zwwq0KfOMYeP5J2tW5CQatoLyeqFRlfkxP1Vwx1h07s,1215 +git/objects/commit.py,sha256=vLZNl1I9zp17Rpge7J66CvsryirEs90jyPTQzoP0JJs,30208 +git/objects/fun.py,sha256=B4jCqhAjm6Hl79GK58FPzW1H9K6Wc7Tx0rssyWmAcEE,8935 +git/objects/tag.py,sha256=gAx8i-DEwy_Z3R2zLkvetYRV8A56BCcTr3iLuTUTfEM,4467 +git/objects/tree.py,sha256=jJH888SHiP4dGzE-ra1yenQOyya_0C_MkHr06c1gHpM,13849 
+git/objects/util.py,sha256=Ml2eqZPKO4y9Hc2vWbXJgpsK3nkN3KGMzbn8AlzLyYQ,23834 +git/objects/submodule/__init__.py,sha256=6xySp767LVz3UylWgUalntS_nGXRuVzXxDuFAv_Wc2c,303 +git/objects/submodule/base.py,sha256=MQ-2xV8JznGwy2hLQv1aeQNgAkhBhgc5tdtClFL3DmE,63901 +git/objects/submodule/root.py,sha256=5eTtYNHasqdPq6q0oDCPr7IaO6uAHL3b4DxMoiO2LhE,20246 +git/objects/submodule/util.py,sha256=sQqAYaiSJdFkZa9NlAuK_wTsMNiS-kkQnQjvIoJtc_o,3509 +git/refs/__init__.py,sha256=DWlJNnsx-4jM_E-VycbP-FZUdn6iWhjnH_uZ_pZXBro,509 +git/refs/head.py,sha256=GAZpD5EfqSciDXPtgjHY8ZbBixKExJRhojUB-HrrJPg,10491 +git/refs/log.py,sha256=kXiuAgTo1DIuM_BfbDUk9gQ0YO-mutIMVdHv1_ES90o,12493 +git/refs/reference.py,sha256=l6mhF4YLSEwtjz6b9PpOQH-fkng7EYWMaJhkjn-2jXA,5630 +git/refs/remote.py,sha256=WwqV9T7BbYf3F_WZNUQivu9xktIIKGklCjDpwQrhD-A,2806 +git/refs/symbolic.py,sha256=c8zOwaqzcg-J-rGrpuWdvh8zwMvSUqAHghd4vJoYG_s,34552 +git/refs/tag.py,sha256=kgzV2vhpL4FD2TqHb0BJuMRAHgAvJF-TcoyWlaB-djQ,5010 +git/repo/__init__.py,sha256=CILSVH36fX_WxVFSjD9o1WF5LgsNedPiJvSngKZqfVU,210 +git/repo/base.py,sha256=mitfJ8u99CsMpDd7_VRyx-SF8omu2tpf3lqzSaQkKoQ,59353 +git/repo/fun.py,sha256=tEsClpmbOrKMSNIdncOB_6JdikrL1-AfkOFd7xMpD8k,13582 +GitPython-3.1.43.dist-info/AUTHORS,sha256=h1TlPKfp05GA1eKQ15Yl4biR0C0FgivuGSeRA6Q1dz0,2286 +GitPython-3.1.43.dist-info/LICENSE,sha256=hvyUwyGpr7wRUUcTURuv3tIl8lEA3MD3NQ6CvCMbi-s,1503 +GitPython-3.1.43.dist-info/METADATA,sha256=sAh3r1BMVw5_olGgDmpMS69zBpVr7UEOeRivNHKznfU,13376 +GitPython-3.1.43.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 +GitPython-3.1.43.dist-info/top_level.txt,sha256=0hzDuIp8obv624V3GmbqsagBWkk8ohtGU-Bc1PmTT0o,4 +GitPython-3.1.43.dist-info/INSTALLER,sha256=sz0Tnp99msIbbLB51q7U9nA6AvRKcsOeUKhrHH0Ulg4,12 +GitPython-3.1.43.dist-info/RECORD,, diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/WHEEL b/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/WHEEL new file mode 100644 index 000000000..bab98d675 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.43.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/top_level.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/top_level.txt new file mode 100644 index 000000000..5664e303b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/GitPython-3.1.43.dist-info/top_level.txt @@ -0,0 +1 @@ +git diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/INSTALLER b/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/INSTALLER new file mode 100644 index 000000000..ce3133dc1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/INSTALLER @@ -0,0 +1 @@ +Poetry 2.2.1 \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/LICENSE.rst b/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/LICENSE.rst new file mode 100644 index 000000000..9d227a0cc --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. 
Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/METADATA b/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/METADATA new file mode 100644 index 000000000..dfe37d52d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/METADATA @@ -0,0 +1,93 @@ +Metadata-Version: 2.1 +Name: MarkupSafe +Version: 2.1.5 +Summary: Safely add untrusted strings to HTML/XML markup. +Home-page: https://palletsprojects.com/p/markupsafe/ +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://markupsafe.palletsprojects.com/ +Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/markupsafe/ +Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/ +Project-URL: Chat, https://discord.gg/pallets +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Text Processing :: Markup :: HTML +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst + +MarkupSafe +========== + +MarkupSafe implements a text object that escapes characters so it is +safe to use in HTML and XML. Characters that have special meanings are +replaced so that they display as the actual characters. This mitigates +injection attacks, meaning untrusted user input can safely be displayed +on a page. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U MarkupSafe + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +Examples +-------- + +.. 
code-block:: pycon + + >>> from markupsafe import Markup, escape + + >>> # escape replaces special characters and wraps in Markup + >>> escape("") + Markup('<script>alert(document.cookie);</script>') + + >>> # wrap in Markup to mark text "safe" and prevent escaping + >>> Markup("Hello") + Markup('hello') + + >>> escape(Markup("Hello")) + Markup('hello') + + >>> # Markup is a str subclass + >>> # methods and operators escape their arguments + >>> template = Markup("Hello {name}") + >>> template.format(name='"World"') + Markup('Hello "World"') + + +Donate +------ + +The Pallets organization develops and supports MarkupSafe and other +popular packages. In order to grow the community of contributors and +users, and allow the maintainers to devote more time to the projects, +`please donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://markupsafe.palletsprojects.com/ +- Changes: https://markupsafe.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/MarkupSafe/ +- Source Code: https://github.com/pallets/markupsafe/ +- Issue Tracker: https://github.com/pallets/markupsafe/issues/ +- Chat: https://discord.gg/pallets diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/RECORD b/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/RECORD new file mode 100644 index 000000000..ebec50a44 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/RECORD @@ -0,0 +1,12 @@ +markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229 +markupsafe/_native.py,sha256=GR86Qvo_GcgKmKreA1WmYN9ud17OFwkww8E-fiW-57s,1713 +markupsafe/_speedups.cpython-38-x86_64-linux-gnu.so,sha256=dR4WFvrcpISjeacwjKYJrqkz-0j0ZXF4oAHtTK0cKpw,45024 +markupsafe/__init__.py,sha256=r7VOTjUq7EMQ4v3p4R1LoVOGJg6ysfYRncLr34laRBs,10958 +markupsafe/_speedups.c,sha256=X2XvQVtIdcK4Usz70BvkzoOfjTCmQlDkkjYSn-swE0g,7083 +MarkupSafe-2.1.5.dist-info/WHEEL,sha256=zTDqV7OR0em6fvysya0bwC-51Mb7EQ0x5PBJySRF2iQ,148 +MarkupSafe-2.1.5.dist-info/METADATA,sha256=2dRDPam6OZLfpX0wg1JN5P3u9arqACxVSfdGmsJU7o8,3003 +MarkupSafe-2.1.5.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 +MarkupSafe-2.1.5.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 +MarkupSafe-2.1.5.dist-info/INSTALLER,sha256=sz0Tnp99msIbbLB51q7U9nA6AvRKcsOeUKhrHH0Ulg4,12 +MarkupSafe-2.1.5.dist-info/RECORD,, diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/WHEEL b/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/WHEEL new file mode 100644 index 000000000..c825336ae --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: false +Tag: cp38-cp38-manylinux_2_17_x86_64 +Tag: cp38-cp38-manylinux2014_x86_64 + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/top_level.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/top_level.txt new file mode 100644 index 000000000..75bf72925 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/MarkupSafe-2.1.5.dist-info/top_level.txt @@ -0,0 +1 @@ +markupsafe diff --git 
a/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/AUTHORS.rst b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/AUTHORS.rst new file mode 100644 index 000000000..88e2b6ad7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/AUTHORS.rst @@ -0,0 +1,7 @@ +Authors +======= + +``pyjwt`` is currently written and maintained by `Jose Padilla `_. +Originally written and maintained by `Jeff Lindsay `_. + +A full list of contributors can be found on GitHub’s `overview `_. diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/INSTALLER b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/INSTALLER new file mode 100644 index 000000000..ce3133dc1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/INSTALLER @@ -0,0 +1 @@ +Poetry 2.2.1 \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/LICENSE b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/LICENSE new file mode 100644 index 000000000..fd0ecbc88 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2022 José Padilla + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
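The MarkupSafe METADATA shown earlier in this diff describes how ``escape`` and ``Markup`` interact. A minimal sketch of that behaviour follows; it is not part of the vendored files, and the literal tag strings are illustrative.

.. code-block:: python

    # Minimal sketch of the escape()/Markup() behaviour described in the
    # MarkupSafe METADATA above; the literal tag strings are illustrative.
    from markupsafe import Markup, escape

    # escape() replaces HTML-special characters and returns a Markup (a str subclass).
    print(escape("<script>alert(1)</script>"))
    # -> &lt;script&gt;alert(1)&lt;/script&gt;

    # Wrapping text in Markup marks it as already safe, so escape() leaves it alone.
    print(escape(Markup("<em>Hello</em>")))
    # -> <em>Hello</em>

    # Methods and operators on Markup escape their arguments automatically.
    template = Markup("Hello <em>{name}</em>")
    print(template.format(name='"World"'))
    # -> Hello <em>&#34;World&#34;</em>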
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/METADATA b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/METADATA new file mode 100644 index 000000000..b329a46ea --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/METADATA @@ -0,0 +1,107 @@ +Metadata-Version: 2.1 +Name: PyJWT +Version: 2.8.0 +Summary: JSON Web Token implementation in Python +Home-page: https://github.com/jpadilla/pyjwt +Author: Jose Padilla +Author-email: hello@jpadilla.com +License: MIT +Keywords: json,jwt,security,signing,token,web +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Topic :: Utilities +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: AUTHORS.rst +Requires-Dist: typing-extensions ; python_version <= "3.7" +Provides-Extra: crypto +Requires-Dist: cryptography (>=3.4.0) ; extra == 'crypto' +Provides-Extra: dev +Requires-Dist: sphinx (<5.0.0,>=4.5.0) ; extra == 'dev' +Requires-Dist: sphinx-rtd-theme ; extra == 'dev' +Requires-Dist: zope.interface ; extra == 'dev' +Requires-Dist: cryptography (>=3.4.0) ; extra == 'dev' +Requires-Dist: pytest (<7.0.0,>=6.0.0) ; extra == 'dev' +Requires-Dist: coverage[toml] (==5.0.4) ; extra == 'dev' +Requires-Dist: pre-commit ; extra == 'dev' +Provides-Extra: docs +Requires-Dist: sphinx (<5.0.0,>=4.5.0) ; extra == 'docs' +Requires-Dist: sphinx-rtd-theme ; extra == 'docs' +Requires-Dist: zope.interface ; extra == 'docs' +Provides-Extra: tests +Requires-Dist: pytest (<7.0.0,>=6.0.0) ; extra == 'tests' +Requires-Dist: coverage[toml] (==5.0.4) ; extra == 'tests' + +PyJWT +===== + +.. image:: https://github.com/jpadilla/pyjwt/workflows/CI/badge.svg + :target: https://github.com/jpadilla/pyjwt/actions?query=workflow%3ACI + +.. image:: https://img.shields.io/pypi/v/pyjwt.svg + :target: https://pypi.python.org/pypi/pyjwt + +.. image:: https://codecov.io/gh/jpadilla/pyjwt/branch/master/graph/badge.svg + :target: https://codecov.io/gh/jpadilla/pyjwt + +.. image:: https://readthedocs.org/projects/pyjwt/badge/?version=stable + :target: https://pyjwt.readthedocs.io/en/stable/ + +A Python implementation of `RFC 7519 `_. Original implementation was written by `@progrium `_. + +Sponsor +------- + ++--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| |auth0-logo| | If you want to quickly add secure token-based authentication to Python projects, feel free to check Auth0's Python SDK and free plan at `auth0.com/developers `_. 
| ++--------------+-----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + +.. |auth0-logo| image:: https://user-images.githubusercontent.com/83319/31722733-de95bbde-b3ea-11e7-96bf-4f4e8f915588.png + +Installing +---------- + +Install with **pip**: + +.. code-block:: console + + $ pip install PyJWT + + +Usage +----- + +.. code-block:: pycon + + >>> import jwt + >>> encoded = jwt.encode({"some": "payload"}, "secret", algorithm="HS256") + >>> print(encoded) + eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzb21lIjoicGF5bG9hZCJ9.4twFt5NiznN84AWoo1d7KO1T_yoc0Z6XOpOVswacPZg + >>> jwt.decode(encoded, "secret", algorithms=["HS256"]) + {'some': 'payload'} + +Documentation +------------- + +View the full docs online at https://pyjwt.readthedocs.io/en/stable/ + + +Tests +----- + +You can run tests from the project root after cloning with: + +.. code-block:: console + + $ tox diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/RECORD b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/RECORD new file mode 100644 index 000000000..a3292b97f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/RECORD @@ -0,0 +1,20 @@ +jwt/__init__.py,sha256=mV9lg6n4-0xiqCKaE1eEPC9a4j6sEkEYQcKghULE7kU,1670 +jwt/algorithms.py,sha256=RDsv5Lm3bzwsiWT3TynT7JR41R6H6s_fWUGOIqd9x_I,29800 +jwt/api_jwk.py,sha256=HPxVqgBZm7RTaEXydciNBCuYNKDYOC_prTdaN9toGbo,4196 +jwt/api_jws.py,sha256=da17RrDe0PDccTbx3rx2lLezEG_c_YGw_vVHa335IOk,11099 +jwt/api_jwt.py,sha256=yF9DwF1kt3PA5n_TiU0OmHd0LtPHfe4JCE1XOfKPjw0,12638 +jwt/exceptions.py,sha256=KDC3M7cTrpR4OQXVURlVMThem0pfANSgBxRz-ttivmo,1046 +jwt/help.py,sha256=Jrp84fG43sCwmSIaDtY08I6ZR2VE7NhrTff89tYSE40,1749 +jwt/jwk_set_cache.py,sha256=hBKmN-giU7-G37L_XKgc_OZu2ah4wdbj1ZNG_GkoSE8,959 +jwt/jwks_client.py,sha256=9W8JVyGByQgoLbBN1u5iY1_jlgfnnukeOBTpqaM_9SE,4222 +jwt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jwt/types.py,sha256=VnhGv_VFu5a7_mrPoSCB7HaNLrJdhM8Sq1sSfEg0gLU,99 +jwt/utils.py,sha256=PAI05_8MHQCxWQTDlwN0hTtTIT2DTTZ28mm1x6-26UY,3903 +jwt/warnings.py,sha256=50XWOnyNsIaqzUJTk6XHNiIDykiL763GYA92MjTKmok,59 +PyJWT-2.8.0.dist-info/AUTHORS.rst,sha256=klzkNGECnu2_VY7At89_xLBF3vUSDruXk3xwgUBxzwc,322 +PyJWT-2.8.0.dist-info/LICENSE,sha256=eXp6ICMdTEM-nxkR2xcx0GtYKLmPSZgZoDT3wPVvXOU,1085 +PyJWT-2.8.0.dist-info/METADATA,sha256=pV2XZjvithGcVesLHWAv0J4T5t8Qc66fip2sbxwoz1o,4160 +PyJWT-2.8.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +PyJWT-2.8.0.dist-info/top_level.txt,sha256=RP5DHNyJbMq2ka0FmfTgoSaQzh7e3r5XuCWCO8a00k8,4 +PyJWT-2.8.0.dist-info/INSTALLER,sha256=sz0Tnp99msIbbLB51q7U9nA6AvRKcsOeUKhrHH0Ulg4,12 +PyJWT-2.8.0.dist-info/RECORD,, diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/WHEEL b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/WHEEL new file mode 100644 index 000000000..1f37c02f2 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/top_level.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/top_level.txt new file mode 
100644 index 000000000..27ccc9bc3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyJWT-2.8.0.dist-info/top_level.txt @@ -0,0 +1 @@ +jwt diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/INSTALLER b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/INSTALLER new file mode 100644 index 000000000..ce3133dc1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/INSTALLER @@ -0,0 +1 @@ +Poetry 2.2.1 \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/LICENSE b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/LICENSE new file mode 100644 index 000000000..2f1b8e15e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2017-2021 Ingy döt Net +Copyright (c) 2006-2016 Kirill Simonov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
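The PyJWT METADATA earlier in this diff shows the basic ``jwt.encode``/``jwt.decode`` round trip. A short sketch of the same API with an expiry claim added; the claim names, secret, and lifetime are illustrative and not taken from the vendored files.

.. code-block:: python

    # Sketch extending the encode/decode example from the PyJWT METADATA above;
    # the claim names, secret, and lifetime are illustrative.
    import datetime

    import jwt

    payload = {
        "some": "payload",
        "exp": datetime.datetime.utcnow() + datetime.timedelta(minutes=5),
    }
    token = jwt.encode(payload, "secret", algorithm="HS256")

    try:
        # decode() verifies the signature and, by default, the "exp" claim.
        claims = jwt.decode(token, "secret", algorithms=["HS256"])
    except jwt.ExpiredSignatureError:
        claims = None
    print(claims)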
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/METADATA b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/METADATA new file mode 100644 index 000000000..c8905983e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/METADATA @@ -0,0 +1,46 @@ +Metadata-Version: 2.1 +Name: PyYAML +Version: 6.0.1 +Summary: YAML parser and emitter for Python +Home-page: https://pyyaml.org/ +Download-URL: https://pypi.org/project/PyYAML/ +Author: Kirill Simonov +Author-email: xi@resolvent.net +License: MIT +Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues +Project-URL: CI, https://github.com/yaml/pyyaml/actions +Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation +Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core +Project-URL: Source Code, https://github.com/yaml/pyyaml +Platform: Any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Cython +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup +Requires-Python: >=3.6 +License-File: LICENSE + +YAML is a data serialization format designed for human readability +and interaction with scripting languages. PyYAML is a YAML parser +and emitter for Python. + +PyYAML features a complete YAML 1.1 parser, Unicode support, pickle +support, capable extension API, and sensible error messages. PyYAML +supports standard YAML tags and provides Python-specific tags that +allow to represent an arbitrary Python object. + +PyYAML is applicable for a broad range of tasks from complex +configuration files to object serialization and persistence. 
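The PyYAML METADATA above summarises the parser and emitter. A minimal round-trip sketch using the safe loader and dumper; the mapping contents are illustrative, not taken from the vendored files.

.. code-block:: python

    # Minimal safe_load/safe_dump round trip for the PyYAML package described above;
    # the mapping contents are illustrative.
    import yaml

    config = {"service": {"name": "example", "replicas": 3, "tags": ["web", "internal"]}}

    text = yaml.safe_dump(config, sort_keys=False)  # emit YAML text
    assert yaml.safe_load(text) == config           # parse it back into plain Python objects
    print(text)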
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/RECORD b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/RECORD new file mode 100644 index 000000000..6bee24f0e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/RECORD @@ -0,0 +1,25 @@ +yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061 +yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837 +yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004 +yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190 +yaml/_yaml.cpython-38-x86_64-linux-gnu.so,sha256=LYS21yWcRUufTfHxLGpF3exE0PjGxwDNtmYGassqUJk,2397464 +yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165 +yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794 +yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533 +yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279 +yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573 +yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883 +yaml/__init__.py,sha256=bhl05qSeO-1ZxlSRjGrvl2m9nrXb1n9-GQatTN0Mrqc,12311 +yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495 +yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006 +yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851 +yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639 +yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440 +yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445 +_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402 +PyYAML-6.0.1.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11 +PyYAML-6.0.1.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101 +PyYAML-6.0.1.dist-info/WHEEL,sha256=5DQRW5VdkIxH8e7ylwa-YBCXLhSZeqHiz1ZmuOscrlo,148 +PyYAML-6.0.1.dist-info/METADATA,sha256=UNNF8-SzzwOKXVo-kV5lXUGH2_wDWMBmGxqISpp5HQk,2058 +PyYAML-6.0.1.dist-info/INSTALLER,sha256=sz0Tnp99msIbbLB51q7U9nA6AvRKcsOeUKhrHH0Ulg4,12 +PyYAML-6.0.1.dist-info/RECORD,, diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/WHEEL b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/WHEEL new file mode 100644 index 000000000..4133c42cc --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: false +Tag: cp38-cp38-manylinux_2_17_x86_64 +Tag: cp38-cp38-manylinux2014_x86_64 + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/top_level.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/top_level.txt new file mode 100644 index 000000000..e6475e911 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/PyYAML-6.0.1.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_yaml +yaml diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_black_version.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_black_version.py new file mode 100644 index 000000000..66ce02916 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_black_version.py @@ -0,0 +1 @@ +version = "24.4.2" diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_cffi_backend.cpython-38-x86_64-linux-gnu.so 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/_cffi_backend.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..2c39da67f Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/_cffi_backend.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_distutils_hack/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_distutils_hack/__init__.py new file mode 100644 index 000000000..47ce24944 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_distutils_hack/__init__.py @@ -0,0 +1,128 @@ +import sys +import os +import re +import importlib +import warnings + + +is_pypy = '__pypy__' in sys.builtin_module_names + + +warnings.filterwarnings('ignore', + '.+ distutils .+ deprecated', + DeprecationWarning) + + +def warn_distutils_present(): + if 'distutils' not in sys.modules: + return + if is_pypy and sys.version_info < (3, 7): + # PyPy for 3.6 unconditionally imports distutils, so bypass the warning + # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 + return + warnings.warn( + "Distutils was imported before Setuptools, but importing Setuptools " + "also replaces the `distutils` module in `sys.modules`. This may lead " + "to undesirable behaviors or errors. To avoid these issues, avoid " + "using distutils directly, ensure that setuptools is installed in the " + "traditional way (e.g. not an editable install), and/or make sure " + "that setuptools is always imported before distutils.") + + +def clear_distutils(): + if 'distutils' not in sys.modules: + return + warnings.warn("Setuptools is replacing distutils.") + mods = [name for name in sys.modules if re.match(r'distutils\b', name)] + for name in mods: + del sys.modules[name] + + +def enabled(): + """ + Allow selection of distutils by environment variable. + """ + which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib') + return which == 'local' + + +def ensure_local_distutils(): + clear_distutils() + distutils = importlib.import_module('setuptools._distutils') + distutils.__name__ = 'distutils' + sys.modules['distutils'] = distutils + + # sanity check that submodules load as expected + core = importlib.import_module('distutils.core') + assert '_distutils' in core.__file__, core.__file__ + + +def do_override(): + """ + Ensure that the local copy of distutils is preferred over stdlib. + + See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 + for more motivation. + """ + if enabled(): + warn_distutils_present() + ensure_local_distutils() + + +class DistutilsMetaFinder: + def find_spec(self, fullname, path, target=None): + if path is not None: + return + + method_name = 'spec_for_{fullname}'.format(**locals()) + method = getattr(self, method_name, lambda: None) + return method() + + def spec_for_distutils(self): + import importlib.abc + import importlib.util + + class DistutilsLoader(importlib.abc.Loader): + + def create_module(self, spec): + return importlib.import_module('setuptools._distutils') + + def exec_module(self, module): + pass + + return importlib.util.spec_from_loader('distutils', DistutilsLoader()) + + def spec_for_pip(self): + """ + Ensure stdlib distutils when running under pip. + See pypa/pip#8761 for rationale. 
+ """ + if self.pip_imported_during_build(): + return + clear_distutils() + self.spec_for_distutils = lambda: None + + @staticmethod + def pip_imported_during_build(): + """ + Detect if pip is being imported in a build script. Ref #2355. + """ + import traceback + return any( + frame.f_globals['__file__'].endswith('setup.py') + for frame, line in traceback.walk_stack(None) + ) + + +DISTUTILS_FINDER = DistutilsMetaFinder() + + +def add_shim(): + sys.meta_path.insert(0, DISTUTILS_FINDER) + + +def remove_shim(): + try: + sys.meta_path.remove(DISTUTILS_FINDER) + except ValueError: + pass diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_distutils_hack/override.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_distutils_hack/override.py new file mode 100644 index 000000000..2cc433a4a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_distutils_hack/override.py @@ -0,0 +1 @@ +__import__('_distutils_hack').do_override() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/__init__.py new file mode 100644 index 000000000..b694a5f24 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/__init__.py @@ -0,0 +1,10 @@ +__all__ = ["__version__", "version_tuple"] + +try: + from ._version import version as __version__ + from ._version import version_tuple +except ImportError: # pragma: no cover + # broken installation, we don't even try + # unknown only works because we do poor mans version compare + __version__ = "unknown" + version_tuple = (0, 0, "unknown") diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_argcomplete.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_argcomplete.py new file mode 100644 index 000000000..c24f92520 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_argcomplete.py @@ -0,0 +1,117 @@ +"""Allow bash-completion for argparse with argcomplete if installed. + +Needs argcomplete>=0.5.6 for python 3.2/3.3 (older versions fail +to find the magic string, so _ARGCOMPLETE env. var is never set, and +this does not need special code). + +Function try_argcomplete(parser) should be called directly before +the call to ArgumentParser.parse_args(). + +The filescompleter is what you normally would use on the positional +arguments specification, in order to get "dirname/" after "dirn" +instead of the default "dirname ": + + optparser.add_argument(Config._file_or_dir, nargs='*').completer=filescompleter + +Other, application specific, completers should go in the file +doing the add_argument calls as they need to be specified as .completer +attributes as well. (If argcomplete is not installed, the function the +attribute points to will not be used). + +SPEEDUP +======= + +The generic argcomplete script for bash-completion +(/etc/bash_completion.d/python-argcomplete.sh) +uses a python program to determine startup script generated by pip. +You can speed up completion somewhat by changing this script to include + # PYTHON_ARGCOMPLETE_OK +so the python-argcomplete-check-easy-install-script does not +need to be called to find the entry point of the code and see if that is +marked with PYTHON_ARGCOMPLETE_OK. + +INSTALL/DEBUGGING +================= + +To include this support in another application that has setup.py generated +scripts: + +- Add the line: + # PYTHON_ARGCOMPLETE_OK + near the top of the main python entry point. 
+ +- Include in the file calling parse_args(): + from _argcomplete import try_argcomplete, filescompleter + Call try_argcomplete just before parse_args(), and optionally add + filescompleter to the positional arguments' add_argument(). + +If things do not work right away: + +- Switch on argcomplete debugging with (also helpful when doing custom + completers): + export _ARC_DEBUG=1 + +- Run: + python-argcomplete-check-easy-install-script $(which appname) + echo $? + will echo 0 if the magic line has been found, 1 if not. + +- Sometimes it helps to find early on errors using: + _ARGCOMPLETE=1 _ARC_DEBUG=1 appname + which should throw a KeyError: 'COMPLINE' (which is properly set by the + global argcomplete script). +""" + +import argparse +from glob import glob +import os +import sys +from typing import Any +from typing import List +from typing import Optional + + +class FastFilesCompleter: + """Fast file completer class.""" + + def __init__(self, directories: bool = True) -> None: + self.directories = directories + + def __call__(self, prefix: str, **kwargs: Any) -> List[str]: + # Only called on non option completions. + if os.sep in prefix[1:]: + prefix_dir = len(os.path.dirname(prefix) + os.sep) + else: + prefix_dir = 0 + completion = [] + globbed = [] + if "*" not in prefix and "?" not in prefix: + # We are on unix, otherwise no bash. + if not prefix or prefix[-1] == os.sep: + globbed.extend(glob(prefix + ".*")) + prefix += "*" + globbed.extend(glob(prefix)) + for x in sorted(globbed): + if os.path.isdir(x): + x += "/" + # Append stripping the prefix (like bash, not like compgen). + completion.append(x[prefix_dir:]) + return completion + + +if os.environ.get("_ARGCOMPLETE"): + try: + import argcomplete.completers + except ImportError: + sys.exit(-1) + filescompleter: Optional[FastFilesCompleter] = FastFilesCompleter() + + def try_argcomplete(parser: argparse.ArgumentParser) -> None: + argcomplete.autocomplete(parser, always_complete_options=False) + +else: + + def try_argcomplete(parser: argparse.ArgumentParser) -> None: + pass + + filescompleter = None diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_code/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_code/__init__.py new file mode 100644 index 000000000..b0a418e95 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_code/__init__.py @@ -0,0 +1,24 @@ +"""Python inspection/code generation API.""" + +from .code import Code +from .code import ExceptionInfo +from .code import filter_traceback +from .code import Frame +from .code import getfslineno +from .code import Traceback +from .code import TracebackEntry +from .source import getrawcode +from .source import Source + + +__all__ = [ + "Code", + "ExceptionInfo", + "filter_traceback", + "Frame", + "getfslineno", + "getrawcode", + "Traceback", + "TracebackEntry", + "Source", +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_code/code.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_code/code.py new file mode 100644 index 000000000..c65ce79f7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_code/code.py @@ -0,0 +1,1411 @@ +# mypy: allow-untyped-defs +import ast +import dataclasses +import inspect +from inspect import CO_VARARGS +from inspect import CO_VARKEYWORDS +from io import StringIO +import os +from pathlib import Path +import re +import sys +import traceback +from traceback import format_exception_only +from types import CodeType +from types 
import FrameType +from types import TracebackType +from typing import Any +from typing import Callable +from typing import ClassVar +from typing import Dict +from typing import Final +from typing import final +from typing import Generic +from typing import Iterable +from typing import List +from typing import Literal +from typing import Mapping +from typing import Optional +from typing import overload +from typing import Pattern +from typing import Sequence +from typing import Set +from typing import SupportsIndex +from typing import Tuple +from typing import Type +from typing import TypeVar +from typing import Union + +import pluggy + +import _pytest +from _pytest._code.source import findsource +from _pytest._code.source import getrawcode +from _pytest._code.source import getstatementrange_ast +from _pytest._code.source import Source +from _pytest._io import TerminalWriter +from _pytest._io.saferepr import safeformat +from _pytest._io.saferepr import saferepr +from _pytest.compat import get_real_func +from _pytest.deprecated import check_ispytest +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath + + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + +_TracebackStyle = Literal["long", "short", "line", "no", "native", "value", "auto"] + + +class Code: + """Wrapper around Python code objects.""" + + __slots__ = ("raw",) + + def __init__(self, obj: CodeType) -> None: + self.raw = obj + + @classmethod + def from_function(cls, obj: object) -> "Code": + return cls(getrawcode(obj)) + + def __eq__(self, other): + return self.raw == other.raw + + # Ignore type because of https://github.com/python/mypy/issues/4266. + __hash__ = None # type: ignore + + @property + def firstlineno(self) -> int: + return self.raw.co_firstlineno - 1 + + @property + def name(self) -> str: + return self.raw.co_name + + @property + def path(self) -> Union[Path, str]: + """Return a path object pointing to source code, or an ``str`` in + case of ``OSError`` / non-existing file.""" + if not self.raw.co_filename: + return "" + try: + p = absolutepath(self.raw.co_filename) + # maybe don't try this checking + if not p.exists(): + raise OSError("path check failed.") + return p + except OSError: + # XXX maybe try harder like the weird logic + # in the standard lib [linecache.updatecache] does? + return self.raw.co_filename + + @property + def fullsource(self) -> Optional["Source"]: + """Return a _pytest._code.Source object for the full source file of the code.""" + full, _ = findsource(self.raw) + return full + + def source(self) -> "Source": + """Return a _pytest._code.Source object for the code object's source only.""" + # return source only for that part of code + return Source(self.raw) + + def getargs(self, var: bool = False) -> Tuple[str, ...]: + """Return a tuple with the argument names for the code object. + + If 'var' is set True also return the names of the variable and + keyword arguments when present. + """ + # Handy shortcut for getting args. 
+ raw = self.raw + argcount = raw.co_argcount + if var: + argcount += raw.co_flags & CO_VARARGS + argcount += raw.co_flags & CO_VARKEYWORDS + return raw.co_varnames[:argcount] + + +class Frame: + """Wrapper around a Python frame holding f_locals and f_globals + in which expressions can be evaluated.""" + + __slots__ = ("raw",) + + def __init__(self, frame: FrameType) -> None: + self.raw = frame + + @property + def lineno(self) -> int: + return self.raw.f_lineno - 1 + + @property + def f_globals(self) -> Dict[str, Any]: + return self.raw.f_globals + + @property + def f_locals(self) -> Dict[str, Any]: + return self.raw.f_locals + + @property + def code(self) -> Code: + return Code(self.raw.f_code) + + @property + def statement(self) -> "Source": + """Statement this frame is at.""" + if self.code.fullsource is None: + return Source("") + return self.code.fullsource.getstatement(self.lineno) + + def eval(self, code, **vars): + """Evaluate 'code' in the frame. + + 'vars' are optional additional local variables. + + Returns the result of the evaluation. + """ + f_locals = self.f_locals.copy() + f_locals.update(vars) + return eval(code, self.f_globals, f_locals) + + def repr(self, object: object) -> str: + """Return a 'safe' (non-recursive, one-line) string repr for 'object'.""" + return saferepr(object) + + def getargs(self, var: bool = False): + """Return a list of tuples (name, value) for all arguments. + + If 'var' is set True, also include the variable and keyword arguments + when present. + """ + retval = [] + for arg in self.code.getargs(var): + try: + retval.append((arg, self.f_locals[arg])) + except KeyError: + pass # this can occur when using Psyco + return retval + + +class TracebackEntry: + """A single entry in a Traceback.""" + + __slots__ = ("_rawentry", "_repr_style") + + def __init__( + self, + rawentry: TracebackType, + repr_style: Optional['Literal["short", "long"]'] = None, + ) -> None: + self._rawentry: "Final" = rawentry + self._repr_style: "Final" = repr_style + + def with_repr_style( + self, repr_style: Optional['Literal["short", "long"]'] + ) -> "TracebackEntry": + return TracebackEntry(self._rawentry, repr_style) + + @property + def lineno(self) -> int: + return self._rawentry.tb_lineno - 1 + + @property + def frame(self) -> Frame: + return Frame(self._rawentry.tb_frame) + + @property + def relline(self) -> int: + return self.lineno - self.frame.code.firstlineno + + def __repr__(self) -> str: + return "" % (self.frame.code.path, self.lineno + 1) + + @property + def statement(self) -> "Source": + """_pytest._code.Source object for the current statement.""" + source = self.frame.code.fullsource + assert source is not None + return source.getstatement(self.lineno) + + @property + def path(self) -> Union[Path, str]: + """Path to the source code.""" + return self.frame.code.path + + @property + def locals(self) -> Dict[str, Any]: + """Locals of underlying frame.""" + return self.frame.f_locals + + def getfirstlinesource(self) -> int: + return self.frame.code.firstlineno + + def getsource( + self, astcache: Optional[Dict[Union[str, Path], ast.AST]] = None + ) -> Optional["Source"]: + """Return failing source code.""" + # we use the passed in astcache to not reparse asttrees + # within exception info printing + source = self.frame.code.fullsource + if source is None: + return None + key = astnode = None + if astcache is not None: + key = self.frame.code.path + if key is not None: + astnode = astcache.get(key, None) + start = self.getfirstlinesource() + try: + astnode, _, end = 
getstatementrange_ast( + self.lineno, source, astnode=astnode + ) + except SyntaxError: + end = self.lineno + 1 + else: + if key is not None and astcache is not None: + astcache[key] = astnode + return source[start:end] + + source = property(getsource) + + def ishidden(self, excinfo: Optional["ExceptionInfo[BaseException]"]) -> bool: + """Return True if the current frame has a var __tracebackhide__ + resolving to True. + + If __tracebackhide__ is a callable, it gets called with the + ExceptionInfo instance and can decide whether to hide the traceback. + + Mostly for internal use. + """ + tbh: Union[bool, Callable[[Optional[ExceptionInfo[BaseException]]], bool]] = ( + False + ) + for maybe_ns_dct in (self.frame.f_locals, self.frame.f_globals): + # in normal cases, f_locals and f_globals are dictionaries + # however via `exec(...)` / `eval(...)` they can be other types + # (even incorrect types!). + # as such, we suppress all exceptions while accessing __tracebackhide__ + try: + tbh = maybe_ns_dct["__tracebackhide__"] + except Exception: + pass + else: + break + if tbh and callable(tbh): + return tbh(excinfo) + return tbh + + def __str__(self) -> str: + name = self.frame.code.name + try: + line = str(self.statement).lstrip() + except KeyboardInterrupt: + raise + except BaseException: + line = "???" + # This output does not quite match Python's repr for traceback entries, + # but changing it to do so would break certain plugins. See + # https://github.com/pytest-dev/pytest/pull/7535/ for details. + return " File %r:%d in %s\n %s\n" % ( + str(self.path), + self.lineno + 1, + name, + line, + ) + + @property + def name(self) -> str: + """co_name of underlying code.""" + return self.frame.code.raw.co_name + + +class Traceback(List[TracebackEntry]): + """Traceback objects encapsulate and offer higher level access to Traceback entries.""" + + def __init__( + self, + tb: Union[TracebackType, Iterable[TracebackEntry]], + ) -> None: + """Initialize from given python traceback object and ExceptionInfo.""" + if isinstance(tb, TracebackType): + + def f(cur: TracebackType) -> Iterable[TracebackEntry]: + cur_: Optional[TracebackType] = cur + while cur_ is not None: + yield TracebackEntry(cur_) + cur_ = cur_.tb_next + + super().__init__(f(tb)) + else: + super().__init__(tb) + + def cut( + self, + path: Optional[Union["os.PathLike[str]", str]] = None, + lineno: Optional[int] = None, + firstlineno: Optional[int] = None, + excludepath: Optional["os.PathLike[str]"] = None, + ) -> "Traceback": + """Return a Traceback instance wrapping part of this Traceback. + + By providing any combination of path, lineno and firstlineno, the + first frame to start the to-be-returned traceback is determined. + + This allows cutting the first part of a Traceback instance e.g. + for formatting reasons (removing some uninteresting bits that deal + with handling of the exception/traceback). 
+ """ + path_ = None if path is None else os.fspath(path) + excludepath_ = None if excludepath is None else os.fspath(excludepath) + for x in self: + code = x.frame.code + codepath = code.path + if path is not None and str(codepath) != path_: + continue + if ( + excludepath is not None + and isinstance(codepath, Path) + and excludepath_ in (str(p) for p in codepath.parents) # type: ignore[operator] + ): + continue + if lineno is not None and x.lineno != lineno: + continue + if firstlineno is not None and x.frame.code.firstlineno != firstlineno: + continue + return Traceback(x._rawentry) + return self + + @overload + def __getitem__(self, key: "SupportsIndex") -> TracebackEntry: ... + + @overload + def __getitem__(self, key: slice) -> "Traceback": ... + + def __getitem__( + self, key: Union["SupportsIndex", slice] + ) -> Union[TracebackEntry, "Traceback"]: + if isinstance(key, slice): + return self.__class__(super().__getitem__(key)) + else: + return super().__getitem__(key) + + def filter( + self, + excinfo_or_fn: Union[ + "ExceptionInfo[BaseException]", + Callable[[TracebackEntry], bool], + ], + /, + ) -> "Traceback": + """Return a Traceback instance with certain items removed. + + If the filter is an `ExceptionInfo`, removes all the ``TracebackEntry``s + which are hidden (see ishidden() above). + + Otherwise, the filter is a function that gets a single argument, a + ``TracebackEntry`` instance, and should return True when the item should + be added to the ``Traceback``, False when not. + """ + if isinstance(excinfo_or_fn, ExceptionInfo): + fn = lambda x: not x.ishidden(excinfo_or_fn) # noqa: E731 + else: + fn = excinfo_or_fn + return Traceback(filter(fn, self)) + + def recursionindex(self) -> Optional[int]: + """Return the index of the frame/TracebackEntry where recursion originates if + appropriate, None if no recursion occurred.""" + cache: Dict[Tuple[Any, int, int], List[Dict[str, Any]]] = {} + for i, entry in enumerate(self): + # id for the code.raw is needed to work around + # the strange metaprogramming in the decorator lib from pypi + # which generates code objects that have hash/value equality + # XXX needs a test + key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno + # print "checking for recursion at", key + values = cache.setdefault(key, []) + if values: + f = entry.frame + loc = f.f_locals + for otherloc in values: + if otherloc == loc: + return i + values.append(entry.frame.f_locals) + return None + + +E = TypeVar("E", bound=BaseException, covariant=True) + + +@final +@dataclasses.dataclass +class ExceptionInfo(Generic[E]): + """Wraps sys.exc_info() objects and offers help for navigating the traceback.""" + + _assert_start_repr: ClassVar = "AssertionError('assert " + + _excinfo: Optional[Tuple[Type["E"], "E", TracebackType]] + _striptext: str + _traceback: Optional[Traceback] + + def __init__( + self, + excinfo: Optional[Tuple[Type["E"], "E", TracebackType]], + striptext: str = "", + traceback: Optional[Traceback] = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._excinfo = excinfo + self._striptext = striptext + self._traceback = traceback + + @classmethod + def from_exception( + cls, + # Ignoring error: "Cannot use a covariant type variable as a parameter". + # This is OK to ignore because this class is (conceptually) readonly. + # See https://github.com/python/mypy/issues/7049. 
+ exception: E, # type: ignore[misc] + exprinfo: Optional[str] = None, + ) -> "ExceptionInfo[E]": + """Return an ExceptionInfo for an existing exception. + + The exception must have a non-``None`` ``__traceback__`` attribute, + otherwise this function fails with an assertion error. This means that + the exception must have been raised, or added a traceback with the + :py:meth:`~BaseException.with_traceback()` method. + + :param exprinfo: + A text string helping to determine if we should strip + ``AssertionError`` from the output. Defaults to the exception + message/``__str__()``. + + .. versionadded:: 7.4 + """ + assert exception.__traceback__, ( + "Exceptions passed to ExcInfo.from_exception(...)" + " must have a non-None __traceback__." + ) + exc_info = (type(exception), exception, exception.__traceback__) + return cls.from_exc_info(exc_info, exprinfo) + + @classmethod + def from_exc_info( + cls, + exc_info: Tuple[Type[E], E, TracebackType], + exprinfo: Optional[str] = None, + ) -> "ExceptionInfo[E]": + """Like :func:`from_exception`, but using old-style exc_info tuple.""" + _striptext = "" + if exprinfo is None and isinstance(exc_info[1], AssertionError): + exprinfo = getattr(exc_info[1], "msg", None) + if exprinfo is None: + exprinfo = saferepr(exc_info[1]) + if exprinfo and exprinfo.startswith(cls._assert_start_repr): + _striptext = "AssertionError: " + + return cls(exc_info, _striptext, _ispytest=True) + + @classmethod + def from_current( + cls, exprinfo: Optional[str] = None + ) -> "ExceptionInfo[BaseException]": + """Return an ExceptionInfo matching the current traceback. + + .. warning:: + + Experimental API + + :param exprinfo: + A text string helping to determine if we should strip + ``AssertionError`` from the output. Defaults to the exception + message/``__str__()``. 
+ """ + tup = sys.exc_info() + assert tup[0] is not None, "no current exception" + assert tup[1] is not None, "no current exception" + assert tup[2] is not None, "no current exception" + exc_info = (tup[0], tup[1], tup[2]) + return ExceptionInfo.from_exc_info(exc_info, exprinfo) + + @classmethod + def for_later(cls) -> "ExceptionInfo[E]": + """Return an unfilled ExceptionInfo.""" + return cls(None, _ispytest=True) + + def fill_unfilled(self, exc_info: Tuple[Type[E], E, TracebackType]) -> None: + """Fill an unfilled ExceptionInfo created with ``for_later()``.""" + assert self._excinfo is None, "ExceptionInfo was already filled" + self._excinfo = exc_info + + @property + def type(self) -> Type[E]: + """The exception class.""" + assert ( + self._excinfo is not None + ), ".type can only be used after the context manager exits" + return self._excinfo[0] + + @property + def value(self) -> E: + """The exception value.""" + assert ( + self._excinfo is not None + ), ".value can only be used after the context manager exits" + return self._excinfo[1] + + @property + def tb(self) -> TracebackType: + """The exception raw traceback.""" + assert ( + self._excinfo is not None + ), ".tb can only be used after the context manager exits" + return self._excinfo[2] + + @property + def typename(self) -> str: + """The type name of the exception.""" + assert ( + self._excinfo is not None + ), ".typename can only be used after the context manager exits" + return self.type.__name__ + + @property + def traceback(self) -> Traceback: + """The traceback.""" + if self._traceback is None: + self._traceback = Traceback(self.tb) + return self._traceback + + @traceback.setter + def traceback(self, value: Traceback) -> None: + self._traceback = value + + def __repr__(self) -> str: + if self._excinfo is None: + return "" + return f"<{self.__class__.__name__} {saferepr(self._excinfo[1])} tblen={len(self.traceback)}>" + + def exconly(self, tryshort: bool = False) -> str: + """Return the exception as a string. + + When 'tryshort' resolves to True, and the exception is an + AssertionError, only the actual exception part of the exception + representation is returned (so 'AssertionError: ' is removed from + the beginning). + """ + lines = format_exception_only(self.type, self.value) + text = "".join(lines) + text = text.rstrip() + if tryshort: + if text.startswith(self._striptext): + text = text[len(self._striptext) :] + return text + + def errisinstance( + self, exc: Union[Type[BaseException], Tuple[Type[BaseException], ...]] + ) -> bool: + """Return True if the exception is an instance of exc. + + Consider using ``isinstance(excinfo.value, exc)`` instead. + """ + return isinstance(self.value, exc) + + def _getreprcrash(self) -> Optional["ReprFileLocation"]: + # Find last non-hidden traceback entry that led to the exception of the + # traceback, or None if all hidden. 
+ for i in range(-1, -len(self.traceback) - 1, -1): + entry = self.traceback[i] + if not entry.ishidden(self): + path, lineno = entry.frame.code.raw.co_filename, entry.lineno + exconly = self.exconly(tryshort=True) + return ReprFileLocation(path, lineno + 1, exconly) + return None + + def getrepr( + self, + showlocals: bool = False, + style: _TracebackStyle = "long", + abspath: bool = False, + tbfilter: Union[ + bool, Callable[["ExceptionInfo[BaseException]"], Traceback] + ] = True, + funcargs: bool = False, + truncate_locals: bool = True, + chain: bool = True, + ) -> Union["ReprExceptionInfo", "ExceptionChainRepr"]: + """Return str()able representation of this exception info. + + :param bool showlocals: + Show locals per traceback entry. + Ignored if ``style=="native"``. + + :param str style: + long|short|line|no|native|value traceback style. + + :param bool abspath: + If paths should be changed to absolute or left unchanged. + + :param tbfilter: + A filter for traceback entries. + + * If false, don't hide any entries. + * If true, hide internal entries and entries that contain a local + variable ``__tracebackhide__ = True``. + * If a callable, delegates the filtering to the callable. + + Ignored if ``style`` is ``"native"``. + + :param bool funcargs: + Show fixtures ("funcargs" for legacy purposes) per traceback entry. + + :param bool truncate_locals: + With ``showlocals==True``, make sure locals can be safely represented as strings. + + :param bool chain: + If chained exceptions in Python 3 should be shown. + + .. versionchanged:: 3.9 + + Added the ``chain`` parameter. + """ + if style == "native": + return ReprExceptionInfo( + reprtraceback=ReprTracebackNative( + traceback.format_exception( + self.type, + self.value, + self.traceback[0]._rawentry if self.traceback else None, + ) + ), + reprcrash=self._getreprcrash(), + ) + + fmt = FormattedExcinfo( + showlocals=showlocals, + style=style, + abspath=abspath, + tbfilter=tbfilter, + funcargs=funcargs, + truncate_locals=truncate_locals, + chain=chain, + ) + return fmt.repr_excinfo(self) + + def _stringify_exception(self, exc: BaseException) -> str: + try: + notes = getattr(exc, "__notes__", []) + except KeyError: + # Workaround for https://github.com/python/cpython/issues/98778 on + # Python <= 3.9, and some 3.10 and 3.11 patch versions. + HTTPError = getattr(sys.modules.get("urllib.error", None), "HTTPError", ()) + if sys.version_info < (3, 12) and isinstance(exc, HTTPError): + notes = [] + else: + raise + + return "\n".join( + [ + str(exc), + *notes, + ] + ) + + def match(self, regexp: Union[str, Pattern[str]]) -> "Literal[True]": + """Check whether the regular expression `regexp` matches the string + representation of the exception using :func:`python:re.search`. + + If it matches `True` is returned, otherwise an `AssertionError` is raised. + """ + __tracebackhide__ = True + value = self._stringify_exception(self.value) + msg = f"Regex pattern did not match.\n Regex: {regexp!r}\n Input: {value!r}" + if regexp == value: + msg += "\n Did you mean to `re.escape()` the regex?" + assert re.search(regexp, value), msg + # Return True to allow for "assert excinfo.match()". 
+ return True + + def _group_contains( + self, + exc_group: BaseExceptionGroup[BaseException], + expected_exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]], + match: Union[str, Pattern[str], None], + target_depth: Optional[int] = None, + current_depth: int = 1, + ) -> bool: + """Return `True` if a `BaseExceptionGroup` contains a matching exception.""" + if (target_depth is not None) and (current_depth > target_depth): + # already descended past the target depth + return False + for exc in exc_group.exceptions: + if isinstance(exc, BaseExceptionGroup): + if self._group_contains( + exc, expected_exception, match, target_depth, current_depth + 1 + ): + return True + if (target_depth is not None) and (current_depth != target_depth): + # not at the target depth, no match + continue + if not isinstance(exc, expected_exception): + continue + if match is not None: + value = self._stringify_exception(exc) + if not re.search(match, value): + continue + return True + return False + + def group_contains( + self, + expected_exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]], + *, + match: Union[str, Pattern[str], None] = None, + depth: Optional[int] = None, + ) -> bool: + """Check whether a captured exception group contains a matching exception. + + :param Type[BaseException] | Tuple[Type[BaseException]] expected_exception: + The expected exception type, or a tuple if one of multiple possible + exception types are expected. + + :param str | Pattern[str] | None match: + If specified, a string containing a regular expression, + or a regular expression object, that is tested against the string + representation of the exception and its `PEP-678 ` `__notes__` + using :func:`re.search`. + + To match a literal string that may contain :ref:`special characters + `, the pattern can first be escaped with :func:`re.escape`. + + :param Optional[int] depth: + If `None`, will search for a matching exception at any nesting depth. + If >= 1, will only match an exception if it's at the specified depth (depth = 1 being + the exceptions contained within the topmost exception group). + + .. versionadded:: 8.0 + """ + msg = "Captured exception is not an instance of `BaseExceptionGroup`" + assert isinstance(self.value, BaseExceptionGroup), msg + msg = "`depth` must be >= 1 if specified" + assert (depth is None) or (depth >= 1), msg + return self._group_contains(self.value, expected_exception, match, depth) + + +@dataclasses.dataclass +class FormattedExcinfo: + """Presenting information about failing Functions and Generators.""" + + # for traceback entries + flow_marker: ClassVar = ">" + fail_marker: ClassVar = "E" + + showlocals: bool = False + style: _TracebackStyle = "long" + abspath: bool = True + tbfilter: Union[bool, Callable[[ExceptionInfo[BaseException]], Traceback]] = True + funcargs: bool = False + truncate_locals: bool = True + chain: bool = True + astcache: Dict[Union[str, Path], ast.AST] = dataclasses.field( + default_factory=dict, init=False, repr=False + ) + + def _getindent(self, source: "Source") -> int: + # Figure out indent for the given source. 
+ try: + s = str(source.getstatement(len(source) - 1)) + except KeyboardInterrupt: + raise + except BaseException: + try: + s = str(source[-1]) + except KeyboardInterrupt: + raise + except BaseException: + return 0 + return 4 + (len(s) - len(s.lstrip())) + + def _getentrysource(self, entry: TracebackEntry) -> Optional["Source"]: + source = entry.getsource(self.astcache) + if source is not None: + source = source.deindent() + return source + + def repr_args(self, entry: TracebackEntry) -> Optional["ReprFuncArgs"]: + if self.funcargs: + args = [] + for argname, argvalue in entry.frame.getargs(var=True): + args.append((argname, saferepr(argvalue))) + return ReprFuncArgs(args) + return None + + def get_source( + self, + source: Optional["Source"], + line_index: int = -1, + excinfo: Optional[ExceptionInfo[BaseException]] = None, + short: bool = False, + ) -> List[str]: + """Return formatted and marked up source lines.""" + lines = [] + if source is not None and line_index < 0: + line_index += len(source) + if source is None or line_index >= len(source.lines) or line_index < 0: + # `line_index` could still be outside `range(len(source.lines))` if + # we're processing AST with pathological position attributes. + source = Source("???") + line_index = 0 + space_prefix = " " + if short: + lines.append(space_prefix + source.lines[line_index].strip()) + else: + for line in source.lines[:line_index]: + lines.append(space_prefix + line) + lines.append(self.flow_marker + " " + source.lines[line_index]) + for line in source.lines[line_index + 1 :]: + lines.append(space_prefix + line) + if excinfo is not None: + indent = 4 if short else self._getindent(source) + lines.extend(self.get_exconly(excinfo, indent=indent, markall=True)) + return lines + + def get_exconly( + self, + excinfo: ExceptionInfo[BaseException], + indent: int = 4, + markall: bool = False, + ) -> List[str]: + lines = [] + indentstr = " " * indent + # Get the real exception information out. + exlines = excinfo.exconly(tryshort=True).split("\n") + failindent = self.fail_marker + indentstr[1:] + for line in exlines: + lines.append(failindent + line) + if not markall: + failindent = indentstr + return lines + + def repr_locals(self, locals: Mapping[str, object]) -> Optional["ReprLocals"]: + if self.showlocals: + lines = [] + keys = [loc for loc in locals if loc[0] != "@"] + keys.sort() + for name in keys: + value = locals[name] + if name == "__builtins__": + lines.append("__builtins__ = ") + else: + # This formatting could all be handled by the + # _repr() function, which is only reprlib.Repr in + # disguise, so is very configurable. 
+ if self.truncate_locals: + str_repr = saferepr(value) + else: + str_repr = safeformat(value) + # if len(str_repr) < 70 or not isinstance(value, (list, tuple, dict)): + lines.append(f"{name:<10} = {str_repr}") + # else: + # self._line("%-10s =\\" % (name,)) + # # XXX + # pprint.pprint(value, stream=self.excinfowriter) + return ReprLocals(lines) + return None + + def repr_traceback_entry( + self, + entry: Optional[TracebackEntry], + excinfo: Optional[ExceptionInfo[BaseException]] = None, + ) -> "ReprEntry": + lines: List[str] = [] + style = ( + entry._repr_style + if entry is not None and entry._repr_style is not None + else self.style + ) + if style in ("short", "long") and entry is not None: + source = self._getentrysource(entry) + if source is None: + source = Source("???") + line_index = 0 + else: + line_index = entry.lineno - entry.getfirstlinesource() + short = style == "short" + reprargs = self.repr_args(entry) if not short else None + s = self.get_source(source, line_index, excinfo, short=short) + lines.extend(s) + if short: + message = "in %s" % (entry.name) + else: + message = excinfo and excinfo.typename or "" + entry_path = entry.path + path = self._makepath(entry_path) + reprfileloc = ReprFileLocation(path, entry.lineno + 1, message) + localsrepr = self.repr_locals(entry.locals) + return ReprEntry(lines, reprargs, localsrepr, reprfileloc, style) + elif style == "value": + if excinfo: + lines.extend(str(excinfo.value).split("\n")) + return ReprEntry(lines, None, None, None, style) + else: + if excinfo: + lines.extend(self.get_exconly(excinfo, indent=4)) + return ReprEntry(lines, None, None, None, style) + + def _makepath(self, path: Union[Path, str]) -> str: + if not self.abspath and isinstance(path, Path): + try: + np = bestrelpath(Path.cwd(), path) + except OSError: + return str(path) + if len(np) < len(str(path)): + return np + return str(path) + + def repr_traceback(self, excinfo: ExceptionInfo[BaseException]) -> "ReprTraceback": + traceback = excinfo.traceback + if callable(self.tbfilter): + traceback = self.tbfilter(excinfo) + elif self.tbfilter: + traceback = traceback.filter(excinfo) + + if isinstance(excinfo.value, RecursionError): + traceback, extraline = self._truncate_recursive_traceback(traceback) + else: + extraline = None + + if not traceback: + if extraline is None: + extraline = "All traceback entries are hidden. Pass `--full-trace` to see hidden and internal frames." + entries = [self.repr_traceback_entry(None, excinfo)] + return ReprTraceback(entries, extraline, style=self.style) + + last = traceback[-1] + if self.style == "value": + entries = [self.repr_traceback_entry(last, excinfo)] + return ReprTraceback(entries, None, style=self.style) + + entries = [ + self.repr_traceback_entry(entry, excinfo if last == entry else None) + for entry in traceback + ] + return ReprTraceback(entries, extraline, style=self.style) + + def _truncate_recursive_traceback( + self, traceback: Traceback + ) -> Tuple[Traceback, Optional[str]]: + """Truncate the given recursive traceback trying to find the starting + point of the recursion. + + The detection is done by going through each traceback entry and + finding the point in which the locals of the frame are equal to the + locals of a previous frame (see ``recursionindex()``). + + Handle the situation where the recursion process might raise an + exception (for example comparing numpy arrays using equality raises a + TypeError), in which case we do our best to warn the user of the + error and show a limited traceback. 
+ """ + try: + recursionindex = traceback.recursionindex() + except Exception as e: + max_frames = 10 + extraline: Optional[str] = ( + "!!! Recursion error detected, but an error occurred locating the origin of recursion.\n" + " The following exception happened when comparing locals in the stack frame:\n" + f" {type(e).__name__}: {e!s}\n" + f" Displaying first and last {max_frames} stack frames out of {len(traceback)}." + ) + # Type ignored because adding two instances of a List subtype + # currently incorrectly has type List instead of the subtype. + traceback = traceback[:max_frames] + traceback[-max_frames:] # type: ignore + else: + if recursionindex is not None: + extraline = "!!! Recursion detected (same locals & position)" + traceback = traceback[: recursionindex + 1] + else: + extraline = None + + return traceback, extraline + + def repr_excinfo( + self, excinfo: ExceptionInfo[BaseException] + ) -> "ExceptionChainRepr": + repr_chain: List[ + Tuple[ReprTraceback, Optional[ReprFileLocation], Optional[str]] + ] = [] + e: Optional[BaseException] = excinfo.value + excinfo_: Optional[ExceptionInfo[BaseException]] = excinfo + descr = None + seen: Set[int] = set() + while e is not None and id(e) not in seen: + seen.add(id(e)) + + if excinfo_: + # Fall back to native traceback as a temporary workaround until + # full support for exception groups added to ExceptionInfo. + # See https://github.com/pytest-dev/pytest/issues/9159 + if isinstance(e, BaseExceptionGroup): + reprtraceback: Union[ReprTracebackNative, ReprTraceback] = ( + ReprTracebackNative( + traceback.format_exception( + type(excinfo_.value), + excinfo_.value, + excinfo_.traceback[0]._rawentry, + ) + ) + ) + else: + reprtraceback = self.repr_traceback(excinfo_) + reprcrash = excinfo_._getreprcrash() + else: + # Fallback to native repr if the exception doesn't have a traceback: + # ExceptionInfo objects require a full traceback to work. + reprtraceback = ReprTracebackNative( + traceback.format_exception(type(e), e, None) + ) + reprcrash = None + repr_chain += [(reprtraceback, reprcrash, descr)] + + if e.__cause__ is not None and self.chain: + e = e.__cause__ + excinfo_ = ExceptionInfo.from_exception(e) if e.__traceback__ else None + descr = "The above exception was the direct cause of the following exception:" + elif ( + e.__context__ is not None and not e.__suppress_context__ and self.chain + ): + e = e.__context__ + excinfo_ = ExceptionInfo.from_exception(e) if e.__traceback__ else None + descr = "During handling of the above exception, another exception occurred:" + else: + e = None + repr_chain.reverse() + return ExceptionChainRepr(repr_chain) + + +@dataclasses.dataclass(eq=False) +class TerminalRepr: + def __str__(self) -> str: + # FYI this is called from pytest-xdist's serialization of exception + # information. + io = StringIO() + tw = TerminalWriter(file=io) + self.toterminal(tw) + return io.getvalue().strip() + + def __repr__(self) -> str: + return f"<{self.__class__} instance at {id(self):0x}>" + + def toterminal(self, tw: TerminalWriter) -> None: + raise NotImplementedError() + + +# This class is abstract -- only subclasses are instantiated. +@dataclasses.dataclass(eq=False) +class ExceptionRepr(TerminalRepr): + # Provided by subclasses. 
+ reprtraceback: "ReprTraceback" + reprcrash: Optional["ReprFileLocation"] + sections: List[Tuple[str, str, str]] = dataclasses.field( + init=False, default_factory=list + ) + + def addsection(self, name: str, content: str, sep: str = "-") -> None: + self.sections.append((name, content, sep)) + + def toterminal(self, tw: TerminalWriter) -> None: + for name, content, sep in self.sections: + tw.sep(sep, name) + tw.line(content) + + +@dataclasses.dataclass(eq=False) +class ExceptionChainRepr(ExceptionRepr): + chain: Sequence[Tuple["ReprTraceback", Optional["ReprFileLocation"], Optional[str]]] + + def __init__( + self, + chain: Sequence[ + Tuple["ReprTraceback", Optional["ReprFileLocation"], Optional[str]] + ], + ) -> None: + # reprcrash and reprtraceback of the outermost (the newest) exception + # in the chain. + super().__init__( + reprtraceback=chain[-1][0], + reprcrash=chain[-1][1], + ) + self.chain = chain + + def toterminal(self, tw: TerminalWriter) -> None: + for element in self.chain: + element[0].toterminal(tw) + if element[2] is not None: + tw.line("") + tw.line(element[2], yellow=True) + super().toterminal(tw) + + +@dataclasses.dataclass(eq=False) +class ReprExceptionInfo(ExceptionRepr): + reprtraceback: "ReprTraceback" + reprcrash: Optional["ReprFileLocation"] + + def toterminal(self, tw: TerminalWriter) -> None: + self.reprtraceback.toterminal(tw) + super().toterminal(tw) + + +@dataclasses.dataclass(eq=False) +class ReprTraceback(TerminalRepr): + reprentries: Sequence[Union["ReprEntry", "ReprEntryNative"]] + extraline: Optional[str] + style: _TracebackStyle + + entrysep: ClassVar = "_ " + + def toterminal(self, tw: TerminalWriter) -> None: + # The entries might have different styles. + for i, entry in enumerate(self.reprentries): + if entry.style == "long": + tw.line("") + entry.toterminal(tw) + if i < len(self.reprentries) - 1: + next_entry = self.reprentries[i + 1] + if ( + entry.style == "long" + or entry.style == "short" + and next_entry.style == "long" + ): + tw.sep(self.entrysep) + + if self.extraline: + tw.line(self.extraline) + + +class ReprTracebackNative(ReprTraceback): + def __init__(self, tblines: Sequence[str]) -> None: + self.reprentries = [ReprEntryNative(tblines)] + self.extraline = None + self.style = "native" + + +@dataclasses.dataclass(eq=False) +class ReprEntryNative(TerminalRepr): + lines: Sequence[str] + + style: ClassVar[_TracebackStyle] = "native" + + def toterminal(self, tw: TerminalWriter) -> None: + tw.write("".join(self.lines)) + + +@dataclasses.dataclass(eq=False) +class ReprEntry(TerminalRepr): + lines: Sequence[str] + reprfuncargs: Optional["ReprFuncArgs"] + reprlocals: Optional["ReprLocals"] + reprfileloc: Optional["ReprFileLocation"] + style: _TracebackStyle + + def _write_entry_lines(self, tw: TerminalWriter) -> None: + """Write the source code portions of a list of traceback entries with syntax highlighting. + + Usually entries are lines like these: + + " x = 1" + "> assert x == 2" + "E assert 1 == 2" + + This function takes care of rendering the "source" portions of it (the lines without + the "E" prefix) using syntax highlighting, taking care to not highlighting the ">" + character, as doing so might break line continuations. 
+ """ + if not self.lines: + return + + # separate indents and source lines that are not failures: we want to + # highlight the code but not the indentation, which may contain markers + # such as "> assert 0" + fail_marker = f"{FormattedExcinfo.fail_marker} " + indent_size = len(fail_marker) + indents: List[str] = [] + source_lines: List[str] = [] + failure_lines: List[str] = [] + for index, line in enumerate(self.lines): + is_failure_line = line.startswith(fail_marker) + if is_failure_line: + # from this point on all lines are considered part of the failure + failure_lines.extend(self.lines[index:]) + break + else: + if self.style == "value": + source_lines.append(line) + else: + indents.append(line[:indent_size]) + source_lines.append(line[indent_size:]) + + tw._write_source(source_lines, indents) + + # failure lines are always completely red and bold + for line in failure_lines: + tw.line(line, bold=True, red=True) + + def toterminal(self, tw: TerminalWriter) -> None: + if self.style == "short": + if self.reprfileloc: + self.reprfileloc.toterminal(tw) + self._write_entry_lines(tw) + if self.reprlocals: + self.reprlocals.toterminal(tw, indent=" " * 8) + return + + if self.reprfuncargs: + self.reprfuncargs.toterminal(tw) + + self._write_entry_lines(tw) + + if self.reprlocals: + tw.line("") + self.reprlocals.toterminal(tw) + if self.reprfileloc: + if self.lines: + tw.line("") + self.reprfileloc.toterminal(tw) + + def __str__(self) -> str: + return "{}\n{}\n{}".format( + "\n".join(self.lines), self.reprlocals, self.reprfileloc + ) + + +@dataclasses.dataclass(eq=False) +class ReprFileLocation(TerminalRepr): + path: str + lineno: int + message: str + + def __post_init__(self) -> None: + self.path = str(self.path) + + def toterminal(self, tw: TerminalWriter) -> None: + # Filename and lineno output for each entry, using an output format + # that most editors understand. + msg = self.message + i = msg.find("\n") + if i != -1: + msg = msg[:i] + tw.write(self.path, bold=True, red=True) + tw.line(f":{self.lineno}: {msg}") + + +@dataclasses.dataclass(eq=False) +class ReprLocals(TerminalRepr): + lines: Sequence[str] + + def toterminal(self, tw: TerminalWriter, indent="") -> None: + for line in self.lines: + tw.line(indent + line) + + +@dataclasses.dataclass(eq=False) +class ReprFuncArgs(TerminalRepr): + args: Sequence[Tuple[str, object]] + + def toterminal(self, tw: TerminalWriter) -> None: + if self.args: + linesofar = "" + for name, value in self.args: + ns = f"{name} = {value}" + if len(ns) + len(linesofar) + 2 > tw.fullwidth: + if linesofar: + tw.line(linesofar) + linesofar = ns + else: + if linesofar: + linesofar += ", " + ns + else: + linesofar = ns + if linesofar: + tw.line(linesofar) + tw.line("") + + +def getfslineno(obj: object) -> Tuple[Union[str, Path], int]: + """Return source location (path, lineno) for the given object. + + If the source cannot be determined return ("", -1). + + The line number is 0-based. + """ + # xxx let decorators etc specify a sane ordering + # NOTE: this used to be done in _pytest.compat.getfslineno, initially added + # in 6ec13a2b9. It ("place_as") appears to be something very custom. 
+ obj = get_real_func(obj) + if hasattr(obj, "place_as"): + obj = obj.place_as + + try: + code = Code.from_function(obj) + except TypeError: + try: + fn = inspect.getsourcefile(obj) or inspect.getfile(obj) # type: ignore[arg-type] + except TypeError: + return "", -1 + + fspath = fn and absolutepath(fn) or "" + lineno = -1 + if fspath: + try: + _, lineno = findsource(obj) + except OSError: + pass + return fspath, lineno + + return code.path, code.firstlineno + + +# Relative paths that we use to filter traceback entries from appearing to the user; +# see filter_traceback. +# note: if we need to add more paths than what we have now we should probably use a list +# for better maintenance. + +_PLUGGY_DIR = Path(pluggy.__file__.rstrip("oc")) +# pluggy is either a package or a single module depending on the version +if _PLUGGY_DIR.name == "__init__.py": + _PLUGGY_DIR = _PLUGGY_DIR.parent +_PYTEST_DIR = Path(_pytest.__file__).parent + + +def filter_traceback(entry: TracebackEntry) -> bool: + """Return True if a TracebackEntry instance should be included in tracebacks. + + We hide traceback entries of: + + * dynamically generated code (no code to show up for it); + * internal traceback from pytest or its internal libraries, py and pluggy. + """ + # entry.path might sometimes return a str object when the entry + # points to dynamically generated code. + # See https://bitbucket.org/pytest-dev/py/issues/71. + raw_filename = entry.frame.code.raw.co_filename + is_generated = "<" in raw_filename and ">" in raw_filename + if is_generated: + return False + + # entry.path might point to a non-existing file, in which case it will + # also return a str object. See #1133. + p = Path(entry.path) + + parents = p.parents + if _PLUGGY_DIR in parents: + return False + if _PYTEST_DIR in parents: + return False + + return True diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_code/source.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_code/source.py new file mode 100644 index 000000000..7fa577e03 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_code/source.py @@ -0,0 +1,217 @@ +# mypy: allow-untyped-defs +import ast +from bisect import bisect_right +import inspect +import textwrap +import tokenize +import types +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Optional +from typing import overload +from typing import Tuple +from typing import Union +import warnings + + +class Source: + """An immutable object holding a source code fragment. + + When using Source(...), the source lines are deindented. + """ + + def __init__(self, obj: object = None) -> None: + if not obj: + self.lines: List[str] = [] + elif isinstance(obj, Source): + self.lines = obj.lines + elif isinstance(obj, (tuple, list)): + self.lines = deindent(x.rstrip("\n") for x in obj) + elif isinstance(obj, str): + self.lines = deindent(obj.split("\n")) + else: + try: + rawcode = getrawcode(obj) + src = inspect.getsource(rawcode) + except TypeError: + src = inspect.getsource(obj) # type: ignore[arg-type] + self.lines = deindent(src.split("\n")) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Source): + return NotImplemented + return self.lines == other.lines + + # Ignore type because of https://github.com/python/mypy/issues/4266. + __hash__ = None # type: ignore + + @overload + def __getitem__(self, key: int) -> str: ... + + @overload + def __getitem__(self, key: slice) -> "Source": ... 
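+    # The overloads above only narrow the return type for callers: indexing
+    # with an int yields a single source line (str), slicing yields a new
+    # Source object.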
+ + def __getitem__(self, key: Union[int, slice]) -> Union[str, "Source"]: + if isinstance(key, int): + return self.lines[key] + else: + if key.step not in (None, 1): + raise IndexError("cannot slice a Source with a step") + newsource = Source() + newsource.lines = self.lines[key.start : key.stop] + return newsource + + def __iter__(self) -> Iterator[str]: + return iter(self.lines) + + def __len__(self) -> int: + return len(self.lines) + + def strip(self) -> "Source": + """Return new Source object with trailing and leading blank lines removed.""" + start, end = 0, len(self) + while start < end and not self.lines[start].strip(): + start += 1 + while end > start and not self.lines[end - 1].strip(): + end -= 1 + source = Source() + source.lines[:] = self.lines[start:end] + return source + + def indent(self, indent: str = " " * 4) -> "Source": + """Return a copy of the source object with all lines indented by the + given indent-string.""" + newsource = Source() + newsource.lines = [(indent + line) for line in self.lines] + return newsource + + def getstatement(self, lineno: int) -> "Source": + """Return Source statement which contains the given linenumber + (counted from 0).""" + start, end = self.getstatementrange(lineno) + return self[start:end] + + def getstatementrange(self, lineno: int) -> Tuple[int, int]: + """Return (start, end) tuple which spans the minimal statement region + which containing the given lineno.""" + if not (0 <= lineno < len(self)): + raise IndexError("lineno out of range") + ast, start, end = getstatementrange_ast(lineno, self) + return start, end + + def deindent(self) -> "Source": + """Return a new Source object deindented.""" + newsource = Source() + newsource.lines[:] = deindent(self.lines) + return newsource + + def __str__(self) -> str: + return "\n".join(self.lines) + + +# +# helper functions +# + + +def findsource(obj) -> Tuple[Optional[Source], int]: + try: + sourcelines, lineno = inspect.findsource(obj) + except Exception: + return None, -1 + source = Source() + source.lines = [line.rstrip() for line in sourcelines] + return source, lineno + + +def getrawcode(obj: object, trycall: bool = True) -> types.CodeType: + """Return code object for given function.""" + try: + return obj.__code__ # type: ignore[attr-defined,no-any-return] + except AttributeError: + pass + if trycall: + call = getattr(obj, "__call__", None) + if call and not isinstance(obj, type): + return getrawcode(call, trycall=False) + raise TypeError(f"could not get code object for {obj!r}") + + +def deindent(lines: Iterable[str]) -> List[str]: + return textwrap.dedent("\n".join(lines)).splitlines() + + +def get_statement_startend2(lineno: int, node: ast.AST) -> Tuple[int, Optional[int]]: + # Flatten all statements and except handlers into one lineno-list. + # AST's line numbers start indexing at 1. + values: List[int] = [] + for x in ast.walk(node): + if isinstance(x, (ast.stmt, ast.ExceptHandler)): + # The lineno points to the class/def, so need to include the decorators. + if isinstance(x, (ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef)): + for d in x.decorator_list: + values.append(d.lineno - 1) + values.append(x.lineno - 1) + for name in ("finalbody", "orelse"): + val: Optional[List[ast.stmt]] = getattr(x, name, None) + if val: + # Treat the finally/orelse part as its own statement. 
+ values.append(val[0].lineno - 1 - 1) + values.sort() + insert_index = bisect_right(values, lineno) + start = values[insert_index - 1] + if insert_index >= len(values): + end = None + else: + end = values[insert_index] + return start, end + + +def getstatementrange_ast( + lineno: int, + source: Source, + assertion: bool = False, + astnode: Optional[ast.AST] = None, +) -> Tuple[ast.AST, int, int]: + if astnode is None: + content = str(source) + # See #4260: + # Don't produce duplicate warnings when compiling source to find AST. + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + astnode = ast.parse(content, "source", "exec") + + start, end = get_statement_startend2(lineno, astnode) + # We need to correct the end: + # - ast-parsing strips comments + # - there might be empty lines + # - we might have lesser indented code blocks at the end + if end is None: + end = len(source.lines) + + if end > start + 1: + # Make sure we don't span differently indented code blocks + # by using the BlockFinder helper used which inspect.getsource() uses itself. + block_finder = inspect.BlockFinder() + # If we start with an indented line, put blockfinder to "started" mode. + block_finder.started = ( + bool(source.lines[start]) and source.lines[start][0].isspace() + ) + it = ((x + "\n") for x in source.lines[start:end]) + try: + for tok in tokenize.generate_tokens(lambda: next(it)): + block_finder.tokeneater(*tok) + except (inspect.EndOfBlock, IndentationError): + end = block_finder.last + start + except Exception: + pass + + # The end might still point to a comment or empty line, correct it. + while end: + line = source.lines[end - 1].lstrip() + if line.startswith("#") or not line: + end -= 1 + else: + break + return astnode, start, end diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_io/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_io/__init__.py new file mode 100644 index 000000000..db001e918 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_io/__init__.py @@ -0,0 +1,8 @@ +from .terminalwriter import get_terminal_width +from .terminalwriter import TerminalWriter + + +__all__ = [ + "TerminalWriter", + "get_terminal_width", +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_io/pprint.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_io/pprint.py new file mode 100644 index 000000000..75e9a7123 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_io/pprint.py @@ -0,0 +1,676 @@ +# mypy: allow-untyped-defs +# This module was imported from the cpython standard library +# (https://github.com/python/cpython/) at commit +# c5140945c723ae6c4b7ee81ff720ac8ea4b52cfd (python3.12). +# +# +# Original Author: Fred L. Drake, Jr. +# fdrake@acm.org +# +# This is a simple little module I wrote to make life easier. I didn't +# see anything quite like it in the library, though I may have overlooked +# something. I wrote this when I was trying to read some heavily nested +# tuples with fairly non-descriptive content. This is modeled very much +# after Lisp/Scheme - style pretty-printing of lists. If you find it +# useful, thank small children who sleep at night. 
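+# Rough usage sketch (illustrative only, not part of the vendored module):
+# the PrettyPrinter below is pytest's trimmed-down variant of the stdlib
+# pprint, exposing pformat() with indent/width/depth knobs, e.g.
+#
+#     from _pytest._io.pprint import PrettyPrinter
+#     printer = PrettyPrinter(indent=4, width=80)
+#     text = printer.pformat({"numbers": [1, 2, 3], "nested": {"a": (1, 2)}})
+#
+# Unlike the stdlib class it always expands containers one item per line
+# (see _format_items below) rather than trying to fit them within `width`.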
+import collections as _collections +import dataclasses as _dataclasses +from io import StringIO as _StringIO +import re +import types as _types +from typing import Any +from typing import Callable +from typing import Dict +from typing import IO +from typing import Iterator +from typing import List +from typing import Optional +from typing import Set +from typing import Tuple + + +class _safe_key: + """Helper function for key functions when sorting unorderable objects. + + The wrapped-object will fallback to a Py2.x style comparison for + unorderable types (sorting first comparing the type name and then by + the obj ids). Does not work recursively, so dict.items() must have + _safe_key applied to both the key and the value. + + """ + + __slots__ = ["obj"] + + def __init__(self, obj): + self.obj = obj + + def __lt__(self, other): + try: + return self.obj < other.obj + except TypeError: + return (str(type(self.obj)), id(self.obj)) < ( + str(type(other.obj)), + id(other.obj), + ) + + +def _safe_tuple(t): + """Helper function for comparing 2-tuples""" + return _safe_key(t[0]), _safe_key(t[1]) + + +class PrettyPrinter: + def __init__( + self, + indent: int = 4, + width: int = 80, + depth: Optional[int] = None, + ) -> None: + """Handle pretty printing operations onto a stream using a set of + configured parameters. + + indent + Number of spaces to indent for each level of nesting. + + width + Attempted maximum number of columns in the output. + + depth + The maximum depth to print out nested structures. + + """ + if indent < 0: + raise ValueError("indent must be >= 0") + if depth is not None and depth <= 0: + raise ValueError("depth must be > 0") + if not width: + raise ValueError("width must be != 0") + self._depth = depth + self._indent_per_level = indent + self._width = width + + def pformat(self, object: Any) -> str: + sio = _StringIO() + self._format(object, sio, 0, 0, set(), 0) + return sio.getvalue() + + def _format( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + objid = id(object) + if objid in context: + stream.write(_recursion(object)) + return + + p = self._dispatch.get(type(object).__repr__, None) + if p is not None: + context.add(objid) + p(self, object, stream, indent, allowance, context, level + 1) + context.remove(objid) + elif ( + _dataclasses.is_dataclass(object) + and not isinstance(object, type) + and object.__dataclass_params__.repr + and + # Check dataclass has generated repr method. 
+ hasattr(object.__repr__, "__wrapped__") + and "__create_fn__" in object.__repr__.__wrapped__.__qualname__ + ): + context.add(objid) + self._pprint_dataclass( + object, stream, indent, allowance, context, level + 1 + ) + context.remove(objid) + else: + stream.write(self._repr(object, context, level)) + + def _pprint_dataclass( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + cls_name = object.__class__.__name__ + items = [ + (f.name, getattr(object, f.name)) + for f in _dataclasses.fields(object) + if f.repr + ] + stream.write(cls_name + "(") + self._format_namespace_items(items, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch: Dict[ + Callable[..., str], + Callable[["PrettyPrinter", Any, IO[str], int, int, Set[int], int], None], + ] = {} + + def _pprint_dict( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + write = stream.write + write("{") + items = sorted(object.items(), key=_safe_tuple) + self._format_dict_items(items, stream, indent, allowance, context, level) + write("}") + + _dispatch[dict.__repr__] = _pprint_dict + + def _pprint_ordered_dict( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + if not len(object): + stream.write(repr(object)) + return + cls = object.__class__ + stream.write(cls.__name__ + "(") + self._pprint_dict(object, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_collections.OrderedDict.__repr__] = _pprint_ordered_dict + + def _pprint_list( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + stream.write("[") + self._format_items(object, stream, indent, allowance, context, level) + stream.write("]") + + _dispatch[list.__repr__] = _pprint_list + + def _pprint_tuple( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + stream.write("(") + self._format_items(object, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[tuple.__repr__] = _pprint_tuple + + def _pprint_set( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + if not len(object): + stream.write(repr(object)) + return + typ = object.__class__ + if typ is set: + stream.write("{") + endchar = "}" + else: + stream.write(typ.__name__ + "({") + endchar = "})" + object = sorted(object, key=_safe_key) + self._format_items(object, stream, indent, allowance, context, level) + stream.write(endchar) + + _dispatch[set.__repr__] = _pprint_set + _dispatch[frozenset.__repr__] = _pprint_set + + def _pprint_str( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + write = stream.write + if not len(object): + write(repr(object)) + return + chunks = [] + lines = object.splitlines(True) + if level == 1: + indent += 1 + allowance += 1 + max_width1 = max_width = self._width - indent + for i, line in enumerate(lines): + rep = repr(line) + if i == len(lines) - 1: + max_width1 -= allowance + if len(rep) <= max_width1: + chunks.append(rep) + else: + # A list of alternating (non-space, space) strings + parts = re.findall(r"\S*\s*", line) + assert parts + assert not parts[-1] + parts.pop() # drop empty last part + max_width2 = max_width + 
current = "" + for j, part in enumerate(parts): + candidate = current + part + if j == len(parts) - 1 and i == len(lines) - 1: + max_width2 -= allowance + if len(repr(candidate)) > max_width2: + if current: + chunks.append(repr(current)) + current = part + else: + current = candidate + if current: + chunks.append(repr(current)) + if len(chunks) == 1: + write(rep) + return + if level == 1: + write("(") + for i, rep in enumerate(chunks): + if i > 0: + write("\n" + " " * indent) + write(rep) + if level == 1: + write(")") + + _dispatch[str.__repr__] = _pprint_str + + def _pprint_bytes( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + write = stream.write + if len(object) <= 4: + write(repr(object)) + return + parens = level == 1 + if parens: + indent += 1 + allowance += 1 + write("(") + delim = "" + for rep in _wrap_bytes_repr(object, self._width - indent, allowance): + write(delim) + write(rep) + if not delim: + delim = "\n" + " " * indent + if parens: + write(")") + + _dispatch[bytes.__repr__] = _pprint_bytes + + def _pprint_bytearray( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + write = stream.write + write("bytearray(") + self._pprint_bytes( + bytes(object), stream, indent + 10, allowance + 1, context, level + 1 + ) + write(")") + + _dispatch[bytearray.__repr__] = _pprint_bytearray + + def _pprint_mappingproxy( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + stream.write("mappingproxy(") + self._format(object.copy(), stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_types.MappingProxyType.__repr__] = _pprint_mappingproxy + + def _pprint_simplenamespace( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + if type(object) is _types.SimpleNamespace: + # The SimpleNamespace repr is "namespace" instead of the class + # name, so we do the same here. For subclasses; use the class name. + cls_name = "namespace" + else: + cls_name = object.__class__.__name__ + items = object.__dict__.items() + stream.write(cls_name + "(") + self._format_namespace_items(items, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_types.SimpleNamespace.__repr__] = _pprint_simplenamespace + + def _format_dict_items( + self, + items: List[Tuple[Any, Any]], + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + if not items: + return + + write = stream.write + item_indent = indent + self._indent_per_level + delimnl = "\n" + " " * item_indent + for key, ent in items: + write(delimnl) + write(self._repr(key, context, level)) + write(": ") + self._format(ent, stream, item_indent, 1, context, level) + write(",") + + write("\n" + " " * indent) + + def _format_namespace_items( + self, + items: List[Tuple[Any, Any]], + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + if not items: + return + + write = stream.write + item_indent = indent + self._indent_per_level + delimnl = "\n" + " " * item_indent + for key, ent in items: + write(delimnl) + write(key) + write("=") + if id(ent) in context: + # Special-case representation of recursion to match standard + # recursive dataclass repr. 
+ write("...") + else: + self._format( + ent, + stream, + item_indent + len(key) + 1, + 1, + context, + level, + ) + + write(",") + + write("\n" + " " * indent) + + def _format_items( + self, + items: List[Any], + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + if not items: + return + + write = stream.write + item_indent = indent + self._indent_per_level + delimnl = "\n" + " " * item_indent + + for item in items: + write(delimnl) + self._format(item, stream, item_indent, 1, context, level) + write(",") + + write("\n" + " " * indent) + + def _repr(self, object: Any, context: Set[int], level: int) -> str: + return self._safe_repr(object, context.copy(), self._depth, level) + + def _pprint_default_dict( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + rdf = self._repr(object.default_factory, context, level) + stream.write(f"{object.__class__.__name__}({rdf}, ") + self._pprint_dict(object, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_collections.defaultdict.__repr__] = _pprint_default_dict + + def _pprint_counter( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + stream.write(object.__class__.__name__ + "(") + + if object: + stream.write("{") + items = object.most_common() + self._format_dict_items(items, stream, indent, allowance, context, level) + stream.write("}") + + stream.write(")") + + _dispatch[_collections.Counter.__repr__] = _pprint_counter + + def _pprint_chain_map( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + if not len(object.maps) or (len(object.maps) == 1 and not len(object.maps[0])): + stream.write(repr(object)) + return + + stream.write(object.__class__.__name__ + "(") + self._format_items(object.maps, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_collections.ChainMap.__repr__] = _pprint_chain_map + + def _pprint_deque( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + stream.write(object.__class__.__name__ + "(") + if object.maxlen is not None: + stream.write("maxlen=%d, " % object.maxlen) + stream.write("[") + + self._format_items(object, stream, indent, allowance + 1, context, level) + stream.write("])") + + _dispatch[_collections.deque.__repr__] = _pprint_deque + + def _pprint_user_dict( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + self._format(object.data, stream, indent, allowance, context, level - 1) + + _dispatch[_collections.UserDict.__repr__] = _pprint_user_dict + + def _pprint_user_list( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + self._format(object.data, stream, indent, allowance, context, level - 1) + + _dispatch[_collections.UserList.__repr__] = _pprint_user_list + + def _pprint_user_string( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: Set[int], + level: int, + ) -> None: + self._format(object.data, stream, indent, allowance, context, level - 1) + + _dispatch[_collections.UserString.__repr__] = _pprint_user_string + + def _safe_repr( + self, object: Any, context: Set[int], maxlevels: Optional[int], level: int + ) -> str: + typ = type(object) + if 
typ in _builtin_scalars: + return repr(object) + + r = getattr(typ, "__repr__", None) + + if issubclass(typ, dict) and r is dict.__repr__: + if not object: + return "{}" + objid = id(object) + if maxlevels and level >= maxlevels: + return "{...}" + if objid in context: + return _recursion(object) + context.add(objid) + components: List[str] = [] + append = components.append + level += 1 + for k, v in sorted(object.items(), key=_safe_tuple): + krepr = self._safe_repr(k, context, maxlevels, level) + vrepr = self._safe_repr(v, context, maxlevels, level) + append(f"{krepr}: {vrepr}") + context.remove(objid) + return "{%s}" % ", ".join(components) + + if (issubclass(typ, list) and r is list.__repr__) or ( + issubclass(typ, tuple) and r is tuple.__repr__ + ): + if issubclass(typ, list): + if not object: + return "[]" + format = "[%s]" + elif len(object) == 1: + format = "(%s,)" + else: + if not object: + return "()" + format = "(%s)" + objid = id(object) + if maxlevels and level >= maxlevels: + return format % "..." + if objid in context: + return _recursion(object) + context.add(objid) + components = [] + append = components.append + level += 1 + for o in object: + orepr = self._safe_repr(o, context, maxlevels, level) + append(orepr) + context.remove(objid) + return format % ", ".join(components) + + return repr(object) + + +_builtin_scalars = frozenset( + {str, bytes, bytearray, float, complex, bool, type(None), int} +) + + +def _recursion(object: Any) -> str: + return f"" + + +def _wrap_bytes_repr(object: Any, width: int, allowance: int) -> Iterator[str]: + current = b"" + last = len(object) // 4 * 4 + for i in range(0, len(object), 4): + part = object[i : i + 4] + candidate = current + part + if i == last: + width -= allowance + if len(repr(candidate)) > width: + if current: + yield repr(current) + current = part + else: + current = candidate + if current: + yield repr(current) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_io/saferepr.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_io/saferepr.py new file mode 100644 index 000000000..9f33fced6 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_io/saferepr.py @@ -0,0 +1,130 @@ +import pprint +import reprlib +from typing import Optional + + +def _try_repr_or_str(obj: object) -> str: + try: + return repr(obj) + except (KeyboardInterrupt, SystemExit): + raise + except BaseException: + return f'{type(obj).__name__}("{obj}")' + + +def _format_repr_exception(exc: BaseException, obj: object) -> str: + try: + exc_info = _try_repr_or_str(exc) + except (KeyboardInterrupt, SystemExit): + raise + except BaseException as exc: + exc_info = f"unpresentable exception ({_try_repr_or_str(exc)})" + return ( + f"<[{exc_info} raised in repr()] {type(obj).__name__} object at 0x{id(obj):x}>" + ) + + +def _ellipsize(s: str, maxsize: int) -> str: + if len(s) > maxsize: + i = max(0, (maxsize - 3) // 2) + j = max(0, maxsize - 3 - i) + return s[:i] + "..." + s[len(s) - j :] + return s + + +class SafeRepr(reprlib.Repr): + """ + repr.Repr that limits the resulting size of repr() and includes + information on exceptions raised during the call. + """ + + def __init__(self, maxsize: Optional[int], use_ascii: bool = False) -> None: + """ + :param maxsize: + If not None, will truncate the resulting repr to that specific size, using ellipsis + somewhere in the middle to hide the extra text. + If None, will not impose any size limits on the returning repr. 
+ """ + super().__init__() + # ``maxstring`` is used by the superclass, and needs to be an int; using a + # very large number in case maxsize is None, meaning we want to disable + # truncation. + self.maxstring = maxsize if maxsize is not None else 1_000_000_000 + self.maxsize = maxsize + self.use_ascii = use_ascii + + def repr(self, x: object) -> str: + try: + if self.use_ascii: + s = ascii(x) + else: + s = super().repr(x) + + except (KeyboardInterrupt, SystemExit): + raise + except BaseException as exc: + s = _format_repr_exception(exc, x) + if self.maxsize is not None: + s = _ellipsize(s, self.maxsize) + return s + + def repr_instance(self, x: object, level: int) -> str: + try: + s = repr(x) + except (KeyboardInterrupt, SystemExit): + raise + except BaseException as exc: + s = _format_repr_exception(exc, x) + if self.maxsize is not None: + s = _ellipsize(s, self.maxsize) + return s + + +def safeformat(obj: object) -> str: + """Return a pretty printed string for the given object. + + Failing __repr__ functions of user instances will be represented + with a short exception info. + """ + try: + return pprint.pformat(obj) + except Exception as exc: + return _format_repr_exception(exc, obj) + + +# Maximum size of overall repr of objects to display during assertion errors. +DEFAULT_REPR_MAX_SIZE = 240 + + +def saferepr( + obj: object, maxsize: Optional[int] = DEFAULT_REPR_MAX_SIZE, use_ascii: bool = False +) -> str: + """Return a size-limited safe repr-string for the given object. + + Failing __repr__ functions of user instances will be represented + with a short exception info and 'saferepr' generally takes + care to never raise exceptions itself. + + This function is a wrapper around the Repr/reprlib functionality of the + stdlib. + """ + return SafeRepr(maxsize, use_ascii).repr(obj) + + +def saferepr_unlimited(obj: object, use_ascii: bool = True) -> str: + """Return an unlimited-size safe repr-string for the given object. + + As with saferepr, failing __repr__ functions of user instances + will be represented with a short exception info. + + This function is a wrapper around simple repr. + + Note: a cleaner solution would be to alter ``saferepr``this way + when maxsize=None, but that might affect some other code. + """ + try: + if use_ascii: + return ascii(obj) + return repr(obj) + except Exception as exc: + return _format_repr_exception(exc, obj) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_io/terminalwriter.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_io/terminalwriter.py new file mode 100644 index 000000000..deb6ecc3c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_io/terminalwriter.py @@ -0,0 +1,251 @@ +"""Helper functions for writing to terminals and files.""" + +import os +import shutil +import sys +from typing import final +from typing import Literal +from typing import Optional +from typing import Sequence +from typing import TextIO + +from ..compat import assert_never +from .wcwidth import wcswidth + + +# This code was initially copied from py 1.8.1, file _io/terminalwriter.py. + + +def get_terminal_width() -> int: + width, _ = shutil.get_terminal_size(fallback=(80, 24)) + + # The Windows get_terminal_size may be bogus, let's sanify a bit. 
+ if width < 40: + width = 80 + + return width + + +def should_do_markup(file: TextIO) -> bool: + if os.environ.get("PY_COLORS") == "1": + return True + if os.environ.get("PY_COLORS") == "0": + return False + if os.environ.get("NO_COLOR"): + return False + if os.environ.get("FORCE_COLOR"): + return True + return ( + hasattr(file, "isatty") and file.isatty() and os.environ.get("TERM") != "dumb" + ) + + +@final +class TerminalWriter: + _esctable = dict( + black=30, + red=31, + green=32, + yellow=33, + blue=34, + purple=35, + cyan=36, + white=37, + Black=40, + Red=41, + Green=42, + Yellow=43, + Blue=44, + Purple=45, + Cyan=46, + White=47, + bold=1, + light=2, + blink=5, + invert=7, + ) + + def __init__(self, file: Optional[TextIO] = None) -> None: + if file is None: + file = sys.stdout + if hasattr(file, "isatty") and file.isatty() and sys.platform == "win32": + try: + import colorama + except ImportError: + pass + else: + file = colorama.AnsiToWin32(file).stream + assert file is not None + self._file = file + self.hasmarkup = should_do_markup(file) + self._current_line = "" + self._terminal_width: Optional[int] = None + self.code_highlight = True + + @property + def fullwidth(self) -> int: + if self._terminal_width is not None: + return self._terminal_width + return get_terminal_width() + + @fullwidth.setter + def fullwidth(self, value: int) -> None: + self._terminal_width = value + + @property + def width_of_current_line(self) -> int: + """Return an estimate of the width so far in the current line.""" + return wcswidth(self._current_line) + + def markup(self, text: str, **markup: bool) -> str: + for name in markup: + if name not in self._esctable: + raise ValueError(f"unknown markup: {name!r}") + if self.hasmarkup: + esc = [self._esctable[name] for name, on in markup.items() if on] + if esc: + text = "".join("\x1b[%sm" % cod for cod in esc) + text + "\x1b[0m" + return text + + def sep( + self, + sepchar: str, + title: Optional[str] = None, + fullwidth: Optional[int] = None, + **markup: bool, + ) -> None: + if fullwidth is None: + fullwidth = self.fullwidth + # The goal is to have the line be as long as possible + # under the condition that len(line) <= fullwidth. + if sys.platform == "win32": + # If we print in the last column on windows we are on a + # new line but there is no way to verify/neutralize this + # (we may not know the exact line width). + # So let's be defensive to avoid empty lines in the output. + fullwidth -= 1 + if title is not None: + # we want 2 + 2*len(fill) + len(title) <= fullwidth + # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth + # 2*len(sepchar)*N <= fullwidth - len(title) - 2 + # N <= (fullwidth - len(title) - 2) // (2*len(sepchar)) + N = max((fullwidth - len(title) - 2) // (2 * len(sepchar)), 1) + fill = sepchar * N + line = f"{fill} {title} {fill}" + else: + # we want len(sepchar)*N <= fullwidth + # i.e. N <= fullwidth // len(sepchar) + line = sepchar * (fullwidth // len(sepchar)) + # In some situations there is room for an extra sepchar at the right, + # in particular if we consider that with a sepchar like "_ " the + # trailing space is not important at the end of the line. 
+ if len(line) + len(sepchar.rstrip()) <= fullwidth: + line += sepchar.rstrip() + + self.line(line, **markup) + + def write(self, msg: str, *, flush: bool = False, **markup: bool) -> None: + if msg: + current_line = msg.rsplit("\n", 1)[-1] + if "\n" in msg: + self._current_line = current_line + else: + self._current_line += current_line + + msg = self.markup(msg, **markup) + + try: + self._file.write(msg) + except UnicodeEncodeError: + # Some environments don't support printing general Unicode + # strings, due to misconfiguration or otherwise; in that case, + # print the string escaped to ASCII. + # When the Unicode situation improves we should consider + # letting the error propagate instead of masking it (see #7475 + # for one brief attempt). + msg = msg.encode("unicode-escape").decode("ascii") + self._file.write(msg) + + if flush: + self.flush() + + def line(self, s: str = "", **markup: bool) -> None: + self.write(s, **markup) + self.write("\n") + + def flush(self) -> None: + self._file.flush() + + def _write_source(self, lines: Sequence[str], indents: Sequence[str] = ()) -> None: + """Write lines of source code possibly highlighted. + + Keeping this private for now because the API is clunky. We should discuss how + to evolve the terminal writer so we can have more precise color support, for example + being able to write part of a line in one color and the rest in another, and so on. + """ + if indents and len(indents) != len(lines): + raise ValueError( + f"indents size ({len(indents)}) should have same size as lines ({len(lines)})" + ) + if not indents: + indents = [""] * len(lines) + source = "\n".join(lines) + new_lines = self._highlight(source).splitlines() + for indent, new_line in zip(indents, new_lines): + self.line(indent + new_line) + + def _highlight( + self, source: str, lexer: Literal["diff", "python"] = "python" + ) -> str: + """Highlight the given source if we have markup support.""" + from _pytest.config.exceptions import UsageError + + if not source or not self.hasmarkup or not self.code_highlight: + return source + + try: + from pygments.formatters.terminal import TerminalFormatter + + if lexer == "python": + from pygments.lexers.python import PythonLexer as Lexer + elif lexer == "diff": + from pygments.lexers.diff import DiffLexer as Lexer + else: + assert_never(lexer) + from pygments import highlight + import pygments.util + except ImportError: + return source + else: + try: + highlighted: str = highlight( + source, + Lexer(), + TerminalFormatter( + bg=os.getenv("PYTEST_THEME_MODE", "dark"), + style=os.getenv("PYTEST_THEME"), + ), + ) + # pygments terminal formatter may add a newline when there wasn't one. + # We don't want this, remove. + if highlighted[-1] == "\n" and source[-1] != "\n": + highlighted = highlighted[:-1] + + # Some lexers will not set the initial color explicitly + # which may lead to the previous color being propagated to the + # start of the expression, so reset first. + return "\x1b[0m" + highlighted + except pygments.util.ClassNotFound as e: + raise UsageError( + "PYTEST_THEME environment variable had an invalid value: '{}'. " + "Only valid pygment styles are allowed.".format( + os.getenv("PYTEST_THEME") + ) + ) from e + except pygments.util.OptionError as e: + raise UsageError( + "PYTEST_THEME_MODE environment variable had an invalid value: '{}'. 
" + "The only allowed values are 'dark' and 'light'.".format( + os.getenv("PYTEST_THEME_MODE") + ) + ) from e diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_io/wcwidth.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_io/wcwidth.py new file mode 100644 index 000000000..538031335 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_io/wcwidth.py @@ -0,0 +1,55 @@ +from functools import lru_cache +import unicodedata + + +@lru_cache(100) +def wcwidth(c: str) -> int: + """Determine how many columns are needed to display a character in a terminal. + + Returns -1 if the character is not printable. + Returns 0, 1 or 2 for other characters. + """ + o = ord(c) + + # ASCII fast path. + if 0x20 <= o < 0x07F: + return 1 + + # Some Cf/Zp/Zl characters which should be zero-width. + if ( + o == 0x0000 + or 0x200B <= o <= 0x200F + or 0x2028 <= o <= 0x202E + or 0x2060 <= o <= 0x2063 + ): + return 0 + + category = unicodedata.category(c) + + # Control characters. + if category == "Cc": + return -1 + + # Combining characters with zero width. + if category in ("Me", "Mn"): + return 0 + + # Full/Wide east asian characters. + if unicodedata.east_asian_width(c) in ("F", "W"): + return 2 + + return 1 + + +def wcswidth(s: str) -> int: + """Determine how many columns are needed to display a string in a terminal. + + Returns -1 if the string contains non-printable characters. + """ + width = 0 + for c in unicodedata.normalize("NFC", s): + wc = wcwidth(c) + if wc < 0: + return -1 + width += wc + return width diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_py/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_py/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_py/error.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_py/error.py new file mode 100644 index 000000000..68f1eed7e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_py/error.py @@ -0,0 +1,111 @@ +"""create errno-specific classes for IO or os calls.""" + +from __future__ import annotations + +import errno +import os +import sys +from typing import Callable +from typing import TYPE_CHECKING +from typing import TypeVar + + +if TYPE_CHECKING: + from typing_extensions import ParamSpec + + P = ParamSpec("P") + +R = TypeVar("R") + + +class Error(EnvironmentError): + def __repr__(self) -> str: + return "{}.{} {!r}: {} ".format( + self.__class__.__module__, + self.__class__.__name__, + self.__class__.__doc__, + " ".join(map(str, self.args)), + # repr(self.args) + ) + + def __str__(self) -> str: + s = "[{}]: {}".format( + self.__class__.__doc__, + " ".join(map(str, self.args)), + ) + return s + + +_winerrnomap = { + 2: errno.ENOENT, + 3: errno.ENOENT, + 17: errno.EEXIST, + 18: errno.EXDEV, + 13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailiable + 22: errno.ENOTDIR, + 20: errno.ENOTDIR, + 267: errno.ENOTDIR, + 5: errno.EACCES, # anything better? +} + + +class ErrorMaker: + """lazily provides Exception classes for each possible POSIX errno + (as defined per the 'errno' module). All such instances + subclass EnvironmentError. 
+ """ + + _errno2class: dict[int, type[Error]] = {} + + def __getattr__(self, name: str) -> type[Error]: + if name[0] == "_": + raise AttributeError(name) + eno = getattr(errno, name) + cls = self._geterrnoclass(eno) + setattr(self, name, cls) + return cls + + def _geterrnoclass(self, eno: int) -> type[Error]: + try: + return self._errno2class[eno] + except KeyError: + clsname = errno.errorcode.get(eno, "UnknownErrno%d" % (eno,)) + errorcls = type( + clsname, + (Error,), + {"__module__": "py.error", "__doc__": os.strerror(eno)}, + ) + self._errno2class[eno] = errorcls + return errorcls + + def checked_call( + self, func: Callable[P, R], *args: P.args, **kwargs: P.kwargs + ) -> R: + """Call a function and raise an errno-exception if applicable.""" + __tracebackhide__ = True + try: + return func(*args, **kwargs) + except Error: + raise + except OSError as value: + if not hasattr(value, "errno"): + raise + errno = value.errno + if sys.platform == "win32": + try: + cls = self._geterrnoclass(_winerrnomap[errno]) + except KeyError: + raise value + else: + # we are not on Windows, or we got a proper OSError + cls = self._geterrnoclass(errno) + + raise cls(f"{func.__name__}{args!r}") + + +_error_maker = ErrorMaker() +checked_call = _error_maker.checked_call + + +def __getattr__(attr: str) -> type[Error]: + return getattr(_error_maker, attr) # type: ignore[no-any-return] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_py/path.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_py/path.py new file mode 100644 index 000000000..7bb3693f9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_py/path.py @@ -0,0 +1,1477 @@ +# mypy: allow-untyped-defs +"""local path implementation.""" + +from __future__ import annotations + +import atexit +from contextlib import contextmanager +import fnmatch +import importlib.util +import io +import os +from os.path import abspath +from os.path import dirname +from os.path import exists +from os.path import isabs +from os.path import isdir +from os.path import isfile +from os.path import islink +from os.path import normpath +import posixpath +from stat import S_ISDIR +from stat import S_ISLNK +from stat import S_ISREG +import sys +from typing import Any +from typing import Callable +from typing import cast +from typing import Literal +from typing import overload +from typing import TYPE_CHECKING +import uuid +import warnings + +from . import error + + +# Moved from local.py. +iswin32 = sys.platform == "win32" or (getattr(os, "_name", False) == "nt") + + +class Checkers: + _depend_on_existence = "exists", "link", "dir", "file" + + def __init__(self, path): + self.path = path + + def dotfile(self): + return self.path.basename.startswith(".") + + def ext(self, arg): + if not arg.startswith("."): + arg = "." 
+ arg + return self.path.ext == arg + + def basename(self, arg): + return self.path.basename == arg + + def basestarts(self, arg): + return self.path.basename.startswith(arg) + + def relto(self, arg): + return self.path.relto(arg) + + def fnmatch(self, arg): + return self.path.fnmatch(arg) + + def endswith(self, arg): + return str(self.path).endswith(arg) + + def _evaluate(self, kw): + from .._code.source import getrawcode + + for name, value in kw.items(): + invert = False + meth = None + try: + meth = getattr(self, name) + except AttributeError: + if name[:3] == "not": + invert = True + try: + meth = getattr(self, name[3:]) + except AttributeError: + pass + if meth is None: + raise TypeError(f"no {name!r} checker available for {self.path!r}") + try: + if getrawcode(meth).co_argcount > 1: + if (not meth(value)) ^ invert: + return False + else: + if bool(value) ^ bool(meth()) ^ invert: + return False + except (error.ENOENT, error.ENOTDIR, error.EBUSY): + # EBUSY feels not entirely correct, + # but its kind of necessary since ENOMEDIUM + # is not accessible in python + for name in self._depend_on_existence: + if name in kw: + if kw.get(name): + return False + name = "not" + name + if name in kw: + if not kw.get(name): + return False + return True + + _statcache: Stat + + def _stat(self) -> Stat: + try: + return self._statcache + except AttributeError: + try: + self._statcache = self.path.stat() + except error.ELOOP: + self._statcache = self.path.lstat() + return self._statcache + + def dir(self): + return S_ISDIR(self._stat().mode) + + def file(self): + return S_ISREG(self._stat().mode) + + def exists(self): + return self._stat() + + def link(self): + st = self.path.lstat() + return S_ISLNK(st.mode) + + +class NeverRaised(Exception): + pass + + +class Visitor: + def __init__(self, fil, rec, ignore, bf, sort): + if isinstance(fil, str): + fil = FNMatcher(fil) + if isinstance(rec, str): + self.rec: Callable[[LocalPath], bool] = FNMatcher(rec) + elif not hasattr(rec, "__call__") and rec: + self.rec = lambda path: True + else: + self.rec = rec + self.fil = fil + self.ignore = ignore + self.breadthfirst = bf + self.optsort = cast(Callable[[Any], Any], sorted) if sort else (lambda x: x) + + def gen(self, path): + try: + entries = path.listdir() + except self.ignore: + return + rec = self.rec + dirs = self.optsort( + [p for p in entries if p.check(dir=1) and (rec is None or rec(p))] + ) + if not self.breadthfirst: + for subdir in dirs: + for p in self.gen(subdir): + yield p + for p in self.optsort(entries): + if self.fil is None or self.fil(p): + yield p + if self.breadthfirst: + for subdir in dirs: + for p in self.gen(subdir): + yield p + + +class FNMatcher: + def __init__(self, pattern): + self.pattern = pattern + + def __call__(self, path): + pattern = self.pattern + + if ( + pattern.find(path.sep) == -1 + and iswin32 + and pattern.find(posixpath.sep) != -1 + ): + # Running on Windows, the pattern has no Windows path separators, + # and the pattern has one or more Posix path separators. Replace + # the Posix path separators with the Windows path separator. + pattern = pattern.replace(posixpath.sep, path.sep) + + if pattern.find(path.sep) == -1: + name = path.basename + else: + name = str(path) # path.strpath # XXX svn? + if not os.path.isabs(pattern): + pattern = "*" + path.sep + pattern + return fnmatch.fnmatch(name, pattern) + + +def map_as_list(func, iter): + return list(map(func, iter)) + + +class Stat: + if TYPE_CHECKING: + + @property + def size(self) -> int: ... 
+ + @property + def mtime(self) -> float: ... + + def __getattr__(self, name: str) -> Any: + return getattr(self._osstatresult, "st_" + name) + + def __init__(self, path, osstatresult): + self.path = path + self._osstatresult = osstatresult + + @property + def owner(self): + if iswin32: + raise NotImplementedError("XXX win32") + import pwd + + entry = error.checked_call(pwd.getpwuid, self.uid) # type:ignore[attr-defined,unused-ignore] + return entry[0] + + @property + def group(self): + """Return group name of file.""" + if iswin32: + raise NotImplementedError("XXX win32") + import grp + + entry = error.checked_call(grp.getgrgid, self.gid) # type:ignore[attr-defined,unused-ignore] + return entry[0] + + def isdir(self): + return S_ISDIR(self._osstatresult.st_mode) + + def isfile(self): + return S_ISREG(self._osstatresult.st_mode) + + def islink(self): + self.path.lstat() + return S_ISLNK(self._osstatresult.st_mode) + + +def getuserid(user): + import pwd + + if not isinstance(user, int): + user = pwd.getpwnam(user)[2] # type:ignore[attr-defined,unused-ignore] + return user + + +def getgroupid(group): + import grp + + if not isinstance(group, int): + group = grp.getgrnam(group)[2] # type:ignore[attr-defined,unused-ignore] + return group + + +class LocalPath: + """Object oriented interface to os.path and other local filesystem + related information. + """ + + class ImportMismatchError(ImportError): + """raised on pyimport() if there is a mismatch of __file__'s""" + + sep = os.sep + + def __init__(self, path=None, expanduser=False): + """Initialize and return a local Path instance. + + Path can be relative to the current directory. + If path is None it defaults to the current working directory. + If expanduser is True, tilde-expansion is performed. + Note that Path instances always carry an absolute path. + Note also that passing in a local path object will simply return + the exact same path object. Use new() to get a new copy. + """ + if path is None: + self.strpath = error.checked_call(os.getcwd) + else: + try: + path = os.fspath(path) + except TypeError: + raise ValueError( + "can only pass None, Path instances " + "or non-empty strings to LocalPath" + ) + if expanduser: + path = os.path.expanduser(path) + self.strpath = abspath(path) + + if sys.platform != "win32": + + def chown(self, user, group, rec=0): + """Change ownership to the given user and group. + user and group may be specified by a number or + by a name. if rec is True change ownership + recursively. 
+ """ + uid = getuserid(user) + gid = getgroupid(group) + if rec: + for x in self.visit(rec=lambda x: x.check(link=0)): + if x.check(link=0): + error.checked_call(os.chown, str(x), uid, gid) + error.checked_call(os.chown, str(self), uid, gid) + + def readlink(self) -> str: + """Return value of a symbolic link.""" + # https://github.com/python/mypy/issues/12278 + return error.checked_call(os.readlink, self.strpath) # type: ignore[arg-type,return-value,unused-ignore] + + def mklinkto(self, oldname): + """Posix style hard link to another name.""" + error.checked_call(os.link, str(oldname), str(self)) + + def mksymlinkto(self, value, absolute=1): + """Create a symbolic link with the given value (pointing to another name).""" + if absolute: + error.checked_call(os.symlink, str(value), self.strpath) + else: + base = self.common(value) + # with posix local paths '/' is always a common base + relsource = self.__class__(value).relto(base) + reldest = self.relto(base) + n = reldest.count(self.sep) + target = self.sep.join(("..",) * n + (relsource,)) + error.checked_call(os.symlink, target, self.strpath) + + def __div__(self, other): + return self.join(os.fspath(other)) + + __truediv__ = __div__ # py3k + + @property + def basename(self): + """Basename part of path.""" + return self._getbyspec("basename")[0] + + @property + def dirname(self): + """Dirname part of path.""" + return self._getbyspec("dirname")[0] + + @property + def purebasename(self): + """Pure base name of the path.""" + return self._getbyspec("purebasename")[0] + + @property + def ext(self): + """Extension of the path (including the '.').""" + return self._getbyspec("ext")[0] + + def read_binary(self): + """Read and return a bytestring from reading the path.""" + with self.open("rb") as f: + return f.read() + + def read_text(self, encoding): + """Read and return a Unicode string from reading the path.""" + with self.open("r", encoding=encoding) as f: + return f.read() + + def read(self, mode="r"): + """Read and return a bytestring from reading the path.""" + with self.open(mode) as f: + return f.read() + + def readlines(self, cr=1): + """Read and return a list of lines from the path. if cr is False, the + newline will be removed from the end of each line.""" + mode = "r" + + if not cr: + content = self.read(mode) + return content.split("\n") + else: + f = self.open(mode) + try: + return f.readlines() + finally: + f.close() + + def load(self): + """(deprecated) return object unpickled from self.read()""" + f = self.open("rb") + try: + import pickle + + return error.checked_call(pickle.load, f) + finally: + f.close() + + def move(self, target): + """Move this path to target.""" + if target.relto(self): + raise error.EINVAL(target, "cannot move path into a subdirectory of itself") + try: + self.rename(target) + except error.EXDEV: # invalid cross-device link + self.copy(target) + self.remove() + + def fnmatch(self, pattern): + """Return true if the basename/fullname matches the glob-'pattern'. + + valid pattern characters:: + + * matches everything + ? matches any single character + [seq] matches any character in seq + [!seq] matches any char not in seq + + If the pattern contains a path-separator then the full path + is used for pattern matching and a '*' is prepended to the + pattern. + + if the pattern doesn't contain a path-separator the pattern + is only matched against the basename. 
+ """ + return FNMatcher(pattern)(self) + + def relto(self, relpath): + """Return a string which is the relative part of the path + to the given 'relpath'. + """ + if not isinstance(relpath, (str, LocalPath)): + raise TypeError(f"{relpath!r}: not a string or path object") + strrelpath = str(relpath) + if strrelpath and strrelpath[-1] != self.sep: + strrelpath += self.sep + # assert strrelpath[-1] == self.sep + # assert strrelpath[-2] != self.sep + strself = self.strpath + if sys.platform == "win32" or getattr(os, "_name", None) == "nt": + if os.path.normcase(strself).startswith(os.path.normcase(strrelpath)): + return strself[len(strrelpath) :] + elif strself.startswith(strrelpath): + return strself[len(strrelpath) :] + return "" + + def ensure_dir(self, *args): + """Ensure the path joined with args is a directory.""" + return self.ensure(*args, dir=True) + + def bestrelpath(self, dest): + """Return a string which is a relative path from self + (assumed to be a directory) to dest such that + self.join(bestrelpath) == dest and if not such + path can be determined return dest. + """ + try: + if self == dest: + return os.curdir + base = self.common(dest) + if not base: # can be the case on windows + return str(dest) + self2base = self.relto(base) + reldest = dest.relto(base) + if self2base: + n = self2base.count(self.sep) + 1 + else: + n = 0 + lst = [os.pardir] * n + if reldest: + lst.append(reldest) + target = dest.sep.join(lst) + return target + except AttributeError: + return str(dest) + + def exists(self): + return self.check() + + def isdir(self): + return self.check(dir=1) + + def isfile(self): + return self.check(file=1) + + def parts(self, reverse=False): + """Return a root-first list of all ancestor directories + plus the path itself. + """ + current = self + lst = [self] + while 1: + last = current + current = current.dirpath() + if last == current: + break + lst.append(current) + if not reverse: + lst.reverse() + return lst + + def common(self, other): + """Return the common part shared with the other path + or None if there is no common part. + """ + last = None + for x, y in zip(self.parts(), other.parts()): + if x != y: + return last + last = x + return last + + def __add__(self, other): + """Return new path object with 'other' added to the basename""" + return self.new(basename=self.basename + str(other)) + + def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False): + """Yields all paths below the current one + + fil is a filter (glob pattern or callable), if not matching the + path will not be yielded, defaulting to None (everything is + returned) + + rec is a filter (glob pattern or callable) that controls whether + a node is descended, defaulting to None + + ignore is an Exception class that is ignoredwhen calling dirlist() + on any of the paths (by default, all exceptions are reported) + + bf if True will cause a breadthfirst search instead of the + default depthfirst. Default: False + + sort if True will sort entries within each directory level. 
+ """ + yield from Visitor(fil, rec, ignore, bf, sort).gen(self) + + def _sortlist(self, res, sort): + if sort: + if hasattr(sort, "__call__"): + warnings.warn( + DeprecationWarning( + "listdir(sort=callable) is deprecated and breaks on python3" + ), + stacklevel=3, + ) + res.sort(sort) + else: + res.sort() + + def __fspath__(self): + return self.strpath + + def __hash__(self): + s = self.strpath + if iswin32: + s = s.lower() + return hash(s) + + def __eq__(self, other): + s1 = os.fspath(self) + try: + s2 = os.fspath(other) + except TypeError: + return False + if iswin32: + s1 = s1.lower() + try: + s2 = s2.lower() + except AttributeError: + return False + return s1 == s2 + + def __ne__(self, other): + return not (self == other) + + def __lt__(self, other): + return os.fspath(self) < os.fspath(other) + + def __gt__(self, other): + return os.fspath(self) > os.fspath(other) + + def samefile(self, other): + """Return True if 'other' references the same file as 'self'.""" + other = os.fspath(other) + if not isabs(other): + other = abspath(other) + if self == other: + return True + if not hasattr(os.path, "samefile"): + return False + return error.checked_call(os.path.samefile, self.strpath, other) + + def remove(self, rec=1, ignore_errors=False): + """Remove a file or directory (or a directory tree if rec=1). + if ignore_errors is True, errors while removing directories will + be ignored. + """ + if self.check(dir=1, link=0): + if rec: + # force remove of readonly files on windows + if iswin32: + self.chmod(0o700, rec=1) + import shutil + + error.checked_call( + shutil.rmtree, self.strpath, ignore_errors=ignore_errors + ) + else: + error.checked_call(os.rmdir, self.strpath) + else: + if iswin32: + self.chmod(0o700) + error.checked_call(os.remove, self.strpath) + + def computehash(self, hashtype="md5", chunksize=524288): + """Return hexdigest of hashvalue for this file.""" + try: + try: + import hashlib as mod + except ImportError: + if hashtype == "sha1": + hashtype = "sha" + mod = __import__(hashtype) + hash = getattr(mod, hashtype)() + except (AttributeError, ImportError): + raise ValueError(f"Don't know how to compute {hashtype!r} hash") + f = self.open("rb") + try: + while 1: + buf = f.read(chunksize) + if not buf: + return hash.hexdigest() + hash.update(buf) + finally: + f.close() + + def new(self, **kw): + """Create a modified version of this path. + the following keyword arguments modify various path parts:: + + a:/some/path/to/a/file.ext + xx drive + xxxxxxxxxxxxxxxxx dirname + xxxxxxxx basename + xxxx purebasename + xxx ext + """ + obj = object.__new__(self.__class__) + if not kw: + obj.strpath = self.strpath + return obj + drive, dirname, basename, purebasename, ext = self._getbyspec( + "drive,dirname,basename,purebasename,ext" + ) + if "basename" in kw: + if "purebasename" in kw or "ext" in kw: + raise ValueError("invalid specification %r" % kw) + else: + pb = kw.setdefault("purebasename", purebasename) + try: + ext = kw["ext"] + except KeyError: + pass + else: + if ext and not ext.startswith("."): + ext = "." 
+ ext + kw["basename"] = pb + ext + + if "dirname" in kw and not kw["dirname"]: + kw["dirname"] = drive + else: + kw.setdefault("dirname", dirname) + kw.setdefault("sep", self.sep) + obj.strpath = normpath("{dirname}{sep}{basename}".format(**kw)) + return obj + + def _getbyspec(self, spec: str) -> list[str]: + """See new for what 'spec' can be.""" + res = [] + parts = self.strpath.split(self.sep) + + args = filter(None, spec.split(",")) + for name in args: + if name == "drive": + res.append(parts[0]) + elif name == "dirname": + res.append(self.sep.join(parts[:-1])) + else: + basename = parts[-1] + if name == "basename": + res.append(basename) + else: + i = basename.rfind(".") + if i == -1: + purebasename, ext = basename, "" + else: + purebasename, ext = basename[:i], basename[i:] + if name == "purebasename": + res.append(purebasename) + elif name == "ext": + res.append(ext) + else: + raise ValueError("invalid part specification %r" % name) + return res + + def dirpath(self, *args, **kwargs): + """Return the directory path joined with any given path arguments.""" + if not kwargs: + path = object.__new__(self.__class__) + path.strpath = dirname(self.strpath) + if args: + path = path.join(*args) + return path + return self.new(basename="").join(*args, **kwargs) + + def join(self, *args: os.PathLike[str], abs: bool = False) -> LocalPath: + """Return a new path by appending all 'args' as path + components. if abs=1 is used restart from root if any + of the args is an absolute path. + """ + sep = self.sep + strargs = [os.fspath(arg) for arg in args] + strpath = self.strpath + if abs: + newargs: list[str] = [] + for arg in reversed(strargs): + if isabs(arg): + strpath = arg + strargs = newargs + break + newargs.insert(0, arg) + # special case for when we have e.g. strpath == "/" + actual_sep = "" if strpath.endswith(sep) else sep + for arg in strargs: + arg = arg.strip(sep) + if iswin32: + # allow unix style paths even on windows. + arg = arg.strip("/") + arg = arg.replace("/", sep) + strpath = strpath + actual_sep + arg + actual_sep = sep + obj = object.__new__(self.__class__) + obj.strpath = normpath(strpath) + return obj + + def open(self, mode="r", ensure=False, encoding=None): + """Return an opened file with the given mode. + + If ensure is True, create parent directories if needed. + """ + if ensure: + self.dirpath().ensure(dir=1) + if encoding: + return error.checked_call( + io.open, + self.strpath, + mode, + encoding=encoding, + ) + return error.checked_call(open, self.strpath, mode) + + def _fastjoin(self, name): + child = object.__new__(self.__class__) + child.strpath = self.strpath + self.sep + name + return child + + def islink(self): + return islink(self.strpath) + + def check(self, **kw): + """Check a path for existence and properties. + + Without arguments, return True if the path exists, otherwise False. 
+ + valid checkers:: + + file = 1 # is a file + file = 0 # is not a file (may not even exist) + dir = 1 # is a dir + link = 1 # is a link + exists = 1 # exists + + You can specify multiple checker definitions, for example:: + + path.check(file=1, link=1) # a link pointing to a file + """ + if not kw: + return exists(self.strpath) + if len(kw) == 1: + if "dir" in kw: + return not kw["dir"] ^ isdir(self.strpath) + if "file" in kw: + return not kw["file"] ^ isfile(self.strpath) + if not kw: + kw = {"exists": 1} + return Checkers(self)._evaluate(kw) + + _patternchars = set("*?[" + os.sep) + + def listdir(self, fil=None, sort=None): + """List directory contents, possibly filter by the given fil func + and possibly sorted. + """ + if fil is None and sort is None: + names = error.checked_call(os.listdir, self.strpath) + return map_as_list(self._fastjoin, names) + if isinstance(fil, str): + if not self._patternchars.intersection(fil): + child = self._fastjoin(fil) + if exists(child.strpath): + return [child] + return [] + fil = FNMatcher(fil) + names = error.checked_call(os.listdir, self.strpath) + res = [] + for name in names: + child = self._fastjoin(name) + if fil is None or fil(child): + res.append(child) + self._sortlist(res, sort) + return res + + def size(self) -> int: + """Return size of the underlying file object""" + return self.stat().size + + def mtime(self) -> float: + """Return last modification time of the path.""" + return self.stat().mtime + + def copy(self, target, mode=False, stat=False): + """Copy path to target. + + If mode is True, will copy copy permission from path to target. + If stat is True, copy permission, last modification + time, last access time, and flags from path to target. + """ + if self.check(file=1): + if target.check(dir=1): + target = target.join(self.basename) + assert self != target + copychunked(self, target) + if mode: + copymode(self.strpath, target.strpath) + if stat: + copystat(self, target) + else: + + def rec(p): + return p.check(link=0) + + for x in self.visit(rec=rec): + relpath = x.relto(self) + newx = target.join(relpath) + newx.dirpath().ensure(dir=1) + if x.check(link=1): + newx.mksymlinkto(x.readlink()) + continue + elif x.check(file=1): + copychunked(x, newx) + elif x.check(dir=1): + newx.ensure(dir=1) + if mode: + copymode(x.strpath, newx.strpath) + if stat: + copystat(x, newx) + + def rename(self, target): + """Rename this path to target.""" + target = os.fspath(target) + return error.checked_call(os.rename, self.strpath, target) + + def dump(self, obj, bin=1): + """Pickle object into path location""" + f = self.open("wb") + import pickle + + try: + error.checked_call(pickle.dump, obj, f, bin) + finally: + f.close() + + def mkdir(self, *args): + """Create & return the directory joined with args.""" + p = self.join(*args) + error.checked_call(os.mkdir, os.fspath(p)) + return p + + def write_binary(self, data, ensure=False): + """Write binary data into path. If ensure is True create + missing parent directories. + """ + if ensure: + self.dirpath().ensure(dir=1) + with self.open("wb") as f: + f.write(data) + + def write_text(self, data, encoding, ensure=False): + """Write text data into path using the specified encoding. + If ensure is True create missing parent directories. + """ + if ensure: + self.dirpath().ensure(dir=1) + with self.open("w", encoding=encoding) as f: + f.write(data) + + def write(self, data, mode="w", ensure=False): + """Write data into path. If ensure is True create + missing parent directories. 
+ """ + if ensure: + self.dirpath().ensure(dir=1) + if "b" in mode: + if not isinstance(data, bytes): + raise ValueError("can only process bytes") + else: + if not isinstance(data, str): + if not isinstance(data, bytes): + data = str(data) + else: + data = data.decode(sys.getdefaultencoding()) + f = self.open(mode) + try: + f.write(data) + finally: + f.close() + + def _ensuredirs(self): + parent = self.dirpath() + if parent == self: + return self + if parent.check(dir=0): + parent._ensuredirs() + if self.check(dir=0): + try: + self.mkdir() + except error.EEXIST: + # race condition: file/dir created by another thread/process. + # complain if it is not a dir + if self.check(dir=0): + raise + return self + + def ensure(self, *args, **kwargs): + """Ensure that an args-joined path exists (by default as + a file). if you specify a keyword argument 'dir=True' + then the path is forced to be a directory path. + """ + p = self.join(*args) + if kwargs.get("dir", 0): + return p._ensuredirs() + else: + p.dirpath()._ensuredirs() + if not p.check(file=1): + p.open("wb").close() + return p + + @overload + def stat(self, raising: Literal[True] = ...) -> Stat: ... + + @overload + def stat(self, raising: Literal[False]) -> Stat | None: ... + + def stat(self, raising: bool = True) -> Stat | None: + """Return an os.stat() tuple.""" + if raising: + return Stat(self, error.checked_call(os.stat, self.strpath)) + try: + return Stat(self, os.stat(self.strpath)) + except KeyboardInterrupt: + raise + except Exception: + return None + + def lstat(self) -> Stat: + """Return an os.lstat() tuple.""" + return Stat(self, error.checked_call(os.lstat, self.strpath)) + + def setmtime(self, mtime=None): + """Set modification time for the given path. if 'mtime' is None + (the default) then the file's mtime is set to current time. + + Note that the resolution for 'mtime' is platform dependent. + """ + if mtime is None: + return error.checked_call(os.utime, self.strpath, mtime) + try: + return error.checked_call(os.utime, self.strpath, (-1, mtime)) + except error.EINVAL: + return error.checked_call(os.utime, self.strpath, (self.atime(), mtime)) + + def chdir(self): + """Change directory to self and return old current directory""" + try: + old = self.__class__() + except error.ENOENT: + old = None + error.checked_call(os.chdir, self.strpath) + return old + + @contextmanager + def as_cwd(self): + """ + Return a context manager, which changes to the path's dir during the + managed "with" context. + On __enter__ it returns the old dir, which might be ``None``. + """ + old = self.chdir() + try: + yield old + finally: + if old is not None: + old.chdir() + + def realpath(self): + """Return a new path which contains no symbolic links.""" + return self.__class__(os.path.realpath(self.strpath)) + + def atime(self): + """Return last access time of the path.""" + return self.stat().atime + + def __repr__(self): + return "local(%r)" % self.strpath + + def __str__(self): + """Return string representation of the Path.""" + return self.strpath + + def chmod(self, mode, rec=0): + """Change permissions to the given mode. If mode is an + integer it directly encodes the os-specific modes. + if rec is True perform recursively. 
+ """ + if not isinstance(mode, int): + raise TypeError(f"mode {mode!r} must be an integer") + if rec: + for x in self.visit(rec=rec): + error.checked_call(os.chmod, str(x), mode) + error.checked_call(os.chmod, self.strpath, mode) + + def pypkgpath(self): + """Return the Python package path by looking for the last + directory upwards which still contains an __init__.py. + Return None if a pkgpath can not be determined. + """ + pkgpath = None + for parent in self.parts(reverse=True): + if parent.isdir(): + if not parent.join("__init__.py").exists(): + break + if not isimportable(parent.basename): + break + pkgpath = parent + return pkgpath + + def _ensuresyspath(self, ensuremode, path): + if ensuremode: + s = str(path) + if ensuremode == "append": + if s not in sys.path: + sys.path.append(s) + else: + if s != sys.path[0]: + sys.path.insert(0, s) + + def pyimport(self, modname=None, ensuresyspath=True): + """Return path as an imported python module. + + If modname is None, look for the containing package + and construct an according module name. + The module will be put/looked up in sys.modules. + if ensuresyspath is True then the root dir for importing + the file (taking __init__.py files into account) will + be prepended to sys.path if it isn't there already. + If ensuresyspath=="append" the root dir will be appended + if it isn't already contained in sys.path. + if ensuresyspath is False no modification of syspath happens. + + Special value of ensuresyspath=="importlib" is intended + purely for using in pytest, it is capable only of importing + separate .py files outside packages, e.g. for test suite + without any __init__.py file. It effectively allows having + same-named test modules in different places and offers + mild opt-in via this option. Note that it works only in + recent versions of python. + """ + if not self.check(): + raise error.ENOENT(self) + + if ensuresyspath == "importlib": + if modname is None: + modname = self.purebasename + spec = importlib.util.spec_from_file_location(modname, str(self)) + if spec is None or spec.loader is None: + raise ImportError(f"Can't find module {modname} at location {self!s}") + mod = importlib.util.module_from_spec(spec) + spec.loader.exec_module(mod) + return mod + + pkgpath = None + if modname is None: + pkgpath = self.pypkgpath() + if pkgpath is not None: + pkgroot = pkgpath.dirpath() + names = self.new(ext="").relto(pkgroot).split(self.sep) + if names[-1] == "__init__": + names.pop() + modname = ".".join(names) + else: + pkgroot = self.dirpath() + modname = self.purebasename + + self._ensuresyspath(ensuresyspath, pkgroot) + __import__(modname) + mod = sys.modules[modname] + if self.basename == "__init__.py": + return mod # we don't check anything as we might + # be in a namespace package ... 
too icky to check + modfile = mod.__file__ + assert modfile is not None + if modfile[-4:] in (".pyc", ".pyo"): + modfile = modfile[:-1] + elif modfile.endswith("$py.class"): + modfile = modfile[:-9] + ".py" + if modfile.endswith(os.sep + "__init__.py"): + if self.basename != "__init__.py": + modfile = modfile[:-12] + try: + issame = self.samefile(modfile) + except error.ENOENT: + issame = False + if not issame: + ignore = os.getenv("PY_IGNORE_IMPORTMISMATCH") + if ignore != "1": + raise self.ImportMismatchError(modname, modfile, self) + return mod + else: + try: + return sys.modules[modname] + except KeyError: + # we have a custom modname, do a pseudo-import + import types + + mod = types.ModuleType(modname) + mod.__file__ = str(self) + sys.modules[modname] = mod + try: + with open(str(self), "rb") as f: + exec(f.read(), mod.__dict__) + except BaseException: + del sys.modules[modname] + raise + return mod + + def sysexec(self, *argv: os.PathLike[str], **popen_opts: Any) -> str: + """Return stdout text from executing a system child process, + where the 'self' path points to executable. + The process is directly invoked and not through a system shell. + """ + from subprocess import PIPE + from subprocess import Popen + + popen_opts.pop("stdout", None) + popen_opts.pop("stderr", None) + proc = Popen( + [str(self)] + [str(arg) for arg in argv], + **popen_opts, + stdout=PIPE, + stderr=PIPE, + ) + stdout: str | bytes + stdout, stderr = proc.communicate() + ret = proc.wait() + if isinstance(stdout, bytes): + stdout = stdout.decode(sys.getdefaultencoding()) + if ret != 0: + if isinstance(stderr, bytes): + stderr = stderr.decode(sys.getdefaultencoding()) + raise RuntimeError( + ret, + ret, + str(self), + stdout, + stderr, + ) + return stdout + + @classmethod + def sysfind(cls, name, checker=None, paths=None): + """Return a path object found by looking at the systems + underlying PATH specification. If the checker is not None + it will be invoked to filter matching paths. If a binary + cannot be found, None is returned + Note: This is probably not working on plain win32 systems + but may work on cygwin. + """ + if isabs(name): + p = local(name) + if p.check(file=1): + return p + else: + if paths is None: + if iswin32: + paths = os.environ["Path"].split(";") + if "" not in paths and "." not in paths: + paths.append(".") + try: + systemroot = os.environ["SYSTEMROOT"] + except KeyError: + pass + else: + paths = [ + path.replace("%SystemRoot%", systemroot) for path in paths + ] + else: + paths = os.environ["PATH"].split(":") + tryadd = [] + if iswin32: + tryadd += os.environ["PATHEXT"].split(os.pathsep) + tryadd.append("") + + for x in paths: + for addext in tryadd: + p = local(x).join(name, abs=True) + addext + try: + if p.check(file=1): + if checker: + if not checker(p): + continue + return p + except error.EACCES: + pass + return None + + @classmethod + def _gethomedir(cls): + try: + x = os.environ["HOME"] + except KeyError: + try: + x = os.environ["HOMEDRIVE"] + os.environ["HOMEPATH"] + except KeyError: + return None + return cls(x) + + # """ + # special class constructors for local filesystem paths + # """ + @classmethod + def get_temproot(cls): + """Return the system's temporary directory + (where tempfiles are usually created in) + """ + import tempfile + + return local(tempfile.gettempdir()) + + @classmethod + def mkdtemp(cls, rootdir=None): + """Return a Path object pointing to a fresh new temporary directory + (which we created ourselves). 
+ """ + import tempfile + + if rootdir is None: + rootdir = cls.get_temproot() + path = error.checked_call(tempfile.mkdtemp, dir=str(rootdir)) + return cls(path) + + @classmethod + def make_numbered_dir( + cls, prefix="session-", rootdir=None, keep=3, lock_timeout=172800 + ): # two days + """Return unique directory with a number greater than the current + maximum one. The number is assumed to start directly after prefix. + if keep is true directories with a number less than (maxnum-keep) + will be removed. If .lock files are used (lock_timeout non-zero), + algorithm is multi-process safe. + """ + if rootdir is None: + rootdir = cls.get_temproot() + + nprefix = prefix.lower() + + def parse_num(path): + """Parse the number out of a path (if it matches the prefix)""" + nbasename = path.basename.lower() + if nbasename.startswith(nprefix): + try: + return int(nbasename[len(nprefix) :]) + except ValueError: + pass + + def create_lockfile(path): + """Exclusively create lockfile. Throws when failed""" + mypid = os.getpid() + lockfile = path.join(".lock") + if hasattr(lockfile, "mksymlinkto"): + lockfile.mksymlinkto(str(mypid)) + else: + fd = error.checked_call( + os.open, str(lockfile), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644 + ) + with os.fdopen(fd, "w") as f: + f.write(str(mypid)) + return lockfile + + def atexit_remove_lockfile(lockfile): + """Ensure lockfile is removed at process exit""" + mypid = os.getpid() + + def try_remove_lockfile(): + # in a fork() situation, only the last process should + # remove the .lock, otherwise the other processes run the + # risk of seeing their temporary dir disappear. For now + # we remove the .lock in the parent only (i.e. we assume + # that the children finish before the parent). + if os.getpid() != mypid: + return + try: + lockfile.remove() + except error.Error: + pass + + atexit.register(try_remove_lockfile) + + # compute the maximum number currently in use with the prefix + lastmax = None + while True: + maxnum = -1 + for path in rootdir.listdir(): + num = parse_num(path) + if num is not None: + maxnum = max(maxnum, num) + + # make the new directory + try: + udir = rootdir.mkdir(prefix + str(maxnum + 1)) + if lock_timeout: + lockfile = create_lockfile(udir) + atexit_remove_lockfile(lockfile) + except (error.EEXIST, error.ENOENT, error.EBUSY): + # race condition (1): another thread/process created the dir + # in the meantime - try again + # race condition (2): another thread/process spuriously acquired + # lock treating empty directory as candidate + # for removal - try again + # race condition (3): another thread/process tried to create the lock at + # the same time (happened in Python 3.3 on Windows) + # https://ci.appveyor.com/project/pytestbot/py/build/1.0.21/job/ffi85j4c0lqwsfwa + if lastmax == maxnum: + raise + lastmax = maxnum + continue + break + + def get_mtime(path): + """Read file modification time""" + try: + return path.lstat().mtime + except error.Error: + pass + + garbage_prefix = prefix + "garbage-" + + def is_garbage(path): + """Check if path denotes directory scheduled for removal""" + bn = path.basename + return bn.startswith(garbage_prefix) + + # prune old directories + udir_time = get_mtime(udir) + if keep and udir_time: + for path in rootdir.listdir(): + num = parse_num(path) + if num is not None and num <= (maxnum - keep): + try: + # try acquiring lock to remove directory as exclusive user + if lock_timeout: + create_lockfile(path) + except (error.EEXIST, error.ENOENT, error.EBUSY): + path_time = get_mtime(path) + if not 
path_time: + # assume directory doesn't exist now + continue + if abs(udir_time - path_time) < lock_timeout: + # assume directory with lockfile exists + # and lock timeout hasn't expired yet + continue + + # path dir locked for exclusive use + # and scheduled for removal to avoid another thread/process + # treating it as a new directory or removal candidate + garbage_path = rootdir.join(garbage_prefix + str(uuid.uuid4())) + try: + path.rename(garbage_path) + garbage_path.remove(rec=1) + except KeyboardInterrupt: + raise + except Exception: # this might be error.Error, WindowsError ... + pass + if is_garbage(path): + try: + path.remove(rec=1) + except KeyboardInterrupt: + raise + except Exception: # this might be error.Error, WindowsError ... + pass + + # make link... + try: + username = os.environ["USER"] # linux, et al + except KeyError: + try: + username = os.environ["USERNAME"] # windows + except KeyError: + username = "current" + + src = str(udir) + dest = src[: src.rfind("-")] + "-" + username + try: + os.unlink(dest) + except OSError: + pass + try: + os.symlink(src, dest) + except (OSError, AttributeError, NotImplementedError): + pass + + return udir + + +def copymode(src, dest): + """Copy permission from src to dst.""" + import shutil + + shutil.copymode(src, dest) + + +def copystat(src, dest): + """Copy permission, last modification time, + last access time, and flags from src to dst.""" + import shutil + + shutil.copystat(str(src), str(dest)) + + +def copychunked(src, dest): + chunksize = 524288 # half a meg of bytes + fsrc = src.open("rb") + try: + fdest = dest.open("wb") + try: + while 1: + buf = fsrc.read(chunksize) + if not buf: + break + fdest.write(buf) + finally: + fdest.close() + finally: + fsrc.close() + + +def isimportable(name): + if name and (name[0].isalpha() or name[0] == "_"): + name = name.replace("_", "") + return not name or name.isalnum() + + +local = LocalPath diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_version.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_version.py new file mode 100644 index 000000000..ac23f69e9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/_version.py @@ -0,0 +1,16 @@ +# file generated by setuptools_scm +# don't change, don't track in version control +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple, Union + VERSION_TUPLE = Tuple[Union[int, str], ...] 
+else: + VERSION_TUPLE = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE + +__version__ = version = '8.2.0' +__version_tuple__ = version_tuple = (8, 2, 0) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/assertion/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/assertion/__init__.py new file mode 100644 index 000000000..21dd4a4a4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/assertion/__init__.py @@ -0,0 +1,192 @@ +# mypy: allow-untyped-defs +"""Support for presenting detailed information in failing assertions.""" + +import sys +from typing import Any +from typing import Generator +from typing import List +from typing import Optional +from typing import TYPE_CHECKING + +from _pytest.assertion import rewrite +from _pytest.assertion import truncate +from _pytest.assertion import util +from _pytest.assertion.rewrite import assertstate_key +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.nodes import Item + + +if TYPE_CHECKING: + from _pytest.main import Session + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--assert", + action="store", + dest="assertmode", + choices=("rewrite", "plain"), + default="rewrite", + metavar="MODE", + help=( + "Control assertion debugging tools.\n" + "'plain' performs no assertion debugging.\n" + "'rewrite' (the default) rewrites assert statements in test modules" + " on import to provide assert expression information." + ), + ) + parser.addini( + "enable_assertion_pass_hook", + type="bool", + default=False, + help="Enables the pytest_assertion_pass hook. " + "Make sure to delete any previously generated pyc cache files.", + ) + Config._add_verbosity_ini( + parser, + Config.VERBOSITY_ASSERTIONS, + help=( + "Specify a verbosity level for assertions, overriding the main level. " + "Higher levels will provide more detailed explanation when an assertion fails." + ), + ) + + +def register_assert_rewrite(*names: str) -> None: + """Register one or more module names to be rewritten on import. + + This function will make sure that this module or all modules inside + the package will get their assert statements rewritten. + Thus you should make sure to call this before the module is + actually imported, usually in your __init__.py if you are a plugin + using a package. + + :param names: The module names to register. + """ + for name in names: + if not isinstance(name, str): + msg = "expected module names as *args, got {0} instead" # type: ignore[unreachable] + raise TypeError(msg.format(repr(names))) + for hook in sys.meta_path: + if isinstance(hook, rewrite.AssertionRewritingHook): + importhook = hook + break + else: + # TODO(typing): Add a protocol for mark_rewrite() and use it + # for importhook and for PytestPluginManager.rewrite_hook. 
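+        # No AssertionRewritingHook was found on sys.meta_path (e.g. rewriting
+        # is disabled), so fall back to the no-op DummyRewriteHook defined
+        # below; its mark_rewrite() simply ignores the given names.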
+ importhook = DummyRewriteHook() # type: ignore + importhook.mark_rewrite(*names) + + +class DummyRewriteHook: + """A no-op import hook for when rewriting is disabled.""" + + def mark_rewrite(self, *names: str) -> None: + pass + + +class AssertionState: + """State for the assertion plugin.""" + + def __init__(self, config: Config, mode) -> None: + self.mode = mode + self.trace = config.trace.root.get("assertion") + self.hook: Optional[rewrite.AssertionRewritingHook] = None + + +def install_importhook(config: Config) -> rewrite.AssertionRewritingHook: + """Try to install the rewrite hook, raise SystemError if it fails.""" + config.stash[assertstate_key] = AssertionState(config, "rewrite") + config.stash[assertstate_key].hook = hook = rewrite.AssertionRewritingHook(config) + sys.meta_path.insert(0, hook) + config.stash[assertstate_key].trace("installed rewrite import hook") + + def undo() -> None: + hook = config.stash[assertstate_key].hook + if hook is not None and hook in sys.meta_path: + sys.meta_path.remove(hook) + + config.add_cleanup(undo) + return hook + + +def pytest_collection(session: "Session") -> None: + # This hook is only called when test modules are collected + # so for example not in the managing process of pytest-xdist + # (which does not collect test modules). + assertstate = session.config.stash.get(assertstate_key, None) + if assertstate: + if assertstate.hook is not None: + assertstate.hook.set_session(session) + + +@hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_protocol(item: Item) -> Generator[None, object, object]: + """Setup the pytest_assertrepr_compare and pytest_assertion_pass hooks. + + The rewrite module will use util._reprcompare if it exists to use custom + reporting via the pytest_assertrepr_compare hook. This sets up this custom + comparison for the test. + """ + ihook = item.ihook + + def callbinrepr(op, left: object, right: object) -> Optional[str]: + """Call the pytest_assertrepr_compare hook and prepare the result. + + This uses the first result from the hook and then ensures the + following: + * Overly verbose explanations are truncated unless configured otherwise + (eg. if running in verbose mode). + * Embedded newlines are escaped to help util.format_explanation() + later. + * If the rewrite mode is used embedded %-characters are replaced + to protect later % formatting. + + The result can be formatted by util.format_explanation() for + pretty printing. 
+ """ + hook_result = ihook.pytest_assertrepr_compare( + config=item.config, op=op, left=left, right=right + ) + for new_expl in hook_result: + if new_expl: + new_expl = truncate.truncate_if_required(new_expl, item) + new_expl = [line.replace("\n", "\\n") for line in new_expl] + res = "\n~".join(new_expl) + if item.config.getvalue("assertmode") == "rewrite": + res = res.replace("%", "%%") + return res + return None + + saved_assert_hooks = util._reprcompare, util._assertion_pass + util._reprcompare = callbinrepr + util._config = item.config + + if ihook.pytest_assertion_pass.get_hookimpls(): + + def call_assertion_pass_hook(lineno: int, orig: str, expl: str) -> None: + ihook.pytest_assertion_pass(item=item, lineno=lineno, orig=orig, expl=expl) + + util._assertion_pass = call_assertion_pass_hook + + try: + return (yield) + finally: + util._reprcompare, util._assertion_pass = saved_assert_hooks + util._config = None + + +def pytest_sessionfinish(session: "Session") -> None: + assertstate = session.config.stash.get(assertstate_key, None) + if assertstate: + if assertstate.hook is not None: + assertstate.hook.set_session(None) + + +def pytest_assertrepr_compare( + config: Config, op: str, left: Any, right: Any +) -> Optional[List[str]]: + return util.assertrepr_compare(config=config, op=op, left=left, right=right) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/assertion/rewrite.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/assertion/rewrite.py new file mode 100644 index 000000000..678471ee9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/assertion/rewrite.py @@ -0,0 +1,1191 @@ +"""Rewrite assertion AST to produce nice error messages.""" + +import ast +from collections import defaultdict +import errno +import functools +import importlib.abc +import importlib.machinery +import importlib.util +import io +import itertools +import marshal +import os +from pathlib import Path +from pathlib import PurePath +import struct +import sys +import tokenize +import types +from typing import Callable +from typing import Dict +from typing import IO +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Optional +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +from _pytest._io.saferepr import DEFAULT_REPR_MAX_SIZE +from _pytest._io.saferepr import saferepr +from _pytest._version import version +from _pytest.assertion import util +from _pytest.config import Config +from _pytest.main import Session +from _pytest.pathlib import absolutepath +from _pytest.pathlib import fnmatch_ex +from _pytest.stash import StashKey + + +# fmt: off +from _pytest.assertion.util import format_explanation as _format_explanation # noqa:F401, isort:skip +# fmt:on + +if TYPE_CHECKING: + from _pytest.assertion import AssertionState + + +class Sentinel: + pass + + +assertstate_key = StashKey["AssertionState"]() + +# pytest caches rewritten pycs in pycache dirs +PYTEST_TAG = f"{sys.implementation.cache_tag}-pytest-{version}" +PYC_EXT = ".py" + (__debug__ and "c" or "o") +PYC_TAIL = "." 
+ PYTEST_TAG + PYC_EXT + +# Special marker that denotes we have just left a scope definition +_SCOPE_END_MARKER = Sentinel() + + +class AssertionRewritingHook(importlib.abc.MetaPathFinder, importlib.abc.Loader): + """PEP302/PEP451 import hook which rewrites asserts.""" + + def __init__(self, config: Config) -> None: + self.config = config + try: + self.fnpats = config.getini("python_files") + except ValueError: + self.fnpats = ["test_*.py", "*_test.py"] + self.session: Optional[Session] = None + self._rewritten_names: Dict[str, Path] = {} + self._must_rewrite: Set[str] = set() + # flag to guard against trying to rewrite a pyc file while we are already writing another pyc file, + # which might result in infinite recursion (#3506) + self._writing_pyc = False + self._basenames_to_check_rewrite = {"conftest"} + self._marked_for_rewrite_cache: Dict[str, bool] = {} + self._session_paths_checked = False + + def set_session(self, session: Optional[Session]) -> None: + self.session = session + self._session_paths_checked = False + + # Indirection so we can mock calls to find_spec originated from the hook during testing + _find_spec = importlib.machinery.PathFinder.find_spec + + def find_spec( + self, + name: str, + path: Optional[Sequence[Union[str, bytes]]] = None, + target: Optional[types.ModuleType] = None, + ) -> Optional[importlib.machinery.ModuleSpec]: + if self._writing_pyc: + return None + state = self.config.stash[assertstate_key] + if self._early_rewrite_bailout(name, state): + return None + state.trace("find_module called for: %s" % name) + + # Type ignored because mypy is confused about the `self` binding here. + spec = self._find_spec(name, path) # type: ignore + if ( + # the import machinery could not find a file to import + spec is None + # this is a namespace package (without `__init__.py`) + # there's nothing to rewrite there + or spec.origin is None + # we can only rewrite source files + or not isinstance(spec.loader, importlib.machinery.SourceFileLoader) + # if the file doesn't exist, we can't rewrite it + or not os.path.exists(spec.origin) + ): + return None + else: + fn = spec.origin + + if not self._should_rewrite(name, fn, state): + return None + + return importlib.util.spec_from_file_location( + name, + fn, + loader=self, + submodule_search_locations=spec.submodule_search_locations, + ) + + def create_module( + self, spec: importlib.machinery.ModuleSpec + ) -> Optional[types.ModuleType]: + return None # default behaviour is fine + + def exec_module(self, module: types.ModuleType) -> None: + assert module.__spec__ is not None + assert module.__spec__.origin is not None + fn = Path(module.__spec__.origin) + state = self.config.stash[assertstate_key] + + self._rewritten_names[module.__name__] = fn + + # The requested module looks like a test file, so rewrite it. This is + # the most magical part of the process: load the source, rewrite the + # asserts, and load the rewritten source. We also cache the rewritten + # module code in a special pyc. We must be aware of the possibility of + # concurrent pytest processes rewriting and loading pycs. To avoid + # tricky race conditions, we maintain the following invariant: The + # cached pyc is always a complete, valid pyc. Operations on it must be + # atomic. POSIX's atomic rename comes in handy. 
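+        # Concretely, _write_pyc (below) writes the bytecode to a per-process
+        # temporary file ("{pyc}.{pid}") and then os.replace()s it over the
+        # final cache path, so readers never observe a partially written pyc.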
+ write = not sys.dont_write_bytecode + cache_dir = get_cache_dir(fn) + if write: + ok = try_makedirs(cache_dir) + if not ok: + write = False + state.trace(f"read only directory: {cache_dir}") + + cache_name = fn.name[:-3] + PYC_TAIL + pyc = cache_dir / cache_name + # Notice that even if we're in a read-only directory, I'm going + # to check for a cached pyc. This may not be optimal... + co = _read_pyc(fn, pyc, state.trace) + if co is None: + state.trace(f"rewriting {fn!r}") + source_stat, co = _rewrite_test(fn, self.config) + if write: + self._writing_pyc = True + try: + _write_pyc(state, co, source_stat, pyc) + finally: + self._writing_pyc = False + else: + state.trace(f"found cached rewritten pyc for {fn}") + exec(co, module.__dict__) + + def _early_rewrite_bailout(self, name: str, state: "AssertionState") -> bool: + """A fast way to get out of rewriting modules. + + Profiling has shown that the call to PathFinder.find_spec (inside of + the find_spec from this class) is a major slowdown, so, this method + tries to filter what we're sure won't be rewritten before getting to + it. + """ + if self.session is not None and not self._session_paths_checked: + self._session_paths_checked = True + for initial_path in self.session._initialpaths: + # Make something as c:/projects/my_project/path.py -> + # ['c:', 'projects', 'my_project', 'path.py'] + parts = str(initial_path).split(os.sep) + # add 'path' to basenames to be checked. + self._basenames_to_check_rewrite.add(os.path.splitext(parts[-1])[0]) + + # Note: conftest already by default in _basenames_to_check_rewrite. + parts = name.split(".") + if parts[-1] in self._basenames_to_check_rewrite: + return False + + # For matching the name it must be as if it was a filename. + path = PurePath(*parts).with_suffix(".py") + + for pat in self.fnpats: + # if the pattern contains subdirectories ("tests/**.py" for example) we can't bail out based + # on the name alone because we need to match against the full path + if os.path.dirname(pat): + return False + if fnmatch_ex(pat, path): + return False + + if self._is_marked_for_rewrite(name, state): + return False + + state.trace(f"early skip of rewriting module: {name}") + return True + + def _should_rewrite(self, name: str, fn: str, state: "AssertionState") -> bool: + # always rewrite conftest files + if os.path.basename(fn) == "conftest.py": + state.trace(f"rewriting conftest file: {fn!r}") + return True + + if self.session is not None: + if self.session.isinitpath(absolutepath(fn)): + state.trace(f"matched test file (was specified on cmdline): {fn!r}") + return True + + # modules not passed explicitly on the command line are only + # rewritten if they match the naming convention for test files + fn_path = PurePath(fn) + for pat in self.fnpats: + if fnmatch_ex(pat, fn_path): + state.trace(f"matched test file {fn!r}") + return True + + return self._is_marked_for_rewrite(name, state) + + def _is_marked_for_rewrite(self, name: str, state: "AssertionState") -> bool: + try: + return self._marked_for_rewrite_cache[name] + except KeyError: + for marked in self._must_rewrite: + if name == marked or name.startswith(marked + "."): + state.trace(f"matched marked file {name!r} (from {marked!r})") + self._marked_for_rewrite_cache[name] = True + return True + + self._marked_for_rewrite_cache[name] = False + return False + + def mark_rewrite(self, *names: str) -> None: + """Mark import names as needing to be rewritten. + + The named module or package as well as any nested modules will + be rewritten on import. 
+ """ + already_imported = ( + set(names).intersection(sys.modules).difference(self._rewritten_names) + ) + for name in already_imported: + mod = sys.modules[name] + if not AssertionRewriter.is_rewrite_disabled( + mod.__doc__ or "" + ) and not isinstance(mod.__loader__, type(self)): + self._warn_already_imported(name) + self._must_rewrite.update(names) + self._marked_for_rewrite_cache.clear() + + def _warn_already_imported(self, name: str) -> None: + from _pytest.warning_types import PytestAssertRewriteWarning + + self.config.issue_config_time_warning( + PytestAssertRewriteWarning( + "Module already imported so cannot be rewritten: %s" % name + ), + stacklevel=5, + ) + + def get_data(self, pathname: Union[str, bytes]) -> bytes: + """Optional PEP302 get_data API.""" + with open(pathname, "rb") as f: + return f.read() + + if sys.version_info >= (3, 10): + if sys.version_info >= (3, 12): + from importlib.resources.abc import TraversableResources + else: + from importlib.abc import TraversableResources + + def get_resource_reader(self, name: str) -> TraversableResources: + if sys.version_info < (3, 11): + from importlib.readers import FileReader + else: + from importlib.resources.readers import FileReader + + return FileReader(types.SimpleNamespace(path=self._rewritten_names[name])) + + +def _write_pyc_fp( + fp: IO[bytes], source_stat: os.stat_result, co: types.CodeType +) -> None: + # Technically, we don't have to have the same pyc format as + # (C)Python, since these "pycs" should never be seen by builtin + # import. However, there's little reason to deviate. + fp.write(importlib.util.MAGIC_NUMBER) + # https://www.python.org/dev/peps/pep-0552/ + flags = b"\x00\x00\x00\x00" + fp.write(flags) + # as of now, bytecode header expects 32-bit numbers for size and mtime (#4903) + mtime = int(source_stat.st_mtime) & 0xFFFFFFFF + size = source_stat.st_size & 0xFFFFFFFF + # " bool: + proc_pyc = f"{pyc}.{os.getpid()}" + try: + with open(proc_pyc, "wb") as fp: + _write_pyc_fp(fp, source_stat, co) + except OSError as e: + state.trace(f"error writing pyc file at {proc_pyc}: errno={e.errno}") + return False + + try: + os.replace(proc_pyc, pyc) + except OSError as e: + state.trace(f"error writing pyc file at {pyc}: {e}") + # we ignore any failure to write the cache file + # there are many reasons, permission-denied, pycache dir being a + # file etc. + return False + return True + + +def _rewrite_test(fn: Path, config: Config) -> Tuple[os.stat_result, types.CodeType]: + """Read and rewrite *fn* and return the code object.""" + stat = os.stat(fn) + source = fn.read_bytes() + strfn = str(fn) + tree = ast.parse(source, filename=strfn) + rewrite_asserts(tree, source, strfn, config) + co = compile(tree, strfn, "exec", dont_inherit=True) + return stat, co + + +def _read_pyc( + source: Path, pyc: Path, trace: Callable[[str], None] = lambda x: None +) -> Optional[types.CodeType]: + """Possibly read a pytest pyc containing rewritten code. + + Return rewritten code if successful or None if not. + """ + try: + fp = open(pyc, "rb") + except OSError: + return None + with fp: + try: + stat_result = os.stat(source) + mtime = int(stat_result.st_mtime) + size = stat_result.st_size + data = fp.read(16) + except OSError as e: + trace(f"_read_pyc({source}): OSError {e}") + return None + # Check for invalid or out of date pyc file. 
+ if len(data) != (16): + trace("_read_pyc(%s): invalid pyc (too short)" % source) + return None + if data[:4] != importlib.util.MAGIC_NUMBER: + trace("_read_pyc(%s): invalid pyc (bad magic number)" % source) + return None + if data[4:8] != b"\x00\x00\x00\x00": + trace("_read_pyc(%s): invalid pyc (unsupported flags)" % source) + return None + mtime_data = data[8:12] + if int.from_bytes(mtime_data, "little") != mtime & 0xFFFFFFFF: + trace("_read_pyc(%s): out of date" % source) + return None + size_data = data[12:16] + if int.from_bytes(size_data, "little") != size & 0xFFFFFFFF: + trace("_read_pyc(%s): invalid pyc (incorrect size)" % source) + return None + try: + co = marshal.load(fp) + except Exception as e: + trace(f"_read_pyc({source}): marshal.load error {e}") + return None + if not isinstance(co, types.CodeType): + trace("_read_pyc(%s): not a code object" % source) + return None + return co + + +def rewrite_asserts( + mod: ast.Module, + source: bytes, + module_path: Optional[str] = None, + config: Optional[Config] = None, +) -> None: + """Rewrite the assert statements in mod.""" + AssertionRewriter(module_path, config, source).run(mod) + + +def _saferepr(obj: object) -> str: + r"""Get a safe repr of an object for assertion error messages. + + The assertion formatting (util.format_explanation()) requires + newlines to be escaped since they are a special character for it. + Normally assertion.util.format_explanation() does this but for a + custom repr it is possible to contain one of the special escape + sequences, especially '\n{' and '\n}' are likely to be present in + JSON reprs. + """ + maxsize = _get_maxsize_for_saferepr(util._config) + return saferepr(obj, maxsize=maxsize).replace("\n", "\\n") + + +def _get_maxsize_for_saferepr(config: Optional[Config]) -> Optional[int]: + """Get `maxsize` configuration for saferepr based on the given config object.""" + if config is None: + verbosity = 0 + else: + verbosity = config.get_verbosity(Config.VERBOSITY_ASSERTIONS) + if verbosity >= 2: + return None + if verbosity >= 1: + return DEFAULT_REPR_MAX_SIZE * 10 + return DEFAULT_REPR_MAX_SIZE + + +def _format_assertmsg(obj: object) -> str: + r"""Format the custom assertion message given. + + For strings this simply replaces newlines with '\n~' so that + util.format_explanation() will preserve them instead of escaping + newlines. For other objects saferepr() is used first. + """ + # reprlib appears to have a bug which means that if a string + # contains a newline it gets escaped, however if an object has a + # .__repr__() which contains newlines it does not get escaped. + # However in either case we want to preserve the newline. 
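+    # For a plain string message, "foo\nbar" therefore becomes "foo\n~bar",
+    # and any literal "%" is doubled so later %-formatting leaves it intact.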
+ replaces = [("\n", "\n~"), ("%", "%%")] + if not isinstance(obj, str): + obj = saferepr(obj) + replaces.append(("\\n", "\n~")) + + for r1, r2 in replaces: + obj = obj.replace(r1, r2) + + return obj + + +def _should_repr_global_name(obj: object) -> bool: + if callable(obj): + return False + + try: + return not hasattr(obj, "__name__") + except Exception: + return True + + +def _format_boolop(explanations: Iterable[str], is_or: bool) -> str: + explanation = "(" + (is_or and " or " or " and ").join(explanations) + ")" + return explanation.replace("%", "%%") + + +def _call_reprcompare( + ops: Sequence[str], + results: Sequence[bool], + expls: Sequence[str], + each_obj: Sequence[object], +) -> str: + for i, res, expl in zip(range(len(ops)), results, expls): + try: + done = not res + except Exception: + done = True + if done: + break + if util._reprcompare is not None: + custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1]) + if custom is not None: + return custom + return expl + + +def _call_assertion_pass(lineno: int, orig: str, expl: str) -> None: + if util._assertion_pass is not None: + util._assertion_pass(lineno, orig, expl) + + +def _check_if_assertion_pass_impl() -> bool: + """Check if any plugins implement the pytest_assertion_pass hook + in order not to generate explanation unnecessarily (might be expensive).""" + return True if util._assertion_pass else False + + +UNARY_MAP = {ast.Not: "not %s", ast.Invert: "~%s", ast.USub: "-%s", ast.UAdd: "+%s"} + +BINOP_MAP = { + ast.BitOr: "|", + ast.BitXor: "^", + ast.BitAnd: "&", + ast.LShift: "<<", + ast.RShift: ">>", + ast.Add: "+", + ast.Sub: "-", + ast.Mult: "*", + ast.Div: "/", + ast.FloorDiv: "//", + ast.Mod: "%%", # escaped for string formatting + ast.Eq: "==", + ast.NotEq: "!=", + ast.Lt: "<", + ast.LtE: "<=", + ast.Gt: ">", + ast.GtE: ">=", + ast.Pow: "**", + ast.Is: "is", + ast.IsNot: "is not", + ast.In: "in", + ast.NotIn: "not in", + ast.MatMult: "@", +} + + +def traverse_node(node: ast.AST) -> Iterator[ast.AST]: + """Recursively yield node and all its children in depth-first order.""" + yield node + for child in ast.iter_child_nodes(node): + yield from traverse_node(child) + + +@functools.lru_cache(maxsize=1) +def _get_assertion_exprs(src: bytes) -> Dict[int, str]: + """Return a mapping from {lineno: "assertion test expression"}.""" + ret: Dict[int, str] = {} + + depth = 0 + lines: List[str] = [] + assert_lineno: Optional[int] = None + seen_lines: Set[int] = set() + + def _write_and_reset() -> None: + nonlocal depth, lines, assert_lineno, seen_lines + assert assert_lineno is not None + ret[assert_lineno] = "".join(lines).rstrip().rstrip("\\") + depth = 0 + lines = [] + assert_lineno = None + seen_lines = set() + + tokens = tokenize.tokenize(io.BytesIO(src).readline) + for tp, source, (lineno, offset), _, line in tokens: + if tp == tokenize.NAME and source == "assert": + assert_lineno = lineno + elif assert_lineno is not None: + # keep track of depth for the assert-message `,` lookup + if tp == tokenize.OP and source in "([{": + depth += 1 + elif tp == tokenize.OP and source in ")]}": + depth -= 1 + + if not lines: + lines.append(line[offset:]) + seen_lines.add(lineno) + # a non-nested comma separates the expression from the message + elif depth == 0 and tp == tokenize.OP and source == ",": + # one line assert with message + if lineno in seen_lines and len(lines) == 1: + offset_in_trimmed = offset + len(lines[-1]) - len(line) + lines[-1] = lines[-1][:offset_in_trimmed] + # multi-line assert with message + elif lineno in 
seen_lines: + lines[-1] = lines[-1][:offset] + # multi line assert with escapd newline before message + else: + lines.append(line[:offset]) + _write_and_reset() + elif tp in {tokenize.NEWLINE, tokenize.ENDMARKER}: + _write_and_reset() + elif lines and lineno not in seen_lines: + lines.append(line) + seen_lines.add(lineno) + + return ret + + +class AssertionRewriter(ast.NodeVisitor): + """Assertion rewriting implementation. + + The main entrypoint is to call .run() with an ast.Module instance, + this will then find all the assert statements and rewrite them to + provide intermediate values and a detailed assertion error. See + http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html + for an overview of how this works. + + The entry point here is .run() which will iterate over all the + statements in an ast.Module and for each ast.Assert statement it + finds call .visit() with it. Then .visit_Assert() takes over and + is responsible for creating new ast statements to replace the + original assert statement: it rewrites the test of an assertion + to provide intermediate values and replace it with an if statement + which raises an assertion error with a detailed explanation in + case the expression is false and calls pytest_assertion_pass hook + if expression is true. + + For this .visit_Assert() uses the visitor pattern to visit all the + AST nodes of the ast.Assert.test field, each visit call returning + an AST node and the corresponding explanation string. During this + state is kept in several instance attributes: + + :statements: All the AST statements which will replace the assert + statement. + + :variables: This is populated by .variable() with each variable + used by the statements so that they can all be set to None at + the end of the statements. + + :variable_counter: Counter to create new unique variables needed + by statements. Variables are created using .variable() and + have the form of "@py_assert0". + + :expl_stmts: The AST statements which will be executed to get + data from the assertion. This is the code which will construct + the detailed assertion message that is used in the AssertionError + or for the pytest_assertion_pass hook. + + :explanation_specifiers: A dict filled by .explanation_param() + with %-formatting placeholders and their corresponding + expressions to use in the building of an assertion message. + This is used by .pop_format_context() to build a message. + + :stack: A stack of the explanation_specifiers dicts maintained by + .push_format_context() and .pop_format_context() which allows + to build another %-formatted string while already building one. + + :scope: A tuple containing the current scope used for variables_overwrite. + + :variables_overwrite: A dict filled with references to variables + that change value within an assert. This happens when a variable is + reassigned with the walrus operator + + This state, except the variables_overwrite, is reset on every new assert + statement visited and used by the other visitors. + """ + + def __init__( + self, module_path: Optional[str], config: Optional[Config], source: bytes + ) -> None: + super().__init__() + self.module_path = module_path + self.config = config + if config is not None: + self.enable_assertion_pass_hook = config.getini( + "enable_assertion_pass_hook" + ) + else: + self.enable_assertion_pass_hook = False + self.source = source + self.scope: tuple[ast.AST, ...] 
= () + self.variables_overwrite: defaultdict[tuple[ast.AST, ...], Dict[str, str]] = ( + defaultdict(dict) + ) + + def run(self, mod: ast.Module) -> None: + """Find all assert statements in *mod* and rewrite them.""" + if not mod.body: + # Nothing to do. + return + + # We'll insert some special imports at the top of the module, but after any + # docstrings and __future__ imports, so first figure out where that is. + doc = getattr(mod, "docstring", None) + expect_docstring = doc is None + if doc is not None and self.is_rewrite_disabled(doc): + return + pos = 0 + item = None + for item in mod.body: + if ( + expect_docstring + and isinstance(item, ast.Expr) + and isinstance(item.value, ast.Constant) + and isinstance(item.value.value, str) + ): + doc = item.value.value + if self.is_rewrite_disabled(doc): + return + expect_docstring = False + elif ( + isinstance(item, ast.ImportFrom) + and item.level == 0 + and item.module == "__future__" + ): + pass + else: + break + pos += 1 + # Special case: for a decorated function, set the lineno to that of the + # first decorator, not the `def`. Issue #4984. + if isinstance(item, ast.FunctionDef) and item.decorator_list: + lineno = item.decorator_list[0].lineno + else: + lineno = item.lineno + # Now actually insert the special imports. + if sys.version_info >= (3, 10): + aliases = [ + ast.alias("builtins", "@py_builtins", lineno=lineno, col_offset=0), + ast.alias( + "_pytest.assertion.rewrite", + "@pytest_ar", + lineno=lineno, + col_offset=0, + ), + ] + else: + aliases = [ + ast.alias("builtins", "@py_builtins"), + ast.alias("_pytest.assertion.rewrite", "@pytest_ar"), + ] + imports = [ + ast.Import([alias], lineno=lineno, col_offset=0) for alias in aliases + ] + mod.body[pos:pos] = imports + + # Collect asserts. + self.scope = (mod,) + nodes: List[Union[ast.AST, Sentinel]] = [mod] + while nodes: + node = nodes.pop() + if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef)): + self.scope = tuple((*self.scope, node)) + nodes.append(_SCOPE_END_MARKER) + if node == _SCOPE_END_MARKER: + self.scope = self.scope[:-1] + continue + assert isinstance(node, ast.AST) + for name, field in ast.iter_fields(node): + if isinstance(field, list): + new: List[ast.AST] = [] + for i, child in enumerate(field): + if isinstance(child, ast.Assert): + # Transform assert. + new.extend(self.visit(child)) + else: + new.append(child) + if isinstance(child, ast.AST): + nodes.append(child) + setattr(node, name, new) + elif ( + isinstance(field, ast.AST) + # Don't recurse into expressions as they can't contain + # asserts. + and not isinstance(field, ast.expr) + ): + nodes.append(field) + + @staticmethod + def is_rewrite_disabled(docstring: str) -> bool: + return "PYTEST_DONT_REWRITE" in docstring + + def variable(self) -> str: + """Get a new variable.""" + # Use a character invalid in python identifiers to avoid clashing. 
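+        # The counter yields 0, 1, 2, ... so the generated names are
+        # "@py_assert0", "@py_assert1", ...; "@" is not valid in Python
+        # identifiers, so these temporaries never shadow user variables.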
+ name = "@py_assert" + str(next(self.variable_counter)) + self.variables.append(name) + return name + + def assign(self, expr: ast.expr) -> ast.Name: + """Give *expr* a name.""" + name = self.variable() + self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr)) + return ast.Name(name, ast.Load()) + + def display(self, expr: ast.expr) -> ast.expr: + """Call saferepr on the expression.""" + return self.helper("_saferepr", expr) + + def helper(self, name: str, *args: ast.expr) -> ast.expr: + """Call a helper in this module.""" + py_name = ast.Name("@pytest_ar", ast.Load()) + attr = ast.Attribute(py_name, name, ast.Load()) + return ast.Call(attr, list(args), []) + + def builtin(self, name: str) -> ast.Attribute: + """Return the builtin called *name*.""" + builtin_name = ast.Name("@py_builtins", ast.Load()) + return ast.Attribute(builtin_name, name, ast.Load()) + + def explanation_param(self, expr: ast.expr) -> str: + """Return a new named %-formatting placeholder for expr. + + This creates a %-formatting placeholder for expr in the + current formatting context, e.g. ``%(py0)s``. The placeholder + and expr are placed in the current format context so that it + can be used on the next call to .pop_format_context(). + """ + specifier = "py" + str(next(self.variable_counter)) + self.explanation_specifiers[specifier] = expr + return "%(" + specifier + ")s" + + def push_format_context(self) -> None: + """Create a new formatting context. + + The format context is used for when an explanation wants to + have a variable value formatted in the assertion message. In + this case the value required can be added using + .explanation_param(). Finally .pop_format_context() is used + to format a string of %-formatted values as added by + .explanation_param(). + """ + self.explanation_specifiers: Dict[str, ast.expr] = {} + self.stack.append(self.explanation_specifiers) + + def pop_format_context(self, expl_expr: ast.expr) -> ast.Name: + """Format the %-formatted string with current format context. + + The expl_expr should be an str ast.expr instance constructed from + the %-placeholders created by .explanation_param(). This will + add the required code to format said string to .expl_stmts and + return the ast.Name instance of the formatted string. + """ + current = self.stack.pop() + if self.stack: + self.explanation_specifiers = self.stack[-1] + keys = [ast.Constant(key) for key in current.keys()] + format_dict = ast.Dict(keys, list(current.values())) + form = ast.BinOp(expl_expr, ast.Mod(), format_dict) + name = "@py_format" + str(next(self.variable_counter)) + if self.enable_assertion_pass_hook: + self.format_variables.append(name) + self.expl_stmts.append(ast.Assign([ast.Name(name, ast.Store())], form)) + return ast.Name(name, ast.Load()) + + def generic_visit(self, node: ast.AST) -> Tuple[ast.Name, str]: + """Handle expressions we don't have custom code for.""" + assert isinstance(node, ast.expr) + res = self.assign(node) + return res, self.explanation_param(self.display(res)) + + def visit_Assert(self, assert_: ast.Assert) -> List[ast.stmt]: + """Return the AST statements to replace the ast.Assert instance. + + This rewrites the test of an assertion to provide + intermediate values and replace it with an if statement which + raises an assertion error with a detailed explanation in case + the expression is false. 
+ """ + if isinstance(assert_.test, ast.Tuple) and len(assert_.test.elts) >= 1: + import warnings + + from _pytest.warning_types import PytestAssertRewriteWarning + + # TODO: This assert should not be needed. + assert self.module_path is not None + warnings.warn_explicit( + PytestAssertRewriteWarning( + "assertion is always true, perhaps remove parentheses?" + ), + category=None, + filename=self.module_path, + lineno=assert_.lineno, + ) + + self.statements: List[ast.stmt] = [] + self.variables: List[str] = [] + self.variable_counter = itertools.count() + + if self.enable_assertion_pass_hook: + self.format_variables: List[str] = [] + + self.stack: List[Dict[str, ast.expr]] = [] + self.expl_stmts: List[ast.stmt] = [] + self.push_format_context() + # Rewrite assert into a bunch of statements. + top_condition, explanation = self.visit(assert_.test) + + negation = ast.UnaryOp(ast.Not(), top_condition) + + if self.enable_assertion_pass_hook: # Experimental pytest_assertion_pass hook + msg = self.pop_format_context(ast.Constant(explanation)) + + # Failed + if assert_.msg: + assertmsg = self.helper("_format_assertmsg", assert_.msg) + gluestr = "\n>assert " + else: + assertmsg = ast.Constant("") + gluestr = "assert " + err_explanation = ast.BinOp(ast.Constant(gluestr), ast.Add(), msg) + err_msg = ast.BinOp(assertmsg, ast.Add(), err_explanation) + err_name = ast.Name("AssertionError", ast.Load()) + fmt = self.helper("_format_explanation", err_msg) + exc = ast.Call(err_name, [fmt], []) + raise_ = ast.Raise(exc, None) + statements_fail = [] + statements_fail.extend(self.expl_stmts) + statements_fail.append(raise_) + + # Passed + fmt_pass = self.helper("_format_explanation", msg) + orig = _get_assertion_exprs(self.source)[assert_.lineno] + hook_call_pass = ast.Expr( + self.helper( + "_call_assertion_pass", + ast.Constant(assert_.lineno), + ast.Constant(orig), + fmt_pass, + ) + ) + # If any hooks implement assert_pass hook + hook_impl_test = ast.If( + self.helper("_check_if_assertion_pass_impl"), + [*self.expl_stmts, hook_call_pass], + [], + ) + statements_pass = [hook_impl_test] + + # Test for assertion condition + main_test = ast.If(negation, statements_fail, statements_pass) + self.statements.append(main_test) + if self.format_variables: + variables = [ + ast.Name(name, ast.Store()) for name in self.format_variables + ] + clear_format = ast.Assign(variables, ast.Constant(None)) + self.statements.append(clear_format) + + else: # Original assertion rewriting + # Create failure message. + body = self.expl_stmts + self.statements.append(ast.If(negation, body, [])) + if assert_.msg: + assertmsg = self.helper("_format_assertmsg", assert_.msg) + explanation = "\n>assert " + explanation + else: + assertmsg = ast.Constant("") + explanation = "assert " + explanation + template = ast.BinOp(assertmsg, ast.Add(), ast.Constant(explanation)) + msg = self.pop_format_context(template) + fmt = self.helper("_format_explanation", msg) + err_name = ast.Name("AssertionError", ast.Load()) + exc = ast.Call(err_name, [fmt], []) + raise_ = ast.Raise(exc, None) + + body.append(raise_) + + # Clear temporary variables by setting them to None. + if self.variables: + variables = [ast.Name(name, ast.Store()) for name in self.variables] + clear = ast.Assign(variables, ast.Constant(None)) + self.statements.append(clear) + # Fix locations (line numbers/column offsets). 
+ for stmt in self.statements: + for node in traverse_node(stmt): + ast.copy_location(node, assert_) + return self.statements + + def visit_NamedExpr(self, name: ast.NamedExpr) -> Tuple[ast.NamedExpr, str]: + # This method handles the 'walrus operator' repr of the target + # name if it's a local variable or _should_repr_global_name() + # thinks it's acceptable. + locs = ast.Call(self.builtin("locals"), [], []) + target_id = name.target.id + inlocs = ast.Compare(ast.Constant(target_id), [ast.In()], [locs]) + dorepr = self.helper("_should_repr_global_name", name) + test = ast.BoolOp(ast.Or(), [inlocs, dorepr]) + expr = ast.IfExp(test, self.display(name), ast.Constant(target_id)) + return name, self.explanation_param(expr) + + def visit_Name(self, name: ast.Name) -> Tuple[ast.Name, str]: + # Display the repr of the name if it's a local variable or + # _should_repr_global_name() thinks it's acceptable. + locs = ast.Call(self.builtin("locals"), [], []) + inlocs = ast.Compare(ast.Constant(name.id), [ast.In()], [locs]) + dorepr = self.helper("_should_repr_global_name", name) + test = ast.BoolOp(ast.Or(), [inlocs, dorepr]) + expr = ast.IfExp(test, self.display(name), ast.Constant(name.id)) + return name, self.explanation_param(expr) + + def visit_BoolOp(self, boolop: ast.BoolOp) -> Tuple[ast.Name, str]: + res_var = self.variable() + expl_list = self.assign(ast.List([], ast.Load())) + app = ast.Attribute(expl_list, "append", ast.Load()) + is_or = int(isinstance(boolop.op, ast.Or)) + body = save = self.statements + fail_save = self.expl_stmts + levels = len(boolop.values) - 1 + self.push_format_context() + # Process each operand, short-circuiting if needed. + for i, v in enumerate(boolop.values): + if i: + fail_inner: List[ast.stmt] = [] + # cond is set in a prior loop iteration below + self.expl_stmts.append(ast.If(cond, fail_inner, [])) # noqa: F821 + self.expl_stmts = fail_inner + # Check if the left operand is a ast.NamedExpr and the value has already been visited + if ( + isinstance(v, ast.Compare) + and isinstance(v.left, ast.NamedExpr) + and v.left.target.id + in [ + ast_expr.id + for ast_expr in boolop.values[:i] + if hasattr(ast_expr, "id") + ] + ): + pytest_temp = self.variable() + self.variables_overwrite[self.scope][v.left.target.id] = v.left # type:ignore[assignment] + v.left.target.id = pytest_temp + self.push_format_context() + res, expl = self.visit(v) + body.append(ast.Assign([ast.Name(res_var, ast.Store())], res)) + expl_format = self.pop_format_context(ast.Constant(expl)) + call = ast.Call(app, [expl_format], []) + self.expl_stmts.append(ast.Expr(call)) + if i < levels: + cond: ast.expr = res + if is_or: + cond = ast.UnaryOp(ast.Not(), cond) + inner: List[ast.stmt] = [] + self.statements.append(ast.If(cond, inner, [])) + self.statements = body = inner + self.statements = save + self.expl_stmts = fail_save + expl_template = self.helper("_format_boolop", expl_list, ast.Constant(is_or)) + expl = self.pop_format_context(expl_template) + return ast.Name(res_var, ast.Load()), self.explanation_param(expl) + + def visit_UnaryOp(self, unary: ast.UnaryOp) -> Tuple[ast.Name, str]: + pattern = UNARY_MAP[unary.op.__class__] + operand_res, operand_expl = self.visit(unary.operand) + res = self.assign(ast.UnaryOp(unary.op, operand_res)) + return res, pattern % (operand_expl,) + + def visit_BinOp(self, binop: ast.BinOp) -> Tuple[ast.Name, str]: + symbol = BINOP_MAP[binop.op.__class__] + left_expr, left_expl = self.visit(binop.left) + right_expr, right_expl = self.visit(binop.right) + explanation = 
f"({left_expl} {symbol} {right_expl})" + res = self.assign(ast.BinOp(left_expr, binop.op, right_expr)) + return res, explanation + + def visit_Call(self, call: ast.Call) -> Tuple[ast.Name, str]: + new_func, func_expl = self.visit(call.func) + arg_expls = [] + new_args = [] + new_kwargs = [] + for arg in call.args: + if isinstance(arg, ast.Name) and arg.id in self.variables_overwrite.get( + self.scope, {} + ): + arg = self.variables_overwrite[self.scope][arg.id] # type:ignore[assignment] + res, expl = self.visit(arg) + arg_expls.append(expl) + new_args.append(res) + for keyword in call.keywords: + if isinstance( + keyword.value, ast.Name + ) and keyword.value.id in self.variables_overwrite.get(self.scope, {}): + keyword.value = self.variables_overwrite[self.scope][keyword.value.id] # type:ignore[assignment] + res, expl = self.visit(keyword.value) + new_kwargs.append(ast.keyword(keyword.arg, res)) + if keyword.arg: + arg_expls.append(keyword.arg + "=" + expl) + else: # **args have `arg` keywords with an .arg of None + arg_expls.append("**" + expl) + + expl = "{}({})".format(func_expl, ", ".join(arg_expls)) + new_call = ast.Call(new_func, new_args, new_kwargs) + res = self.assign(new_call) + res_expl = self.explanation_param(self.display(res)) + outer_expl = f"{res_expl}\n{{{res_expl} = {expl}\n}}" + return res, outer_expl + + def visit_Starred(self, starred: ast.Starred) -> Tuple[ast.Starred, str]: + # A Starred node can appear in a function call. + res, expl = self.visit(starred.value) + new_starred = ast.Starred(res, starred.ctx) + return new_starred, "*" + expl + + def visit_Attribute(self, attr: ast.Attribute) -> Tuple[ast.Name, str]: + if not isinstance(attr.ctx, ast.Load): + return self.generic_visit(attr) + value, value_expl = self.visit(attr.value) + res = self.assign(ast.Attribute(value, attr.attr, ast.Load())) + res_expl = self.explanation_param(self.display(res)) + pat = "%s\n{%s = %s.%s\n}" + expl = pat % (res_expl, res_expl, value_expl, attr.attr) + return res, expl + + def visit_Compare(self, comp: ast.Compare) -> Tuple[ast.expr, str]: + self.push_format_context() + # We first check if we have overwritten a variable in the previous assert + if isinstance( + comp.left, ast.Name + ) and comp.left.id in self.variables_overwrite.get(self.scope, {}): + comp.left = self.variables_overwrite[self.scope][comp.left.id] # type:ignore[assignment] + if isinstance(comp.left, ast.NamedExpr): + self.variables_overwrite[self.scope][comp.left.target.id] = comp.left # type:ignore[assignment] + left_res, left_expl = self.visit(comp.left) + if isinstance(comp.left, (ast.Compare, ast.BoolOp)): + left_expl = f"({left_expl})" + res_variables = [self.variable() for i in range(len(comp.ops))] + load_names = [ast.Name(v, ast.Load()) for v in res_variables] + store_names = [ast.Name(v, ast.Store()) for v in res_variables] + it = zip(range(len(comp.ops)), comp.ops, comp.comparators) + expls = [] + syms = [] + results = [left_res] + for i, op, next_operand in it: + if ( + isinstance(next_operand, ast.NamedExpr) + and isinstance(left_res, ast.Name) + and next_operand.target.id == left_res.id + ): + next_operand.target.id = self.variable() + self.variables_overwrite[self.scope][left_res.id] = next_operand # type:ignore[assignment] + next_res, next_expl = self.visit(next_operand) + if isinstance(next_operand, (ast.Compare, ast.BoolOp)): + next_expl = f"({next_expl})" + results.append(next_res) + sym = BINOP_MAP[op.__class__] + syms.append(ast.Constant(sym)) + expl = f"{left_expl} {sym} {next_expl}" + 
expls.append(ast.Constant(expl)) + res_expr = ast.Compare(left_res, [op], [next_res]) + self.statements.append(ast.Assign([store_names[i]], res_expr)) + left_res, left_expl = next_res, next_expl + # Use pytest.assertion.util._reprcompare if that's available. + expl_call = self.helper( + "_call_reprcompare", + ast.Tuple(syms, ast.Load()), + ast.Tuple(load_names, ast.Load()), + ast.Tuple(expls, ast.Load()), + ast.Tuple(results, ast.Load()), + ) + if len(comp.ops) > 1: + res: ast.expr = ast.BoolOp(ast.And(), load_names) + else: + res = load_names[0] + + return res, self.explanation_param(self.pop_format_context(expl_call)) + + +def try_makedirs(cache_dir: Path) -> bool: + """Attempt to create the given directory and sub-directories exist. + + Returns True if successful or if it already exists. + """ + try: + os.makedirs(cache_dir, exist_ok=True) + except (FileNotFoundError, NotADirectoryError, FileExistsError): + # One of the path components was not a directory: + # - we're in a zip file + # - it is a file + return False + except PermissionError: + return False + except OSError as e: + # as of now, EROFS doesn't have an equivalent OSError-subclass + if e.errno == errno.EROFS: + return False + raise + return True + + +def get_cache_dir(file_path: Path) -> Path: + """Return the cache directory to write .pyc files for the given .py file path.""" + if sys.pycache_prefix: + # given: + # prefix = '/tmp/pycs' + # path = '/home/user/proj/test_app.py' + # we want: + # '/tmp/pycs/home/user/proj' + return Path(sys.pycache_prefix) / Path(*file_path.parts[1:-1]) + else: + # classic pycache directory + return file_path.parent / "__pycache__" diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/assertion/truncate.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/assertion/truncate.py new file mode 100644 index 000000000..4fdfd86a5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/assertion/truncate.py @@ -0,0 +1,118 @@ +"""Utilities for truncating assertion output. + +Current default behaviour is to truncate assertion explanations at +terminal lines, unless running with an assertions verbosity level of at least 2 or running on CI. +""" + +from typing import List +from typing import Optional + +from _pytest.assertion import util +from _pytest.config import Config +from _pytest.nodes import Item + + +DEFAULT_MAX_LINES = 8 +DEFAULT_MAX_CHARS = 8 * 80 +USAGE_MSG = "use '-vv' to show" + + +def truncate_if_required( + explanation: List[str], item: Item, max_length: Optional[int] = None +) -> List[str]: + """Truncate this assertion explanation if the given test item is eligible.""" + if _should_truncate_item(item): + return _truncate_explanation(explanation) + return explanation + + +def _should_truncate_item(item: Item) -> bool: + """Whether or not this test item is eligible for truncation.""" + verbose = item.config.get_verbosity(Config.VERBOSITY_ASSERTIONS) + return verbose < 2 and not util.running_on_ci() + + +def _truncate_explanation( + input_lines: List[str], + max_lines: Optional[int] = None, + max_chars: Optional[int] = None, +) -> List[str]: + """Truncate given list of strings that makes up the assertion explanation. + + Truncates to either 8 lines, or 640 characters - whichever the input reaches + first, taking the truncation explanation into account. The remaining lines + will be replaced by a usage message. 
+ """ + if max_lines is None: + max_lines = DEFAULT_MAX_LINES + if max_chars is None: + max_chars = DEFAULT_MAX_CHARS + + # Check if truncation required + input_char_count = len("".join(input_lines)) + # The length of the truncation explanation depends on the number of lines + # removed but is at least 68 characters: + # The real value is + # 64 (for the base message: + # '...\n...Full output truncated (1 line hidden), use '-vv' to show")' + # ) + # + 1 (for plural) + # + int(math.log10(len(input_lines) - max_lines)) (number of hidden line, at least 1) + # + 3 for the '...' added to the truncated line + # But if there's more than 100 lines it's very likely that we're going to + # truncate, so we don't need the exact value using log10. + tolerable_max_chars = ( + max_chars + 70 # 64 + 1 (for plural) + 2 (for '99') + 3 for '...' + ) + # The truncation explanation add two lines to the output + tolerable_max_lines = max_lines + 2 + if ( + len(input_lines) <= tolerable_max_lines + and input_char_count <= tolerable_max_chars + ): + return input_lines + # Truncate first to max_lines, and then truncate to max_chars if necessary + truncated_explanation = input_lines[:max_lines] + truncated_char = True + # We reevaluate the need to truncate chars following removal of some lines + if len("".join(truncated_explanation)) > tolerable_max_chars: + truncated_explanation = _truncate_by_char_count( + truncated_explanation, max_chars + ) + else: + truncated_char = False + + truncated_line_count = len(input_lines) - len(truncated_explanation) + if truncated_explanation[-1]: + # Add ellipsis and take into account part-truncated final line + truncated_explanation[-1] = truncated_explanation[-1] + "..." + if truncated_char: + # It's possible that we did not remove any char from this line + truncated_line_count += 1 + else: + # Add proper ellipsis when we were able to fit a full line exactly + truncated_explanation[-1] = "..." 
+ return [ + *truncated_explanation, + "", + f"...Full output truncated ({truncated_line_count} line" + f"{'' if truncated_line_count == 1 else 's'} hidden), {USAGE_MSG}", + ] + + +def _truncate_by_char_count(input_lines: List[str], max_chars: int) -> List[str]: + # Find point at which input length exceeds total allowed length + iterated_char_count = 0 + for iterated_index, input_line in enumerate(input_lines): + if iterated_char_count + len(input_line) > max_chars: + break + iterated_char_count += len(input_line) + + # Create truncated explanation with modified final line + truncated_result = input_lines[:iterated_index] + final_line = input_lines[iterated_index] + if final_line: + final_line_truncate_point = max_chars - iterated_char_count + final_line = final_line[:final_line_truncate_point] + truncated_result.append(final_line) + return truncated_result diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/assertion/util.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/assertion/util.py new file mode 100644 index 000000000..cb6716410 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/assertion/util.py @@ -0,0 +1,609 @@ +# mypy: allow-untyped-defs +"""Utilities for assertion debugging.""" + +import collections.abc +import os +import pprint +from typing import AbstractSet +from typing import Any +from typing import Callable +from typing import Iterable +from typing import List +from typing import Literal +from typing import Mapping +from typing import Optional +from typing import Protocol +from typing import Sequence +from unicodedata import normalize + +from _pytest import outcomes +import _pytest._code +from _pytest._io.pprint import PrettyPrinter +from _pytest._io.saferepr import saferepr +from _pytest._io.saferepr import saferepr_unlimited +from _pytest.config import Config + + +# The _reprcompare attribute on the util module is used by the new assertion +# interpretation code and assertion rewriter to detect this plugin was +# loaded and in turn call the hooks defined here as part of the +# DebugInterpreter. +_reprcompare: Optional[Callable[[str, object, object], Optional[str]]] = None + +# Works similarly as _reprcompare attribute. Is populated with the hook call +# when pytest_runtest_setup is called. +_assertion_pass: Optional[Callable[[int, str, str], None]] = None + +# Config object which is assigned during pytest_runtest_protocol. +_config: Optional[Config] = None + + +class _HighlightFunc(Protocol): + def __call__(self, source: str, lexer: Literal["diff", "python"] = "python") -> str: + """Apply highlighting to the given source.""" + + +def format_explanation(explanation: str) -> str: + r"""Format an explanation. + + Normally all embedded newlines are escaped, however there are + three exceptions: \n{, \n} and \n~. The first two are intended + cover nested explanations, see function and attribute explanations + for examples (.visit_Call(), visit_Attribute()). The last one is + for when one explanation needs to span multiple lines, e.g. when + displaying diffs. + """ + lines = _split_explanation(explanation) + result = _format_lines(lines) + return "\n".join(result) + + +def _split_explanation(explanation: str) -> List[str]: + r"""Return a list of individual lines in the explanation. + + This will return a list of lines split on '\n{', '\n}' and '\n~'. + Any other newlines will be escaped and appear in the line as the + literal '\n' characters. 
+ """ + raw_lines = (explanation or "").split("\n") + lines = [raw_lines[0]] + for values in raw_lines[1:]: + if values and values[0] in ["{", "}", "~", ">"]: + lines.append(values) + else: + lines[-1] += "\\n" + values + return lines + + +def _format_lines(lines: Sequence[str]) -> List[str]: + """Format the individual lines. + + This will replace the '{', '}' and '~' characters of our mini formatting + language with the proper 'where ...', 'and ...' and ' + ...' text, taking + care of indentation along the way. + + Return a list of formatted lines. + """ + result = list(lines[:1]) + stack = [0] + stackcnt = [0] + for line in lines[1:]: + if line.startswith("{"): + if stackcnt[-1]: + s = "and " + else: + s = "where " + stack.append(len(result)) + stackcnt[-1] += 1 + stackcnt.append(0) + result.append(" +" + " " * (len(stack) - 1) + s + line[1:]) + elif line.startswith("}"): + stack.pop() + stackcnt.pop() + result[stack[-1]] += line[1:] + else: + assert line[0] in ["~", ">"] + stack[-1] += 1 + indent = len(stack) if line.startswith("~") else len(stack) - 1 + result.append(" " * indent + line[1:]) + assert len(stack) == 1 + return result + + +def issequence(x: Any) -> bool: + return isinstance(x, collections.abc.Sequence) and not isinstance(x, str) + + +def istext(x: Any) -> bool: + return isinstance(x, str) + + +def isdict(x: Any) -> bool: + return isinstance(x, dict) + + +def isset(x: Any) -> bool: + return isinstance(x, (set, frozenset)) + + +def isnamedtuple(obj: Any) -> bool: + return isinstance(obj, tuple) and getattr(obj, "_fields", None) is not None + + +def isdatacls(obj: Any) -> bool: + return getattr(obj, "__dataclass_fields__", None) is not None + + +def isattrs(obj: Any) -> bool: + return getattr(obj, "__attrs_attrs__", None) is not None + + +def isiterable(obj: Any) -> bool: + try: + iter(obj) + return not istext(obj) + except Exception: + return False + + +def has_default_eq( + obj: object, +) -> bool: + """Check if an instance of an object contains the default eq + + First, we check if the object's __eq__ attribute has __code__, + if so, we check the equally of the method code filename (__code__.co_filename) + to the default one generated by the dataclass and attr module + for dataclasses the default co_filename is , for attrs class, the __eq__ should contain "attrs eq generated" + """ + # inspired from https://github.com/willmcgugan/rich/blob/07d51ffc1aee6f16bd2e5a25b4e82850fb9ed778/rich/pretty.py#L68 + if hasattr(obj.__eq__, "__code__") and hasattr(obj.__eq__.__code__, "co_filename"): + code_filename = obj.__eq__.__code__.co_filename + + if isattrs(obj): + return "attrs generated eq" in code_filename + + return code_filename == "" # data class + return True + + +def assertrepr_compare( + config, op: str, left: Any, right: Any, use_ascii: bool = False +) -> Optional[List[str]]: + """Return specialised explanations for some operators/operands.""" + verbose = config.get_verbosity(Config.VERBOSITY_ASSERTIONS) + + # Strings which normalize equal are often hard to distinguish when printed; use ascii() to make this easier. + # See issue #3246. + use_ascii = ( + isinstance(left, str) + and isinstance(right, str) + and normalize("NFD", left) == normalize("NFD", right) + ) + + if verbose > 1: + left_repr = saferepr_unlimited(left, use_ascii=use_ascii) + right_repr = saferepr_unlimited(right, use_ascii=use_ascii) + else: + # XXX: "15 chars indentation" is wrong + # ("E AssertionError: assert "); should use term width. 
+ maxsize = ( + 80 - 15 - len(op) - 2 + ) // 2 # 15 chars indentation, 1 space around op + + left_repr = saferepr(left, maxsize=maxsize, use_ascii=use_ascii) + right_repr = saferepr(right, maxsize=maxsize, use_ascii=use_ascii) + + summary = f"{left_repr} {op} {right_repr}" + highlighter = config.get_terminal_writer()._highlight + + explanation = None + try: + if op == "==": + explanation = _compare_eq_any(left, right, highlighter, verbose) + elif op == "not in": + if istext(left) and istext(right): + explanation = _notin_text(left, right, verbose) + elif op == "!=": + if isset(left) and isset(right): + explanation = ["Both sets are equal"] + elif op == ">=": + if isset(left) and isset(right): + explanation = _compare_gte_set(left, right, highlighter, verbose) + elif op == "<=": + if isset(left) and isset(right): + explanation = _compare_lte_set(left, right, highlighter, verbose) + elif op == ">": + if isset(left) and isset(right): + explanation = _compare_gt_set(left, right, highlighter, verbose) + elif op == "<": + if isset(left) and isset(right): + explanation = _compare_lt_set(left, right, highlighter, verbose) + + except outcomes.Exit: + raise + except Exception: + repr_crash = _pytest._code.ExceptionInfo.from_current()._getreprcrash() + explanation = [ + f"(pytest_assertion plugin: representation of details failed: {repr_crash}.", + " Probably an object has a faulty __repr__.)", + ] + + if not explanation: + return None + + if explanation[0] != "": + explanation = ["", *explanation] + return [summary, *explanation] + + +def _compare_eq_any( + left: Any, right: Any, highlighter: _HighlightFunc, verbose: int = 0 +) -> List[str]: + explanation = [] + if istext(left) and istext(right): + explanation = _diff_text(left, right, verbose) + else: + from _pytest.python_api import ApproxBase + + if isinstance(left, ApproxBase) or isinstance(right, ApproxBase): + # Although the common order should be obtained == expected, this ensures both ways + approx_side = left if isinstance(left, ApproxBase) else right + other_side = right if isinstance(left, ApproxBase) else left + + explanation = approx_side._repr_compare(other_side) + elif type(left) is type(right) and ( + isdatacls(left) or isattrs(left) or isnamedtuple(left) + ): + # Note: unlike dataclasses/attrs, namedtuples compare only the + # field values, not the type or field names. But this branch + # intentionally only handles the same-type case, which was often + # used in older code bases before dataclasses/attrs were available. + explanation = _compare_eq_cls(left, right, highlighter, verbose) + elif issequence(left) and issequence(right): + explanation = _compare_eq_sequence(left, right, highlighter, verbose) + elif isset(left) and isset(right): + explanation = _compare_eq_set(left, right, highlighter, verbose) + elif isdict(left) and isdict(right): + explanation = _compare_eq_dict(left, right, highlighter, verbose) + + if isiterable(left) and isiterable(right): + expl = _compare_eq_iterable(left, right, highlighter, verbose) + explanation.extend(expl) + + return explanation + + +def _diff_text(left: str, right: str, verbose: int = 0) -> List[str]: + """Return the explanation for the diff between text. + + Unless --verbose is used this will skip leading and trailing + characters which are identical to keep the diff minimal. 
+ """ + from difflib import ndiff + + explanation: List[str] = [] + + if verbose < 1: + i = 0 # just in case left or right has zero length + for i in range(min(len(left), len(right))): + if left[i] != right[i]: + break + if i > 42: + i -= 10 # Provide some context + explanation = [ + "Skipping %s identical leading characters in diff, use -v to show" % i + ] + left = left[i:] + right = right[i:] + if len(left) == len(right): + for i in range(len(left)): + if left[-i] != right[-i]: + break + if i > 42: + i -= 10 # Provide some context + explanation += [ + f"Skipping {i} identical trailing " + "characters in diff, use -v to show" + ] + left = left[:-i] + right = right[:-i] + keepends = True + if left.isspace() or right.isspace(): + left = repr(str(left)) + right = repr(str(right)) + explanation += ["Strings contain only whitespace, escaping them using repr()"] + # "right" is the expected base against which we compare "left", + # see https://github.com/pytest-dev/pytest/issues/3333 + explanation += [ + line.strip("\n") + for line in ndiff(right.splitlines(keepends), left.splitlines(keepends)) + ] + return explanation + + +def _compare_eq_iterable( + left: Iterable[Any], + right: Iterable[Any], + highligher: _HighlightFunc, + verbose: int = 0, +) -> List[str]: + if verbose <= 0 and not running_on_ci(): + return ["Use -v to get more diff"] + # dynamic import to speedup pytest + import difflib + + left_formatting = PrettyPrinter().pformat(left).splitlines() + right_formatting = PrettyPrinter().pformat(right).splitlines() + + explanation = ["", "Full diff:"] + # "right" is the expected base against which we compare "left", + # see https://github.com/pytest-dev/pytest/issues/3333 + explanation.extend( + highligher( + "\n".join( + line.rstrip() + for line in difflib.ndiff(right_formatting, left_formatting) + ), + lexer="diff", + ).splitlines() + ) + return explanation + + +def _compare_eq_sequence( + left: Sequence[Any], + right: Sequence[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> List[str]: + comparing_bytes = isinstance(left, bytes) and isinstance(right, bytes) + explanation: List[str] = [] + len_left = len(left) + len_right = len(right) + for i in range(min(len_left, len_right)): + if left[i] != right[i]: + if comparing_bytes: + # when comparing bytes, we want to see their ascii representation + # instead of their numeric values (#5260) + # using a slice gives us the ascii representation: + # >>> s = b'foo' + # >>> s[0] + # 102 + # >>> s[0:1] + # b'f' + left_value = left[i : i + 1] + right_value = right[i : i + 1] + else: + left_value = left[i] + right_value = right[i] + + explanation.append( + f"At index {i} diff:" + f" {highlighter(repr(left_value))} != {highlighter(repr(right_value))}" + ) + break + + if comparing_bytes: + # when comparing bytes, it doesn't help to show the "sides contain one or more + # items" longer explanation, so skip it + + return explanation + + len_diff = len_left - len_right + if len_diff: + if len_diff > 0: + dir_with_more = "Left" + extra = saferepr(left[len_right]) + else: + len_diff = 0 - len_diff + dir_with_more = "Right" + extra = saferepr(right[len_left]) + + if len_diff == 1: + explanation += [ + f"{dir_with_more} contains one more item: {highlighter(extra)}" + ] + else: + explanation += [ + "%s contains %d more items, first extra item: %s" + % (dir_with_more, len_diff, highlighter(extra)) + ] + return explanation + + +def _compare_eq_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, 
+) -> List[str]: + explanation = [] + explanation.extend(_set_one_sided_diff("left", left, right, highlighter)) + explanation.extend(_set_one_sided_diff("right", right, left, highlighter)) + return explanation + + +def _compare_gt_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> List[str]: + explanation = _compare_gte_set(left, right, highlighter) + if not explanation: + return ["Both sets are equal"] + return explanation + + +def _compare_lt_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> List[str]: + explanation = _compare_lte_set(left, right, highlighter) + if not explanation: + return ["Both sets are equal"] + return explanation + + +def _compare_gte_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> List[str]: + return _set_one_sided_diff("right", right, left, highlighter) + + +def _compare_lte_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> List[str]: + return _set_one_sided_diff("left", left, right, highlighter) + + +def _set_one_sided_diff( + posn: str, + set1: AbstractSet[Any], + set2: AbstractSet[Any], + highlighter: _HighlightFunc, +) -> List[str]: + explanation = [] + diff = set1 - set2 + if diff: + explanation.append(f"Extra items in the {posn} set:") + for item in diff: + explanation.append(highlighter(saferepr(item))) + return explanation + + +def _compare_eq_dict( + left: Mapping[Any, Any], + right: Mapping[Any, Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> List[str]: + explanation: List[str] = [] + set_left = set(left) + set_right = set(right) + common = set_left.intersection(set_right) + same = {k: left[k] for k in common if left[k] == right[k]} + if same and verbose < 2: + explanation += ["Omitting %s identical items, use -vv to show" % len(same)] + elif same: + explanation += ["Common items:"] + explanation += highlighter(pprint.pformat(same)).splitlines() + diff = {k for k in common if left[k] != right[k]} + if diff: + explanation += ["Differing items:"] + for k in diff: + explanation += [ + highlighter(saferepr({k: left[k]})) + + " != " + + highlighter(saferepr({k: right[k]})) + ] + extra_left = set_left - set_right + len_extra_left = len(extra_left) + if len_extra_left: + explanation.append( + "Left contains %d more item%s:" + % (len_extra_left, "" if len_extra_left == 1 else "s") + ) + explanation.extend( + highlighter(pprint.pformat({k: left[k] for k in extra_left})).splitlines() + ) + extra_right = set_right - set_left + len_extra_right = len(extra_right) + if len_extra_right: + explanation.append( + "Right contains %d more item%s:" + % (len_extra_right, "" if len_extra_right == 1 else "s") + ) + explanation.extend( + highlighter(pprint.pformat({k: right[k] for k in extra_right})).splitlines() + ) + return explanation + + +def _compare_eq_cls( + left: Any, right: Any, highlighter: _HighlightFunc, verbose: int +) -> List[str]: + if not has_default_eq(left): + return [] + if isdatacls(left): + import dataclasses + + all_fields = dataclasses.fields(left) + fields_to_check = [info.name for info in all_fields if info.compare] + elif isattrs(left): + all_fields = left.__attrs_attrs__ + fields_to_check = [field.name for field in all_fields if getattr(field, "eq")] + elif isnamedtuple(left): + fields_to_check = left._fields + else: + assert False + + indent = " " + same = [] + diff = [] + for 
field in fields_to_check: + if getattr(left, field) == getattr(right, field): + same.append(field) + else: + diff.append(field) + + explanation = [] + if same or diff: + explanation += [""] + if same and verbose < 2: + explanation.append("Omitting %s identical items, use -vv to show" % len(same)) + elif same: + explanation += ["Matching attributes:"] + explanation += highlighter(pprint.pformat(same)).splitlines() + if diff: + explanation += ["Differing attributes:"] + explanation += highlighter(pprint.pformat(diff)).splitlines() + for field in diff: + field_left = getattr(left, field) + field_right = getattr(right, field) + explanation += [ + "", + f"Drill down into differing attribute {field}:", + f"{indent}{field}: {highlighter(repr(field_left))} != {highlighter(repr(field_right))}", + ] + explanation += [ + indent + line + for line in _compare_eq_any( + field_left, field_right, highlighter, verbose + ) + ] + return explanation + + +def _notin_text(term: str, text: str, verbose: int = 0) -> List[str]: + index = text.find(term) + head = text[:index] + tail = text[index + len(term) :] + correct_text = head + tail + diff = _diff_text(text, correct_text, verbose) + newdiff = ["%s is contained here:" % saferepr(term, maxsize=42)] + for line in diff: + if line.startswith("Skipping"): + continue + if line.startswith("- "): + continue + if line.startswith("+ "): + newdiff.append(" " + line[2:]) + else: + newdiff.append(line) + return newdiff + + +def running_on_ci() -> bool: + """Check if we're currently running on a CI system.""" + env_vars = ["CI", "BUILD_NUMBER"] + return any(var in os.environ for var in env_vars) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/cacheprovider.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/cacheprovider.py new file mode 100644 index 000000000..e9f66f1f4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/cacheprovider.py @@ -0,0 +1,612 @@ +# mypy: allow-untyped-defs +"""Implementation of the cache provider.""" + +# This plugin was not named "cache" to avoid conflicts with the external +# pytest-cache version. +import dataclasses +import json +import os +from pathlib import Path +import tempfile +from typing import Dict +from typing import final +from typing import Generator +from typing import Iterable +from typing import List +from typing import Optional +from typing import Set +from typing import Union + +from .pathlib import resolve_from_str +from .pathlib import rm_rf +from .reports import CollectReport +from _pytest import nodes +from _pytest._io import TerminalWriter +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.nodes import Directory +from _pytest.nodes import File +from _pytest.reports import TestReport + + +README_CONTENT = """\ +# pytest cache directory # + +This directory contains data from the pytest's cache plugin, +which provides the `--lf` and `--ff` options, as well as the `cache` fixture. + +**Do not** commit this to version control. + +See [the docs](https://docs.pytest.org/en/stable/how-to/cache.html) for more information. +""" + +CACHEDIR_TAG_CONTENT = b"""\ +Signature: 8a477f597d28d172789f06886806bc55 +# This file is a cache directory tag created by pytest. 
+# For information about cache directory tags, see: +# https://bford.info/cachedir/spec.html +""" + + +@final +@dataclasses.dataclass +class Cache: + """Instance of the `cache` fixture.""" + + _cachedir: Path = dataclasses.field(repr=False) + _config: Config = dataclasses.field(repr=False) + + # Sub-directory under cache-dir for directories created by `mkdir()`. + _CACHE_PREFIX_DIRS = "d" + + # Sub-directory under cache-dir for values created by `set()`. + _CACHE_PREFIX_VALUES = "v" + + def __init__( + self, cachedir: Path, config: Config, *, _ispytest: bool = False + ) -> None: + check_ispytest(_ispytest) + self._cachedir = cachedir + self._config = config + + @classmethod + def for_config(cls, config: Config, *, _ispytest: bool = False) -> "Cache": + """Create the Cache instance for a Config. + + :meta private: + """ + check_ispytest(_ispytest) + cachedir = cls.cache_dir_from_config(config, _ispytest=True) + if config.getoption("cacheclear") and cachedir.is_dir(): + cls.clear_cache(cachedir, _ispytest=True) + return cls(cachedir, config, _ispytest=True) + + @classmethod + def clear_cache(cls, cachedir: Path, _ispytest: bool = False) -> None: + """Clear the sub-directories used to hold cached directories and values. + + :meta private: + """ + check_ispytest(_ispytest) + for prefix in (cls._CACHE_PREFIX_DIRS, cls._CACHE_PREFIX_VALUES): + d = cachedir / prefix + if d.is_dir(): + rm_rf(d) + + @staticmethod + def cache_dir_from_config(config: Config, *, _ispytest: bool = False) -> Path: + """Get the path to the cache directory for a Config. + + :meta private: + """ + check_ispytest(_ispytest) + return resolve_from_str(config.getini("cache_dir"), config.rootpath) + + def warn(self, fmt: str, *, _ispytest: bool = False, **args: object) -> None: + """Issue a cache warning. + + :meta private: + """ + check_ispytest(_ispytest) + import warnings + + from _pytest.warning_types import PytestCacheWarning + + warnings.warn( + PytestCacheWarning(fmt.format(**args) if args else fmt), + self._config.hook, + stacklevel=3, + ) + + def _mkdir(self, path: Path) -> None: + self._ensure_cache_dir_and_supporting_files() + path.mkdir(exist_ok=True, parents=True) + + def mkdir(self, name: str) -> Path: + """Return a directory path object with the given name. + + If the directory does not yet exist, it will be created. You can use + it to manage files to e.g. store/retrieve database dumps across test + sessions. + + .. versionadded:: 7.0 + + :param name: + Must be a string not containing a ``/`` separator. + Make sure the name contains your plugin or application + identifiers to prevent clashes with other cache users. + """ + path = Path(name) + if len(path.parts) > 1: + raise ValueError("name is not allowed to contain path separators") + res = self._cachedir.joinpath(self._CACHE_PREFIX_DIRS, path) + self._mkdir(res) + return res + + def _getvaluepath(self, key: str) -> Path: + return self._cachedir.joinpath(self._CACHE_PREFIX_VALUES, Path(key)) + + def get(self, key: str, default): + """Return the cached value for the given key. + + If no value was yet cached or the value cannot be read, the specified + default is returned. + + :param key: + Must be a ``/`` separated value. Usually the first + name is the name of your plugin or your application. + :param default: + The value to return in case of a cache-miss or invalid cache value. 
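+
+        For example (illustrative)::
+
+            last_failed = cache.get("cache/lastfailed", {})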
+ """ + path = self._getvaluepath(key) + try: + with path.open("r", encoding="UTF-8") as f: + return json.load(f) + except (ValueError, OSError): + return default + + def set(self, key: str, value: object) -> None: + """Save value for the given key. + + :param key: + Must be a ``/`` separated value. Usually the first + name is the name of your plugin or your application. + :param value: + Must be of any combination of basic python types, + including nested types like lists of dictionaries. + """ + path = self._getvaluepath(key) + try: + self._mkdir(path.parent) + except OSError as exc: + self.warn( + f"could not create cache path {path}: {exc}", + _ispytest=True, + ) + return + data = json.dumps(value, ensure_ascii=False, indent=2) + try: + f = path.open("w", encoding="UTF-8") + except OSError as exc: + self.warn( + f"cache could not write path {path}: {exc}", + _ispytest=True, + ) + else: + with f: + f.write(data) + + def _ensure_cache_dir_and_supporting_files(self) -> None: + """Create the cache dir and its supporting files.""" + if self._cachedir.is_dir(): + return + + self._cachedir.parent.mkdir(parents=True, exist_ok=True) + with tempfile.TemporaryDirectory( + prefix="pytest-cache-files-", + dir=self._cachedir.parent, + ) as newpath: + path = Path(newpath) + with open(path.joinpath("README.md"), "xt", encoding="UTF-8") as f: + f.write(README_CONTENT) + with open(path.joinpath(".gitignore"), "xt", encoding="UTF-8") as f: + f.write("# Created by pytest automatically.\n*\n") + with open(path.joinpath("CACHEDIR.TAG"), "xb") as f: + f.write(CACHEDIR_TAG_CONTENT) + + path.rename(self._cachedir) + # Create a directory in place of the one we just moved so that `TemporaryDirectory`'s + # cleanup doesn't complain. + # + # TODO: pass ignore_cleanup_errors=True when we no longer support python < 3.10. See + # https://github.com/python/cpython/issues/74168. Note that passing delete=False would + # do the wrong thing in case of errors and isn't supported until python 3.12. + path.mkdir() + + +class LFPluginCollWrapper: + def __init__(self, lfplugin: "LFPlugin") -> None: + self.lfplugin = lfplugin + self._collected_at_least_one_failure = False + + @hookimpl(wrapper=True) + def pytest_make_collect_report( + self, collector: nodes.Collector + ) -> Generator[None, CollectReport, CollectReport]: + res = yield + if isinstance(collector, (Session, Directory)): + # Sort any lf-paths to the beginning. + lf_paths = self.lfplugin._last_failed_paths + + # Use stable sort to priorize last failed. + def sort_key(node: Union[nodes.Item, nodes.Collector]) -> bool: + return node.path in lf_paths + + res.result = sorted( + res.result, + key=sort_key, + reverse=True, + ) + + elif isinstance(collector, File): + if collector.path in self.lfplugin._last_failed_paths: + result = res.result + lastfailed = self.lfplugin.lastfailed + + # Only filter with known failures. + if not self._collected_at_least_one_failure: + if not any(x.nodeid in lastfailed for x in result): + return res + self.lfplugin.config.pluginmanager.register( + LFPluginCollSkipfiles(self.lfplugin), "lfplugin-collskip" + ) + self._collected_at_least_one_failure = True + + session = collector.session + result[:] = [ + x + for x in result + if x.nodeid in lastfailed + # Include any passed arguments (not trivial to filter). + or session.isinitpath(x.path) + # Keep all sub-collectors. 
+ or isinstance(x, nodes.Collector) + ] + + return res + + +class LFPluginCollSkipfiles: + def __init__(self, lfplugin: "LFPlugin") -> None: + self.lfplugin = lfplugin + + @hookimpl + def pytest_make_collect_report( + self, collector: nodes.Collector + ) -> Optional[CollectReport]: + if isinstance(collector, File): + if collector.path not in self.lfplugin._last_failed_paths: + self.lfplugin._skipped_files += 1 + + return CollectReport( + collector.nodeid, "passed", longrepr=None, result=[] + ) + return None + + +class LFPlugin: + """Plugin which implements the --lf (run last-failing) option.""" + + def __init__(self, config: Config) -> None: + self.config = config + active_keys = "lf", "failedfirst" + self.active = any(config.getoption(key) for key in active_keys) + assert config.cache + self.lastfailed: Dict[str, bool] = config.cache.get("cache/lastfailed", {}) + self._previously_failed_count: Optional[int] = None + self._report_status: Optional[str] = None + self._skipped_files = 0 # count skipped files during collection due to --lf + + if config.getoption("lf"): + self._last_failed_paths = self.get_last_failed_paths() + config.pluginmanager.register( + LFPluginCollWrapper(self), "lfplugin-collwrapper" + ) + + def get_last_failed_paths(self) -> Set[Path]: + """Return a set with all Paths of the previously failed nodeids and + their parents.""" + rootpath = self.config.rootpath + result = set() + for nodeid in self.lastfailed: + path = rootpath / nodeid.split("::")[0] + result.add(path) + result.update(path.parents) + return {x for x in result if x.exists()} + + def pytest_report_collectionfinish(self) -> Optional[str]: + if self.active and self.config.getoption("verbose") >= 0: + return "run-last-failure: %s" % self._report_status + return None + + def pytest_runtest_logreport(self, report: TestReport) -> None: + if (report.when == "call" and report.passed) or report.skipped: + self.lastfailed.pop(report.nodeid, None) + elif report.failed: + self.lastfailed[report.nodeid] = True + + def pytest_collectreport(self, report: CollectReport) -> None: + passed = report.outcome in ("passed", "skipped") + if passed: + if report.nodeid in self.lastfailed: + self.lastfailed.pop(report.nodeid) + self.lastfailed.update((item.nodeid, True) for item in report.result) + else: + self.lastfailed[report.nodeid] = True + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_collection_modifyitems( + self, config: Config, items: List[nodes.Item] + ) -> Generator[None, None, None]: + res = yield + + if not self.active: + return res + + if self.lastfailed: + previously_failed = [] + previously_passed = [] + for item in items: + if item.nodeid in self.lastfailed: + previously_failed.append(item) + else: + previously_passed.append(item) + self._previously_failed_count = len(previously_failed) + + if not previously_failed: + # Running a subset of all tests with recorded failures + # only outside of it. 
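+                # e.g. "pytest test_a.py" when every recorded failure lives in
+                # another file: nothing is filtered out, we only report how
+                # many known failures fall outside the current selection.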
+ self._report_status = "%d known failures not in selected tests" % ( + len(self.lastfailed), + ) + else: + if self.config.getoption("lf"): + items[:] = previously_failed + config.hook.pytest_deselected(items=previously_passed) + else: # --failedfirst + items[:] = previously_failed + previously_passed + + noun = "failure" if self._previously_failed_count == 1 else "failures" + suffix = " first" if self.config.getoption("failedfirst") else "" + self._report_status = ( + f"rerun previous {self._previously_failed_count} {noun}{suffix}" + ) + + if self._skipped_files > 0: + files_noun = "file" if self._skipped_files == 1 else "files" + self._report_status += f" (skipped {self._skipped_files} {files_noun})" + else: + self._report_status = "no previously failed tests, " + if self.config.getoption("last_failed_no_failures") == "none": + self._report_status += "deselecting all items." + config.hook.pytest_deselected(items=items[:]) + items[:] = [] + else: + self._report_status += "not deselecting items." + + return res + + def pytest_sessionfinish(self, session: Session) -> None: + config = self.config + if config.getoption("cacheshow") or hasattr(config, "workerinput"): + return + + assert config.cache is not None + saved_lastfailed = config.cache.get("cache/lastfailed", {}) + if saved_lastfailed != self.lastfailed: + config.cache.set("cache/lastfailed", self.lastfailed) + + +class NFPlugin: + """Plugin which implements the --nf (run new-first) option.""" + + def __init__(self, config: Config) -> None: + self.config = config + self.active = config.option.newfirst + assert config.cache is not None + self.cached_nodeids = set(config.cache.get("cache/nodeids", [])) + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_collection_modifyitems( + self, items: List[nodes.Item] + ) -> Generator[None, None, None]: + res = yield + + if self.active: + new_items: Dict[str, nodes.Item] = {} + other_items: Dict[str, nodes.Item] = {} + for item in items: + if item.nodeid not in self.cached_nodeids: + new_items[item.nodeid] = item + else: + other_items[item.nodeid] = item + + items[:] = self._get_increasing_order( + new_items.values() + ) + self._get_increasing_order(other_items.values()) + self.cached_nodeids.update(new_items) + else: + self.cached_nodeids.update(item.nodeid for item in items) + + return res + + def _get_increasing_order(self, items: Iterable[nodes.Item]) -> List[nodes.Item]: + return sorted(items, key=lambda item: item.path.stat().st_mtime, reverse=True) + + def pytest_sessionfinish(self) -> None: + config = self.config + if config.getoption("cacheshow") or hasattr(config, "workerinput"): + return + + if config.getoption("collectonly"): + return + + assert config.cache is not None + config.cache.set("cache/nodeids", sorted(self.cached_nodeids)) + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--lf", + "--last-failed", + action="store_true", + dest="lf", + help="Rerun only the tests that failed " + "at the last run (or all if none failed)", + ) + group.addoption( + "--ff", + "--failed-first", + action="store_true", + dest="failedfirst", + help="Run all tests, but run the last failures first. 
" + "This may re-order tests and thus lead to " + "repeated fixture setup/teardown.", + ) + group.addoption( + "--nf", + "--new-first", + action="store_true", + dest="newfirst", + help="Run tests from new files first, then the rest of the tests " + "sorted by file mtime", + ) + group.addoption( + "--cache-show", + action="append", + nargs="?", + dest="cacheshow", + help=( + "Show cache contents, don't perform collection or tests. " + "Optional argument: glob (default: '*')." + ), + ) + group.addoption( + "--cache-clear", + action="store_true", + dest="cacheclear", + help="Remove all cache contents at start of test run", + ) + cache_dir_default = ".pytest_cache" + if "TOX_ENV_DIR" in os.environ: + cache_dir_default = os.path.join(os.environ["TOX_ENV_DIR"], cache_dir_default) + parser.addini("cache_dir", default=cache_dir_default, help="Cache directory path") + group.addoption( + "--lfnf", + "--last-failed-no-failures", + action="store", + dest="last_failed_no_failures", + choices=("all", "none"), + default="all", + help="With ``--lf``, determines whether to execute tests when there " + "are no previously (known) failures or when no " + "cached ``lastfailed`` data was found. " + "``all`` (the default) runs the full test suite again. " + "``none`` just emits a message about no known failures and exits successfully.", + ) + + +def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]: + if config.option.cacheshow and not config.option.help: + from _pytest.main import wrap_session + + return wrap_session(config, cacheshow) + return None + + +@hookimpl(tryfirst=True) +def pytest_configure(config: Config) -> None: + config.cache = Cache.for_config(config, _ispytest=True) + config.pluginmanager.register(LFPlugin(config), "lfplugin") + config.pluginmanager.register(NFPlugin(config), "nfplugin") + + +@fixture +def cache(request: FixtureRequest) -> Cache: + """Return a cache object that can persist state between testing sessions. + + cache.get(key, default) + cache.set(key, value) + + Keys must be ``/`` separated strings, where the first part is usually the + name of your plugin or application to avoid clashes with other cache users. + + Values can be any object handled by the json stdlib module. + """ + assert request.config.cache is not None + return request.config.cache + + +def pytest_report_header(config: Config) -> Optional[str]: + """Display cachedir with --cache-show and if non-default.""" + if config.option.verbose > 0 or config.getini("cache_dir") != ".pytest_cache": + assert config.cache is not None + cachedir = config.cache._cachedir + # TODO: evaluate generating upward relative paths + # starting with .., ../.. 
if sensible + + try: + displaypath = cachedir.relative_to(config.rootpath) + except ValueError: + displaypath = cachedir + return f"cachedir: {displaypath}" + return None + + +def cacheshow(config: Config, session: Session) -> int: + from pprint import pformat + + assert config.cache is not None + + tw = TerminalWriter() + tw.line("cachedir: " + str(config.cache._cachedir)) + if not config.cache._cachedir.is_dir(): + tw.line("cache is empty") + return 0 + + glob = config.option.cacheshow[0] + if glob is None: + glob = "*" + + dummy = object() + basedir = config.cache._cachedir + vdir = basedir / Cache._CACHE_PREFIX_VALUES + tw.sep("-", "cache values for %r" % glob) + for valpath in sorted(x for x in vdir.rglob(glob) if x.is_file()): + key = str(valpath.relative_to(vdir)) + val = config.cache.get(key, dummy) + if val is dummy: + tw.line("%s contains unreadable content, will be ignored" % key) + else: + tw.line("%s contains:" % key) + for line in pformat(val).splitlines(): + tw.line(" " + line) + + ddir = basedir / Cache._CACHE_PREFIX_DIRS + if ddir.is_dir(): + contents = sorted(ddir.rglob(glob)) + tw.sep("-", "cache directories for %r" % glob) + for p in contents: + # if p.is_dir(): + # print("%s/" % p.relative_to(basedir)) + if p.is_file(): + key = str(p.relative_to(basedir)) + tw.line(f"{key} is a file of length {p.stat().st_size:d}") + return 0 diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/capture.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/capture.py new file mode 100644 index 000000000..3f6a25103 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/capture.py @@ -0,0 +1,1081 @@ +# mypy: allow-untyped-defs +"""Per-test stdout/stderr capturing mechanism.""" + +import abc +import collections +import contextlib +import io +from io import UnsupportedOperation +import os +import sys +from tempfile import TemporaryFile +from types import TracebackType +from typing import Any +from typing import AnyStr +from typing import BinaryIO +from typing import Final +from typing import final +from typing import Generator +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Literal +from typing import NamedTuple +from typing import Optional +from typing import TextIO +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import Union + +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import SubRequest +from _pytest.nodes import Collector +from _pytest.nodes import File +from _pytest.nodes import Item +from _pytest.reports import CollectReport + + +_CaptureMethod = Literal["fd", "sys", "no", "tee-sys"] + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group._addoption( + "--capture", + action="store", + default="fd", + metavar="method", + choices=["fd", "sys", "no", "tee-sys"], + help="Per-test capturing method: one of fd|sys|no|tee-sys", + ) + group._addoption( + "-s", + action="store_const", + const="no", + dest="capture", + help="Shortcut for --capture=no", + ) + + +def _colorama_workaround() -> None: + """Ensure colorama is imported so that it attaches to the correct stdio + handles on Windows. + + colorama uses the terminal on import time. 
So if something does the + first import of colorama while I/O capture is active, colorama will + fail in various ways. + """ + if sys.platform.startswith("win32"): + try: + import colorama # noqa: F401 + except ImportError: + pass + + +def _windowsconsoleio_workaround(stream: TextIO) -> None: + """Workaround for Windows Unicode console handling. + + Python 3.6 implemented Unicode console handling for Windows. This works + by reading/writing to the raw console handle using + ``{Read,Write}ConsoleW``. + + The problem is that we are going to ``dup2`` over the stdio file + descriptors when doing ``FDCapture`` and this will ``CloseHandle`` the + handles used by Python to write to the console. Though there is still some + weirdness and the console handle seems to only be closed randomly and not + on the first call to ``CloseHandle``, or maybe it gets reopened with the + same handle value when we suspend capturing. + + The workaround in this case will reopen stdio with a different fd which + also means a different handle by replicating the logic in + "Py_lifecycle.c:initstdio/create_stdio". + + :param stream: + In practice ``sys.stdout`` or ``sys.stderr``, but given + here as parameter for unittesting purposes. + + See https://github.com/pytest-dev/py/issues/103. + """ + if not sys.platform.startswith("win32") or hasattr(sys, "pypy_version_info"): + return + + # Bail out if ``stream`` doesn't seem like a proper ``io`` stream (#2666). + if not hasattr(stream, "buffer"): # type: ignore[unreachable,unused-ignore] + return + + raw_stdout = stream.buffer.raw if hasattr(stream.buffer, "raw") else stream.buffer + + if not isinstance(raw_stdout, io._WindowsConsoleIO): # type: ignore[attr-defined,unused-ignore] + return + + def _reopen_stdio(f, mode): + if not hasattr(stream.buffer, "raw") and mode[0] == "w": + buffering = 0 + else: + buffering = -1 + + return io.TextIOWrapper( + open(os.dup(f.fileno()), mode, buffering), + f.encoding, + f.errors, + f.newlines, + f.line_buffering, + ) + + sys.stdin = _reopen_stdio(sys.stdin, "rb") + sys.stdout = _reopen_stdio(sys.stdout, "wb") + sys.stderr = _reopen_stdio(sys.stderr, "wb") + + +@hookimpl(wrapper=True) +def pytest_load_initial_conftests(early_config: Config) -> Generator[None, None, None]: + ns = early_config.known_args_namespace + if ns.capture == "fd": + _windowsconsoleio_workaround(sys.stdout) + _colorama_workaround() + pluginmanager = early_config.pluginmanager + capman = CaptureManager(ns.capture) + pluginmanager.register(capman, "capturemanager") + + # Make sure that capturemanager is properly reset at final shutdown. + early_config.add_cleanup(capman.stop_global_capturing) + + # Finally trigger conftest loading but while capturing (issue #93). + capman.start_global_capturing() + try: + try: + yield + finally: + capman.suspend_global_capture() + except BaseException: + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stderr.write(err) + raise + + +# IO Helpers. + + +class EncodedFile(io.TextIOWrapper): + __slots__ = () + + @property + def name(self) -> str: + # Ensure that file.name is a string. Workaround for a Python bug + # fixed in >=3.7.4: https://bugs.python.org/issue36015 + return repr(self.buffer) + + @property + def mode(self) -> str: + # TextIOWrapper doesn't expose a mode, but at least some of our + # tests check it. 
+ return self.buffer.mode.replace("b", "") + + +class CaptureIO(io.TextIOWrapper): + def __init__(self) -> None: + super().__init__(io.BytesIO(), encoding="UTF-8", newline="", write_through=True) + + def getvalue(self) -> str: + assert isinstance(self.buffer, io.BytesIO) + return self.buffer.getvalue().decode("UTF-8") + + +class TeeCaptureIO(CaptureIO): + def __init__(self, other: TextIO) -> None: + self._other = other + super().__init__() + + def write(self, s: str) -> int: + super().write(s) + return self._other.write(s) + + +class DontReadFromInput(TextIO): + @property + def encoding(self) -> str: + return sys.__stdin__.encoding + + def read(self, size: int = -1) -> str: + raise OSError( + "pytest: reading from stdin while output is captured! Consider using `-s`." + ) + + readline = read + + def __next__(self) -> str: + return self.readline() + + def readlines(self, hint: Optional[int] = -1) -> List[str]: + raise OSError( + "pytest: reading from stdin while output is captured! Consider using `-s`." + ) + + def __iter__(self) -> Iterator[str]: + return self + + def fileno(self) -> int: + raise UnsupportedOperation("redirected stdin is pseudofile, has no fileno()") + + def flush(self) -> None: + raise UnsupportedOperation("redirected stdin is pseudofile, has no flush()") + + def isatty(self) -> bool: + return False + + def close(self) -> None: + pass + + def readable(self) -> bool: + return False + + def seek(self, offset: int, whence: int = 0) -> int: + raise UnsupportedOperation("redirected stdin is pseudofile, has no seek(int)") + + def seekable(self) -> bool: + return False + + def tell(self) -> int: + raise UnsupportedOperation("redirected stdin is pseudofile, has no tell()") + + def truncate(self, size: Optional[int] = None) -> int: + raise UnsupportedOperation("cannot truncate stdin") + + def write(self, data: str) -> int: + raise UnsupportedOperation("cannot write to stdin") + + def writelines(self, lines: Iterable[str]) -> None: + raise UnsupportedOperation("Cannot write to stdin") + + def writable(self) -> bool: + return False + + def __enter__(self) -> "DontReadFromInput": + return self + + def __exit__( + self, + type: Optional[Type[BaseException]], + value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + pass + + @property + def buffer(self) -> BinaryIO: + # The str/bytes doesn't actually matter in this type, so OK to fake. + return self # type: ignore[return-value] + + +# Capture classes. 
+ + +class CaptureBase(abc.ABC, Generic[AnyStr]): + EMPTY_BUFFER: AnyStr + + @abc.abstractmethod + def __init__(self, fd: int) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def start(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def done(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def suspend(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def resume(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def writeorg(self, data: AnyStr) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def snap(self) -> AnyStr: + raise NotImplementedError() + + +patchsysdict = {0: "stdin", 1: "stdout", 2: "stderr"} + + +class NoCapture(CaptureBase[str]): + EMPTY_BUFFER = "" + + def __init__(self, fd: int) -> None: + pass + + def start(self) -> None: + pass + + def done(self) -> None: + pass + + def suspend(self) -> None: + pass + + def resume(self) -> None: + pass + + def snap(self) -> str: + return "" + + def writeorg(self, data: str) -> None: + pass + + +class SysCaptureBase(CaptureBase[AnyStr]): + def __init__( + self, fd: int, tmpfile: Optional[TextIO] = None, *, tee: bool = False + ) -> None: + name = patchsysdict[fd] + self._old: TextIO = getattr(sys, name) + self.name = name + if tmpfile is None: + if name == "stdin": + tmpfile = DontReadFromInput() + else: + tmpfile = CaptureIO() if not tee else TeeCaptureIO(self._old) + self.tmpfile = tmpfile + self._state = "initialized" + + def repr(self, class_name: str) -> str: + return "<{} {} _old={} _state={!r} tmpfile={!r}>".format( + class_name, + self.name, + hasattr(self, "_old") and repr(self._old) or "", + self._state, + self.tmpfile, + ) + + def __repr__(self) -> str: + return "<{} {} _old={} _state={!r} tmpfile={!r}>".format( + self.__class__.__name__, + self.name, + hasattr(self, "_old") and repr(self._old) or "", + self._state, + self.tmpfile, + ) + + def _assert_state(self, op: str, states: Tuple[str, ...]) -> None: + assert ( + self._state in states + ), "cannot {} in state {!r}: expected one of {}".format( + op, self._state, ", ".join(states) + ) + + def start(self) -> None: + self._assert_state("start", ("initialized",)) + setattr(sys, self.name, self.tmpfile) + self._state = "started" + + def done(self) -> None: + self._assert_state("done", ("initialized", "started", "suspended", "done")) + if self._state == "done": + return + setattr(sys, self.name, self._old) + del self._old + self.tmpfile.close() + self._state = "done" + + def suspend(self) -> None: + self._assert_state("suspend", ("started", "suspended")) + setattr(sys, self.name, self._old) + self._state = "suspended" + + def resume(self) -> None: + self._assert_state("resume", ("started", "suspended")) + if self._state == "started": + return + setattr(sys, self.name, self.tmpfile) + self._state = "started" + + +class SysCaptureBinary(SysCaptureBase[bytes]): + EMPTY_BUFFER = b"" + + def snap(self) -> bytes: + self._assert_state("snap", ("started", "suspended")) + self.tmpfile.seek(0) + res = self.tmpfile.buffer.read() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: bytes) -> None: + self._assert_state("writeorg", ("started", "suspended")) + self._old.flush() + self._old.buffer.write(data) + self._old.buffer.flush() + + +class SysCapture(SysCaptureBase[str]): + EMPTY_BUFFER = "" + + def snap(self) -> str: + self._assert_state("snap", ("started", "suspended")) + assert isinstance(self.tmpfile, CaptureIO) + res = 
self.tmpfile.getvalue() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: str) -> None: + self._assert_state("writeorg", ("started", "suspended")) + self._old.write(data) + self._old.flush() + + +class FDCaptureBase(CaptureBase[AnyStr]): + def __init__(self, targetfd: int) -> None: + self.targetfd = targetfd + + try: + os.fstat(targetfd) + except OSError: + # FD capturing is conceptually simple -- create a temporary file, + # redirect the FD to it, redirect back when done. But when the + # target FD is invalid it throws a wrench into this lovely scheme. + # + # Tests themselves shouldn't care if the FD is valid, FD capturing + # should work regardless of external circumstances. So falling back + # to just sys capturing is not a good option. + # + # Further complications are the need to support suspend() and the + # possibility of FD reuse (e.g. the tmpfile getting the very same + # target FD). The following approach is robust, I believe. + self.targetfd_invalid: Optional[int] = os.open(os.devnull, os.O_RDWR) + os.dup2(self.targetfd_invalid, targetfd) + else: + self.targetfd_invalid = None + self.targetfd_save = os.dup(targetfd) + + if targetfd == 0: + self.tmpfile = open(os.devnull, encoding="utf-8") + self.syscapture: CaptureBase[str] = SysCapture(targetfd) + else: + self.tmpfile = EncodedFile( + TemporaryFile(buffering=0), + encoding="utf-8", + errors="replace", + newline="", + write_through=True, + ) + if targetfd in patchsysdict: + self.syscapture = SysCapture(targetfd, self.tmpfile) + else: + self.syscapture = NoCapture(targetfd) + + self._state = "initialized" + + def __repr__(self) -> str: + return ( + f"<{self.__class__.__name__} {self.targetfd} oldfd={self.targetfd_save} " + f"_state={self._state!r} tmpfile={self.tmpfile!r}>" + ) + + def _assert_state(self, op: str, states: Tuple[str, ...]) -> None: + assert ( + self._state in states + ), "cannot {} in state {!r}: expected one of {}".format( + op, self._state, ", ".join(states) + ) + + def start(self) -> None: + """Start capturing on targetfd using memorized tmpfile.""" + self._assert_state("start", ("initialized",)) + os.dup2(self.tmpfile.fileno(), self.targetfd) + self.syscapture.start() + self._state = "started" + + def done(self) -> None: + """Stop capturing, restore streams, return original capture file, + seeked to position zero.""" + self._assert_state("done", ("initialized", "started", "suspended", "done")) + if self._state == "done": + return + os.dup2(self.targetfd_save, self.targetfd) + os.close(self.targetfd_save) + if self.targetfd_invalid is not None: + if self.targetfd_invalid != self.targetfd: + os.close(self.targetfd) + os.close(self.targetfd_invalid) + self.syscapture.done() + self.tmpfile.close() + self._state = "done" + + def suspend(self) -> None: + self._assert_state("suspend", ("started", "suspended")) + if self._state == "suspended": + return + self.syscapture.suspend() + os.dup2(self.targetfd_save, self.targetfd) + self._state = "suspended" + + def resume(self) -> None: + self._assert_state("resume", ("started", "suspended")) + if self._state == "started": + return + self.syscapture.resume() + os.dup2(self.tmpfile.fileno(), self.targetfd) + self._state = "started" + + +class FDCaptureBinary(FDCaptureBase[bytes]): + """Capture IO to/from a given OS-level file descriptor. + + snap() produces `bytes`. 
+ """ + + EMPTY_BUFFER = b"" + + def snap(self) -> bytes: + self._assert_state("snap", ("started", "suspended")) + self.tmpfile.seek(0) + res = self.tmpfile.buffer.read() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: bytes) -> None: + """Write to original file descriptor.""" + self._assert_state("writeorg", ("started", "suspended")) + os.write(self.targetfd_save, data) + + +class FDCapture(FDCaptureBase[str]): + """Capture IO to/from a given OS-level file descriptor. + + snap() produces text. + """ + + EMPTY_BUFFER = "" + + def snap(self) -> str: + self._assert_state("snap", ("started", "suspended")) + self.tmpfile.seek(0) + res = self.tmpfile.read() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: str) -> None: + """Write to original file descriptor.""" + self._assert_state("writeorg", ("started", "suspended")) + # XXX use encoding of original stream + os.write(self.targetfd_save, data.encode("utf-8")) + + +# MultiCapture + + +# Generic NamedTuple only supported since Python 3.11. +if sys.version_info >= (3, 11) or TYPE_CHECKING: + + @final + class CaptureResult(NamedTuple, Generic[AnyStr]): + """The result of :method:`caplog.readouterr() `.""" + + out: AnyStr + err: AnyStr + +else: + + class CaptureResult( + collections.namedtuple("CaptureResult", ["out", "err"]), # noqa: PYI024 + Generic[AnyStr], + ): + """The result of :method:`caplog.readouterr() `.""" + + __slots__ = () + + +class MultiCapture(Generic[AnyStr]): + _state = None + _in_suspended = False + + def __init__( + self, + in_: Optional[CaptureBase[AnyStr]], + out: Optional[CaptureBase[AnyStr]], + err: Optional[CaptureBase[AnyStr]], + ) -> None: + self.in_: Optional[CaptureBase[AnyStr]] = in_ + self.out: Optional[CaptureBase[AnyStr]] = out + self.err: Optional[CaptureBase[AnyStr]] = err + + def __repr__(self) -> str: + return ( + f"" + ) + + def start_capturing(self) -> None: + self._state = "started" + if self.in_: + self.in_.start() + if self.out: + self.out.start() + if self.err: + self.err.start() + + def pop_outerr_to_orig(self) -> Tuple[AnyStr, AnyStr]: + """Pop current snapshot out/err capture and flush to orig streams.""" + out, err = self.readouterr() + if out: + assert self.out is not None + self.out.writeorg(out) + if err: + assert self.err is not None + self.err.writeorg(err) + return out, err + + def suspend_capturing(self, in_: bool = False) -> None: + self._state = "suspended" + if self.out: + self.out.suspend() + if self.err: + self.err.suspend() + if in_ and self.in_: + self.in_.suspend() + self._in_suspended = True + + def resume_capturing(self) -> None: + self._state = "started" + if self.out: + self.out.resume() + if self.err: + self.err.resume() + if self._in_suspended: + assert self.in_ is not None + self.in_.resume() + self._in_suspended = False + + def stop_capturing(self) -> None: + """Stop capturing and reset capturing streams.""" + if self._state == "stopped": + raise ValueError("was already stopped") + self._state = "stopped" + if self.out: + self.out.done() + if self.err: + self.err.done() + if self.in_: + self.in_.done() + + def is_started(self) -> bool: + """Whether actively capturing -- not suspended or stopped.""" + return self._state == "started" + + def readouterr(self) -> CaptureResult[AnyStr]: + out = self.out.snap() if self.out else "" + err = self.err.snap() if self.err else "" + # TODO: This type error is real, need to fix. 
+ return CaptureResult(out, err) # type: ignore[arg-type] + + +def _get_multicapture(method: _CaptureMethod) -> MultiCapture[str]: + if method == "fd": + return MultiCapture(in_=FDCapture(0), out=FDCapture(1), err=FDCapture(2)) + elif method == "sys": + return MultiCapture(in_=SysCapture(0), out=SysCapture(1), err=SysCapture(2)) + elif method == "no": + return MultiCapture(in_=None, out=None, err=None) + elif method == "tee-sys": + return MultiCapture( + in_=None, out=SysCapture(1, tee=True), err=SysCapture(2, tee=True) + ) + raise ValueError(f"unknown capturing method: {method!r}") + + +# CaptureManager and CaptureFixture + + +class CaptureManager: + """The capture plugin. + + Manages that the appropriate capture method is enabled/disabled during + collection and each test phase (setup, call, teardown). After each of + those points, the captured output is obtained and attached to the + collection/runtest report. + + There are two levels of capture: + + * global: enabled by default and can be suppressed by the ``-s`` + option. This is always enabled/disabled during collection and each test + phase. + + * fixture: when a test function or one of its fixture depend on the + ``capsys`` or ``capfd`` fixtures. In this case special handling is + needed to ensure the fixtures take precedence over the global capture. + """ + + def __init__(self, method: _CaptureMethod) -> None: + self._method: Final = method + self._global_capturing: Optional[MultiCapture[str]] = None + self._capture_fixture: Optional[CaptureFixture[Any]] = None + + def __repr__(self) -> str: + return ( + f"" + ) + + def is_capturing(self) -> Union[str, bool]: + if self.is_globally_capturing(): + return "global" + if self._capture_fixture: + return "fixture %s" % self._capture_fixture.request.fixturename + return False + + # Global capturing control + + def is_globally_capturing(self) -> bool: + return self._method != "no" + + def start_global_capturing(self) -> None: + assert self._global_capturing is None + self._global_capturing = _get_multicapture(self._method) + self._global_capturing.start_capturing() + + def stop_global_capturing(self) -> None: + if self._global_capturing is not None: + self._global_capturing.pop_outerr_to_orig() + self._global_capturing.stop_capturing() + self._global_capturing = None + + def resume_global_capture(self) -> None: + # During teardown of the python process, and on rare occasions, capture + # attributes can be `None` while trying to resume global capture. + if self._global_capturing is not None: + self._global_capturing.resume_capturing() + + def suspend_global_capture(self, in_: bool = False) -> None: + if self._global_capturing is not None: + self._global_capturing.suspend_capturing(in_=in_) + + def suspend(self, in_: bool = False) -> None: + # Need to undo local capsys-et-al if it exists before disabling global capture. 
+ self.suspend_fixture() + self.suspend_global_capture(in_) + + def resume(self) -> None: + self.resume_global_capture() + self.resume_fixture() + + def read_global_capture(self) -> CaptureResult[str]: + assert self._global_capturing is not None + return self._global_capturing.readouterr() + + # Fixture Control + + def set_fixture(self, capture_fixture: "CaptureFixture[Any]") -> None: + if self._capture_fixture: + current_fixture = self._capture_fixture.request.fixturename + requested_fixture = capture_fixture.request.fixturename + capture_fixture.request.raiseerror( + f"cannot use {requested_fixture} and {current_fixture} at the same time" + ) + self._capture_fixture = capture_fixture + + def unset_fixture(self) -> None: + self._capture_fixture = None + + def activate_fixture(self) -> None: + """If the current item is using ``capsys`` or ``capfd``, activate + them so they take precedence over the global capture.""" + if self._capture_fixture: + self._capture_fixture._start() + + def deactivate_fixture(self) -> None: + """Deactivate the ``capsys`` or ``capfd`` fixture of this item, if any.""" + if self._capture_fixture: + self._capture_fixture.close() + + def suspend_fixture(self) -> None: + if self._capture_fixture: + self._capture_fixture._suspend() + + def resume_fixture(self) -> None: + if self._capture_fixture: + self._capture_fixture._resume() + + # Helper context managers + + @contextlib.contextmanager + def global_and_fixture_disabled(self) -> Generator[None, None, None]: + """Context manager to temporarily disable global and current fixture capturing.""" + do_fixture = self._capture_fixture and self._capture_fixture._is_started() + if do_fixture: + self.suspend_fixture() + do_global = self._global_capturing and self._global_capturing.is_started() + if do_global: + self.suspend_global_capture() + try: + yield + finally: + if do_global: + self.resume_global_capture() + if do_fixture: + self.resume_fixture() + + @contextlib.contextmanager + def item_capture(self, when: str, item: Item) -> Generator[None, None, None]: + self.resume_global_capture() + self.activate_fixture() + try: + yield + finally: + self.deactivate_fixture() + self.suspend_global_capture(in_=False) + + out, err = self.read_global_capture() + item.add_report_section(when, "stdout", out) + item.add_report_section(when, "stderr", err) + + # Hooks + + @hookimpl(wrapper=True) + def pytest_make_collect_report( + self, collector: Collector + ) -> Generator[None, CollectReport, CollectReport]: + if isinstance(collector, File): + self.resume_global_capture() + try: + rep = yield + finally: + self.suspend_global_capture() + out, err = self.read_global_capture() + if out: + rep.sections.append(("Captured stdout", out)) + if err: + rep.sections.append(("Captured stderr", err)) + else: + rep = yield + return rep + + @hookimpl(wrapper=True) + def pytest_runtest_setup(self, item: Item) -> Generator[None, None, None]: + with self.item_capture("setup", item): + return (yield) + + @hookimpl(wrapper=True) + def pytest_runtest_call(self, item: Item) -> Generator[None, None, None]: + with self.item_capture("call", item): + return (yield) + + @hookimpl(wrapper=True) + def pytest_runtest_teardown(self, item: Item) -> Generator[None, None, None]: + with self.item_capture("teardown", item): + return (yield) + + @hookimpl(tryfirst=True) + def pytest_keyboard_interrupt(self) -> None: + self.stop_global_capturing() + + @hookimpl(tryfirst=True) + def pytest_internalerror(self) -> None: + self.stop_global_capturing() + + +class 
CaptureFixture(Generic[AnyStr]): + """Object returned by the :fixture:`capsys`, :fixture:`capsysbinary`, + :fixture:`capfd` and :fixture:`capfdbinary` fixtures.""" + + def __init__( + self, + captureclass: Type[CaptureBase[AnyStr]], + request: SubRequest, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self.captureclass: Type[CaptureBase[AnyStr]] = captureclass + self.request = request + self._capture: Optional[MultiCapture[AnyStr]] = None + self._captured_out: AnyStr = self.captureclass.EMPTY_BUFFER + self._captured_err: AnyStr = self.captureclass.EMPTY_BUFFER + + def _start(self) -> None: + if self._capture is None: + self._capture = MultiCapture( + in_=None, + out=self.captureclass(1), + err=self.captureclass(2), + ) + self._capture.start_capturing() + + def close(self) -> None: + if self._capture is not None: + out, err = self._capture.pop_outerr_to_orig() + self._captured_out += out + self._captured_err += err + self._capture.stop_capturing() + self._capture = None + + def readouterr(self) -> CaptureResult[AnyStr]: + """Read and return the captured output so far, resetting the internal + buffer. + + :returns: + The captured content as a namedtuple with ``out`` and ``err`` + string attributes. + """ + captured_out, captured_err = self._captured_out, self._captured_err + if self._capture is not None: + out, err = self._capture.readouterr() + captured_out += out + captured_err += err + self._captured_out = self.captureclass.EMPTY_BUFFER + self._captured_err = self.captureclass.EMPTY_BUFFER + return CaptureResult(captured_out, captured_err) + + def _suspend(self) -> None: + """Suspend this fixture's own capturing temporarily.""" + if self._capture is not None: + self._capture.suspend_capturing() + + def _resume(self) -> None: + """Resume this fixture's own capturing temporarily.""" + if self._capture is not None: + self._capture.resume_capturing() + + def _is_started(self) -> bool: + """Whether actively capturing -- not disabled or closed.""" + if self._capture is not None: + return self._capture.is_started() + return False + + @contextlib.contextmanager + def disabled(self) -> Generator[None, None, None]: + """Temporarily disable capturing while inside the ``with`` block.""" + capmanager: CaptureManager = self.request.config.pluginmanager.getplugin( + "capturemanager" + ) + with capmanager.global_and_fixture_disabled(): + yield + + +# The fixtures. + + +@fixture +def capsys(request: SubRequest) -> Generator[CaptureFixture[str], None, None]: + r"""Enable text capturing of writes to ``sys.stdout`` and ``sys.stderr``. + + The captured output is made available via ``capsys.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``text`` objects. + + Returns an instance of :class:`CaptureFixture[str] `. + + Example: + .. code-block:: python + + def test_output(capsys): + print("hello") + captured = capsys.readouterr() + assert captured.out == "hello\n" + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture(SysCapture, request, _ispytest=True) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() + + +@fixture +def capsysbinary(request: SubRequest) -> Generator[CaptureFixture[bytes], None, None]: + r"""Enable bytes capturing of writes to ``sys.stdout`` and ``sys.stderr``. 
+ + The captured output is made available via ``capsysbinary.readouterr()`` + method calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``bytes`` objects. + + Returns an instance of :class:`CaptureFixture[bytes] `. + + Example: + .. code-block:: python + + def test_output(capsysbinary): + print("hello") + captured = capsysbinary.readouterr() + assert captured.out == b"hello\n" + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture(SysCaptureBinary, request, _ispytest=True) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() + + +@fixture +def capfd(request: SubRequest) -> Generator[CaptureFixture[str], None, None]: + r"""Enable text capturing of writes to file descriptors ``1`` and ``2``. + + The captured output is made available via ``capfd.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``text`` objects. + + Returns an instance of :class:`CaptureFixture[str] `. + + Example: + .. code-block:: python + + def test_system_echo(capfd): + os.system('echo "hello"') + captured = capfd.readouterr() + assert captured.out == "hello\n" + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture(FDCapture, request, _ispytest=True) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() + + +@fixture +def capfdbinary(request: SubRequest) -> Generator[CaptureFixture[bytes], None, None]: + r"""Enable bytes capturing of writes to file descriptors ``1`` and ``2``. + + The captured output is made available via ``capfd.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``byte`` objects. + + Returns an instance of :class:`CaptureFixture[bytes] `. + + Example: + .. code-block:: python + + def test_system_echo(capfdbinary): + os.system('echo "hello"') + captured = capfdbinary.readouterr() + assert captured.out == b"hello\n" + + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture(FDCaptureBinary, request, _ispytest=True) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/compat.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/compat.py new file mode 100644 index 000000000..9d9411818 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/compat.py @@ -0,0 +1,351 @@ +# mypy: allow-untyped-defs +"""Python version compatibility code.""" + +from __future__ import annotations + +import dataclasses +import enum +import functools +import inspect +from inspect import Parameter +from inspect import signature +import os +from pathlib import Path +import sys +from typing import Any +from typing import Callable +from typing import Final +from typing import NoReturn + +import py + + +#: constant to prepare valuing pylib path replacements/lazy proxies later on +# intended for removal in pytest 8.0 or 9.0 + +# fmt: off +# intentional space to create a fake difference for the verification +LEGACY_PATH = py.path. 
local +# fmt: on + + +def legacy_path(path: str | os.PathLike[str]) -> LEGACY_PATH: + """Internal wrapper to prepare lazy proxies for legacy_path instances""" + return LEGACY_PATH(path) + + +# fmt: off +# Singleton type for NOTSET, as described in: +# https://www.python.org/dev/peps/pep-0484/#support-for-singleton-types-in-unions +class NotSetType(enum.Enum): + token = 0 +NOTSET: Final = NotSetType.token +# fmt: on + + +def is_generator(func: object) -> bool: + genfunc = inspect.isgeneratorfunction(func) + return genfunc and not iscoroutinefunction(func) + + +def iscoroutinefunction(func: object) -> bool: + """Return True if func is a coroutine function (a function defined with async + def syntax, and doesn't contain yield), or a function decorated with + @asyncio.coroutine. + + Note: copied and modified from Python 3.5's builtin couroutines.py to avoid + importing asyncio directly, which in turns also initializes the "logging" + module as a side-effect (see issue #8). + """ + return inspect.iscoroutinefunction(func) or getattr(func, "_is_coroutine", False) + + +def is_async_function(func: object) -> bool: + """Return True if the given function seems to be an async function or + an async generator.""" + return iscoroutinefunction(func) or inspect.isasyncgenfunction(func) + + +def getlocation(function, curdir: str | os.PathLike[str] | None = None) -> str: + function = get_real_func(function) + fn = Path(inspect.getfile(function)) + lineno = function.__code__.co_firstlineno + if curdir is not None: + try: + relfn = fn.relative_to(curdir) + except ValueError: + pass + else: + return "%s:%d" % (relfn, lineno + 1) + return "%s:%d" % (fn, lineno + 1) + + +def num_mock_patch_args(function) -> int: + """Return number of arguments used up by mock arguments (if any).""" + patchings = getattr(function, "patchings", None) + if not patchings: + return 0 + + mock_sentinel = getattr(sys.modules.get("mock"), "DEFAULT", object()) + ut_mock_sentinel = getattr(sys.modules.get("unittest.mock"), "DEFAULT", object()) + + return len( + [ + p + for p in patchings + if not p.attribute_name + and (p.new is mock_sentinel or p.new is ut_mock_sentinel) + ] + ) + + +def getfuncargnames( + function: Callable[..., object], + *, + name: str = "", + cls: type | None = None, +) -> tuple[str, ...]: + """Return the names of a function's mandatory arguments. + + Should return the names of all function arguments that: + * Aren't bound to an instance or type as in instance or class methods. + * Don't have default values. + * Aren't bound with functools.partial. + * Aren't replaced with mocks. + + The cls arguments indicate that the function should be treated as a bound + method even though it's not unless the function is a static method. + + The name parameter should be the original name in which the function was collected. + """ + # TODO(RonnyPfannschmidt): This function should be refactored when we + # revisit fixtures. The fixture mechanism should ask the node for + # the fixture names, and not try to obtain directly from the + # function object well after collection has occurred. + + # The parameters attribute of a Signature object contains an + # ordered mapping of parameter names to Parameter instances. This + # creates a tuple of the names of the parameters that don't have + # defaults. 
+ try: + parameters = signature(function).parameters + except (ValueError, TypeError) as e: + from _pytest.outcomes import fail + + fail( + f"Could not determine arguments of {function!r}: {e}", + pytrace=False, + ) + + arg_names = tuple( + p.name + for p in parameters.values() + if ( + p.kind is Parameter.POSITIONAL_OR_KEYWORD + or p.kind is Parameter.KEYWORD_ONLY + ) + and p.default is Parameter.empty + ) + if not name: + name = function.__name__ + + # If this function should be treated as a bound method even though + # it's passed as an unbound method or function, remove the first + # parameter name. + if ( + # Not using `getattr` because we don't want to resolve the staticmethod. + # Not using `cls.__dict__` because we want to check the entire MRO. + cls + and not isinstance( + inspect.getattr_static(cls, name, default=None), staticmethod + ) + ): + arg_names = arg_names[1:] + # Remove any names that will be replaced with mocks. + if hasattr(function, "__wrapped__"): + arg_names = arg_names[num_mock_patch_args(function) :] + return arg_names + + +def get_default_arg_names(function: Callable[..., Any]) -> tuple[str, ...]: + # Note: this code intentionally mirrors the code at the beginning of + # getfuncargnames, to get the arguments which were excluded from its result + # because they had default values. + return tuple( + p.name + for p in signature(function).parameters.values() + if p.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY) + and p.default is not Parameter.empty + ) + + +_non_printable_ascii_translate_table = { + i: f"\\x{i:02x}" for i in range(128) if i not in range(32, 127) +} +_non_printable_ascii_translate_table.update( + {ord("\t"): "\\t", ord("\r"): "\\r", ord("\n"): "\\n"} +) + + +def ascii_escaped(val: bytes | str) -> str: + r"""If val is pure ASCII, return it as an str, otherwise, escape + bytes objects into a sequence of escaped bytes: + + b'\xc3\xb4\xc5\xd6' -> r'\xc3\xb4\xc5\xd6' + + and escapes strings into a sequence of escaped unicode ids, e.g.: + + r'4\nV\U00043efa\x0eMXWB\x1e\u3028\u15fd\xcd\U0007d944' + + Note: + The obvious "v.decode('unicode-escape')" will return + valid UTF-8 unicode if it finds them in bytes, but we + want to return escaped bytes for any byte, even if they match + a UTF-8 string. + """ + if isinstance(val, bytes): + ret = val.decode("ascii", "backslashreplace") + else: + ret = val.encode("unicode_escape").decode("ascii") + return ret.translate(_non_printable_ascii_translate_table) + + +@dataclasses.dataclass +class _PytestWrapper: + """Dummy wrapper around a function object for internal use only. + + Used to correctly unwrap the underlying function object when we are + creating fixtures, because we wrap the function object ourselves with a + decorator to issue warnings when the fixture function is called directly. 
+ """ + + obj: Any + + +def get_real_func(obj): + """Get the real function object of the (possibly) wrapped object by + functools.wraps or functools.partial.""" + start_obj = obj + for i in range(100): + # __pytest_wrapped__ is set by @pytest.fixture when wrapping the fixture function + # to trigger a warning if it gets called directly instead of by pytest: we don't + # want to unwrap further than this otherwise we lose useful wrappings like @mock.patch (#3774) + new_obj = getattr(obj, "__pytest_wrapped__", None) + if isinstance(new_obj, _PytestWrapper): + obj = new_obj.obj + break + new_obj = getattr(obj, "__wrapped__", None) + if new_obj is None: + break + obj = new_obj + else: + from _pytest._io.saferepr import saferepr + + raise ValueError( + f"could not find real function of {saferepr(start_obj)}\nstopped at {saferepr(obj)}" + ) + if isinstance(obj, functools.partial): + obj = obj.func + return obj + + +def get_real_method(obj, holder): + """Attempt to obtain the real function object that might be wrapping + ``obj``, while at the same time returning a bound method to ``holder`` if + the original object was a bound method.""" + try: + is_method = hasattr(obj, "__func__") + obj = get_real_func(obj) + except Exception: # pragma: no cover + return obj + if is_method and hasattr(obj, "__get__") and callable(obj.__get__): + obj = obj.__get__(holder) + return obj + + +def getimfunc(func): + try: + return func.__func__ + except AttributeError: + return func + + +def safe_getattr(object: Any, name: str, default: Any) -> Any: + """Like getattr but return default upon any Exception or any OutcomeException. + + Attribute access can potentially fail for 'evil' Python objects. + See issue #214. + It catches OutcomeException because of #2490 (issue #580), new outcomes + are derived from BaseException instead of Exception (for more details + check #2707). + """ + from _pytest.outcomes import TEST_OUTCOME + + try: + return getattr(object, name, default) + except TEST_OUTCOME: + return default + + +def safe_isclass(obj: object) -> bool: + """Ignore any exception via isinstance on Python 3.""" + try: + return inspect.isclass(obj) + except Exception: + return False + + +def get_user_id() -> int | None: + """Return the current process's real user id or None if it could not be + determined. + + :return: The user id or None if it could not be determined. + """ + # mypy follows the version and platform checking expectation of PEP 484: + # https://mypy.readthedocs.io/en/stable/common_issues.html?highlight=platform#python-version-and-system-platform-checks + # Containment checks are too complex for mypy v1.5.0 and cause failure. + if sys.platform == "win32" or sys.platform == "emscripten": + # win32 does not have a getuid() function. + # Emscripten has a return 0 stub. + return None + else: + # On other platforms, a return value of -1 is assumed to indicate that + # the current process's real user id could not be determined. + ERROR = -1 + uid = os.getuid() + return uid if uid != ERROR else None + + +# Perform exhaustiveness checking. +# +# Consider this example: +# +# MyUnion = Union[int, str] +# +# def handle(x: MyUnion) -> int { +# if isinstance(x, int): +# return 1 +# elif isinstance(x, str): +# return 2 +# else: +# raise Exception('unreachable') +# +# Now suppose we add a new variant: +# +# MyUnion = Union[int, str, bytes] +# +# After doing this, we must remember ourselves to go and update the handle +# function to handle the new variant. 
+# +# With `assert_never` we can do better: +# +# // raise Exception('unreachable') +# return assert_never(x) +# +# Now, if we forget to handle the new variant, the type-checker will emit a +# compile-time error, instead of the runtime error we would have gotten +# previously. +# +# This also work for Enums (if you use `is` to compare) and Literals. +def assert_never(value: NoReturn) -> NoReturn: + assert False, f"Unhandled value: {value} ({type(value).__name__})" diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/config/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/config/__init__.py new file mode 100644 index 000000000..306b14cce --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/config/__init__.py @@ -0,0 +1,1976 @@ +# mypy: allow-untyped-defs +"""Command line options, ini-file and conftest.py processing.""" + +import argparse +import collections.abc +import copy +import dataclasses +import enum +from functools import lru_cache +import glob +import importlib.metadata +import inspect +import os +from pathlib import Path +import re +import shlex +import sys +from textwrap import dedent +import types +from types import FunctionType +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import Final +from typing import final +from typing import Generator +from typing import IO +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Optional +from typing import Sequence +from typing import Set +from typing import TextIO +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import Union +import warnings + +import pluggy +from pluggy import HookimplMarker +from pluggy import HookimplOpts +from pluggy import HookspecMarker +from pluggy import HookspecOpts +from pluggy import PluginManager + +from .compat import PathAwareHookProxy +from .exceptions import PrintHelp as PrintHelp +from .exceptions import UsageError as UsageError +from .findpaths import determine_setup +from _pytest import __version__ +import _pytest._code +from _pytest._code import ExceptionInfo +from _pytest._code import filter_traceback +from _pytest._io import TerminalWriter +import _pytest.deprecated +import _pytest.hookspec +from _pytest.outcomes import fail +from _pytest.outcomes import Skipped +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.pathlib import import_path +from _pytest.pathlib import ImportMode +from _pytest.pathlib import resolve_package_path +from _pytest.pathlib import safe_exists +from _pytest.stash import Stash +from _pytest.warning_types import PytestConfigWarning +from _pytest.warning_types import warn_explicit_for + + +if TYPE_CHECKING: + from .argparsing import Argument + from .argparsing import Parser + from _pytest._code.code import _TracebackStyle + from _pytest.terminal import TerminalReporter + + +_PluggyPlugin = object +"""A type to represent plugin objects. + +Plugins can be any namespace, so we can't narrow it down much, but we use an +alias to make the intent clear. + +Ideally this type would be provided by pluggy itself. +""" + + +hookimpl = HookimplMarker("pytest") +hookspec = HookspecMarker("pytest") + + +@final +class ExitCode(enum.IntEnum): + """Encodes the valid exit codes by pytest. + + Currently users and plugins may supply other exit codes as well. + + .. versionadded:: 5.0 + """ + + #: Tests passed. 
+ OK = 0 + #: Tests failed. + TESTS_FAILED = 1 + #: pytest was interrupted. + INTERRUPTED = 2 + #: An internal error got in the way. + INTERNAL_ERROR = 3 + #: pytest was misused. + USAGE_ERROR = 4 + #: pytest couldn't find tests. + NO_TESTS_COLLECTED = 5 + + +class ConftestImportFailure(Exception): + def __init__( + self, + path: Path, + *, + cause: Exception, + ) -> None: + self.path = path + self.cause = cause + + def __str__(self) -> str: + return f"{type(self.cause).__name__}: {self.cause} (from {self.path})" + + +def filter_traceback_for_conftest_import_failure( + entry: _pytest._code.TracebackEntry, +) -> bool: + """Filter tracebacks entries which point to pytest internals or importlib. + + Make a special case for importlib because we use it to import test modules and conftest files + in _pytest.pathlib.import_path. + """ + return filter_traceback(entry) and "importlib" not in str(entry.path).split(os.sep) + + +def main( + args: Optional[Union[List[str], "os.PathLike[str]"]] = None, + plugins: Optional[Sequence[Union[str, _PluggyPlugin]]] = None, +) -> Union[int, ExitCode]: + """Perform an in-process test run. + + :param args: + List of command line arguments. If `None` or not given, defaults to reading + arguments directly from the process command line (:data:`sys.argv`). + :param plugins: List of plugin objects to be auto-registered during initialization. + + :returns: An exit code. + """ + old_pytest_version = os.environ.get("PYTEST_VERSION") + try: + os.environ["PYTEST_VERSION"] = __version__ + try: + config = _prepareconfig(args, plugins) + except ConftestImportFailure as e: + exc_info = ExceptionInfo.from_exception(e.cause) + tw = TerminalWriter(sys.stderr) + tw.line(f"ImportError while loading conftest '{e.path}'.", red=True) + exc_info.traceback = exc_info.traceback.filter( + filter_traceback_for_conftest_import_failure + ) + exc_repr = ( + exc_info.getrepr(style="short", chain=False) + if exc_info.traceback + else exc_info.exconly() + ) + formatted_tb = str(exc_repr) + for line in formatted_tb.splitlines(): + tw.line(line.rstrip(), red=True) + return ExitCode.USAGE_ERROR + else: + try: + ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( + config=config + ) + try: + return ExitCode(ret) + except ValueError: + return ret + finally: + config._ensure_unconfigure() + except UsageError as e: + tw = TerminalWriter(sys.stderr) + for msg in e.args: + tw.line(f"ERROR: {msg}\n", red=True) + return ExitCode.USAGE_ERROR + finally: + if old_pytest_version is None: + os.environ.pop("PYTEST_VERSION", None) + else: + os.environ["PYTEST_VERSION"] = old_pytest_version + + +def console_main() -> int: + """The CLI entry point of pytest. + + This function is not meant for programmable use; use `main()` instead. + """ + # https://docs.python.org/3/library/signal.html#note-on-sigpipe + try: + code = main() + sys.stdout.flush() + return code + except BrokenPipeError: + # Python flushes standard streams on exit; redirect remaining output + # to devnull to avoid another BrokenPipeError at shutdown + devnull = os.open(os.devnull, os.O_WRONLY) + os.dup2(devnull, sys.stdout.fileno()) + return 1 # Python exits with error code 1 on EPIPE + + +class cmdline: # compatibility namespace + main = staticmethod(main) + + +def filename_arg(path: str, optname: str) -> str: + """Argparse type validator for filename arguments. + + :path: Path of filename. + :optname: Name of the option. 
+ """ + if os.path.isdir(path): + raise UsageError(f"{optname} must be a filename, given: {path}") + return path + + +def directory_arg(path: str, optname: str) -> str: + """Argparse type validator for directory arguments. + + :path: Path of directory. + :optname: Name of the option. + """ + if not os.path.isdir(path): + raise UsageError(f"{optname} must be a directory, given: {path}") + return path + + +# Plugins that cannot be disabled via "-p no:X" currently. +essential_plugins = ( + "mark", + "main", + "runner", + "fixtures", + "helpconfig", # Provides -p. +) + +default_plugins = ( + *essential_plugins, + "python", + "terminal", + "debugging", + "unittest", + "capture", + "skipping", + "legacypath", + "tmpdir", + "monkeypatch", + "recwarn", + "pastebin", + "assertion", + "junitxml", + "doctest", + "cacheprovider", + "freeze_support", + "setuponly", + "setupplan", + "stepwise", + "warnings", + "logging", + "reports", + "python_path", + "unraisableexception", + "threadexception", + "faulthandler", +) + +builtin_plugins = set(default_plugins) +builtin_plugins.add("pytester") +builtin_plugins.add("pytester_assertions") + + +def get_config( + args: Optional[List[str]] = None, + plugins: Optional[Sequence[Union[str, _PluggyPlugin]]] = None, +) -> "Config": + # subsequent calls to main will create a fresh instance + pluginmanager = PytestPluginManager() + config = Config( + pluginmanager, + invocation_params=Config.InvocationParams( + args=args or (), + plugins=plugins, + dir=Path.cwd(), + ), + ) + + if args is not None: + # Handle any "-p no:plugin" args. + pluginmanager.consider_preparse(args, exclude_only=True) + + for spec in default_plugins: + pluginmanager.import_plugin(spec) + + return config + + +def get_plugin_manager() -> "PytestPluginManager": + """Obtain a new instance of the + :py:class:`pytest.PytestPluginManager`, with default plugins + already loaded. + + This function can be used by integration with other tools, like hooking + into pytest to run tests into an IDE. 
+ """ + return get_config().pluginmanager + + +def _prepareconfig( + args: Optional[Union[List[str], "os.PathLike[str]"]] = None, + plugins: Optional[Sequence[Union[str, _PluggyPlugin]]] = None, +) -> "Config": + if args is None: + args = sys.argv[1:] + elif isinstance(args, os.PathLike): + args = [os.fspath(args)] + elif not isinstance(args, list): + msg = ( # type:ignore[unreachable] + "`args` parameter expected to be a list of strings, got: {!r} (type: {})" + ) + raise TypeError(msg.format(args, type(args))) + + config = get_config(args, plugins) + pluginmanager = config.pluginmanager + try: + if plugins: + for plugin in plugins: + if isinstance(plugin, str): + pluginmanager.consider_pluginarg(plugin) + else: + pluginmanager.register(plugin) + config = pluginmanager.hook.pytest_cmdline_parse( + pluginmanager=pluginmanager, args=args + ) + return config + except BaseException: + config._ensure_unconfigure() + raise + + +def _get_directory(path: Path) -> Path: + """Get the directory of a path - itself if already a directory.""" + if path.is_file(): + return path.parent + else: + return path + + +def _get_legacy_hook_marks( + method: Any, + hook_type: str, + opt_names: Tuple[str, ...], +) -> Dict[str, bool]: + if TYPE_CHECKING: + # abuse typeguard from importlib to avoid massive method type union thats lacking a alias + assert inspect.isroutine(method) + known_marks: Set[str] = {m.name for m in getattr(method, "pytestmark", [])} + must_warn: List[str] = [] + opts: Dict[str, bool] = {} + for opt_name in opt_names: + opt_attr = getattr(method, opt_name, AttributeError) + if opt_attr is not AttributeError: + must_warn.append(f"{opt_name}={opt_attr}") + opts[opt_name] = True + elif opt_name in known_marks: + must_warn.append(f"{opt_name}=True") + opts[opt_name] = True + else: + opts[opt_name] = False + if must_warn: + hook_opts = ", ".join(must_warn) + message = _pytest.deprecated.HOOK_LEGACY_MARKING.format( + type=hook_type, + fullname=method.__qualname__, + hook_opts=hook_opts, + ) + warn_explicit_for(cast(FunctionType, method), message) + return opts + + +@final +class PytestPluginManager(PluginManager): + """A :py:class:`pluggy.PluginManager ` with + additional pytest-specific functionality: + + * Loading plugins from the command line, ``PYTEST_PLUGINS`` env variable and + ``pytest_plugins`` global variables found in plugins being loaded. + * ``conftest.py`` loading during start-up. + """ + + def __init__(self) -> None: + import _pytest.assertion + + super().__init__("pytest") + + # -- State related to local conftest plugins. + # All loaded conftest modules. + self._conftest_plugins: Set[types.ModuleType] = set() + # All conftest modules applicable for a directory. + # This includes the directory's own conftest modules as well + # as those of its parent directories. + self._dirpath2confmods: Dict[Path, List[types.ModuleType]] = {} + # Cutoff directory above which conftests are no longer discovered. + self._confcutdir: Optional[Path] = None + # If set, conftest loading is skipped. + self._noconftest = False + + # _getconftestmodules()'s call to _get_directory() causes a stat + # storm when it's called potentially thousands of times in a test + # session (#9478), often with the same path, so cache it. 
+ self._get_directory = lru_cache(256)(_get_directory) + + # plugins that were explicitly skipped with pytest.skip + # list of (module name, skip reason) + # previously we would issue a warning when a plugin was skipped, but + # since we refactored warnings as first citizens of Config, they are + # just stored here to be used later. + self.skipped_plugins: List[Tuple[str, str]] = [] + + self.add_hookspecs(_pytest.hookspec) + self.register(self) + if os.environ.get("PYTEST_DEBUG"): + err: IO[str] = sys.stderr + encoding: str = getattr(err, "encoding", "utf8") + try: + err = open( + os.dup(err.fileno()), + mode=err.mode, + buffering=1, + encoding=encoding, + ) + except Exception: + pass + self.trace.root.setwriter(err.write) + self.enable_tracing() + + # Config._consider_importhook will set a real object if required. + self.rewrite_hook = _pytest.assertion.DummyRewriteHook() + # Used to know when we are importing conftests after the pytest_configure stage. + self._configured = False + + def parse_hookimpl_opts( + self, plugin: _PluggyPlugin, name: str + ) -> Optional[HookimplOpts]: + """:meta private:""" + # pytest hooks are always prefixed with "pytest_", + # so we avoid accessing possibly non-readable attributes + # (see issue #1073). + if not name.startswith("pytest_"): + return None + # Ignore names which can not be hooks. + if name == "pytest_plugins": + return None + + opts = super().parse_hookimpl_opts(plugin, name) + if opts is not None: + return opts + + method = getattr(plugin, name) + # Consider only actual functions for hooks (#3775). + if not inspect.isroutine(method): + return None + # Collect unmarked hooks as long as they have the `pytest_' prefix. + return _get_legacy_hook_marks( # type: ignore[return-value] + method, "impl", ("tryfirst", "trylast", "optionalhook", "hookwrapper") + ) + + def parse_hookspec_opts(self, module_or_class, name: str) -> Optional[HookspecOpts]: + """:meta private:""" + opts = super().parse_hookspec_opts(module_or_class, name) + if opts is None: + method = getattr(module_or_class, name) + if name.startswith("pytest_"): + opts = _get_legacy_hook_marks( # type: ignore[assignment] + method, + "spec", + ("firstresult", "historic"), + ) + return opts + + def register( + self, plugin: _PluggyPlugin, name: Optional[str] = None + ) -> Optional[str]: + if name in _pytest.deprecated.DEPRECATED_EXTERNAL_PLUGINS: + warnings.warn( + PytestConfigWarning( + "{} plugin has been merged into the core, " + "please remove it from your requirements.".format( + name.replace("_", "-") + ) + ) + ) + return None + plugin_name = super().register(plugin, name) + if plugin_name is not None: + self.hook.pytest_plugin_registered.call_historic( + kwargs=dict( + plugin=plugin, + plugin_name=plugin_name, + manager=self, + ) + ) + + if isinstance(plugin, types.ModuleType): + self.consider_module(plugin) + return plugin_name + + def getplugin(self, name: str): + # Support deprecated naming because plugins (xdist e.g.) use it. + plugin: Optional[_PluggyPlugin] = self.get_plugin(name) + return plugin + + def hasplugin(self, name: str) -> bool: + """Return whether a plugin with the given name is registered.""" + return bool(self.get_plugin(name)) + + def pytest_configure(self, config: "Config") -> None: + """:meta private:""" + # XXX now that the pluginmanager exposes hookimpl(tryfirst...) + # we should remove tryfirst/trylast as markers. 
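+        # Until then, the legacy markers are registered here so they show up in
+        # `pytest --markers` output and are treated as known markers
+        # (e.g. under --strict-markers).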
+ config.addinivalue_line( + "markers", + "tryfirst: mark a hook implementation function such that the " + "plugin machinery will try to call it first/as early as possible. " + "DEPRECATED, use @pytest.hookimpl(tryfirst=True) instead.", + ) + config.addinivalue_line( + "markers", + "trylast: mark a hook implementation function such that the " + "plugin machinery will try to call it last/as late as possible. " + "DEPRECATED, use @pytest.hookimpl(trylast=True) instead.", + ) + self._configured = True + + # + # Internal API for local conftest plugin handling. + # + def _set_initial_conftests( + self, + args: Sequence[Union[str, Path]], + pyargs: bool, + noconftest: bool, + rootpath: Path, + confcutdir: Optional[Path], + invocation_dir: Path, + importmode: Union[ImportMode, str], + *, + consider_namespace_packages: bool, + ) -> None: + """Load initial conftest files given a preparsed "namespace". + + As conftest files may add their own command line options which have + arguments ('--my-opt somepath') we might get some false positives. + All builtin and 3rd party plugins will have been loaded, however, so + common options will not confuse our logic here. + """ + self._confcutdir = ( + absolutepath(invocation_dir / confcutdir) if confcutdir else None + ) + self._noconftest = noconftest + self._using_pyargs = pyargs + foundanchor = False + for intitial_path in args: + path = str(intitial_path) + # remove node-id syntax + i = path.find("::") + if i != -1: + path = path[:i] + anchor = absolutepath(invocation_dir / path) + + # Ensure we do not break if what appears to be an anchor + # is in fact a very long option (#10169, #11394). + if safe_exists(anchor): + self._try_load_conftest( + anchor, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + foundanchor = True + if not foundanchor: + self._try_load_conftest( + invocation_dir, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + + def _is_in_confcutdir(self, path: Path) -> bool: + """Whether to consider the given path to load conftests from.""" + if self._confcutdir is None: + return True + # The semantics here are literally: + # Do not load a conftest if it is found upwards from confcut dir. + # But this is *not* the same as: + # Load only conftests from confcutdir or below. + # At first glance they might seem the same thing, however we do support use cases where + # we want to load conftests that are not found in confcutdir or below, but are found + # in completely different directory hierarchies like packages installed + # in out-of-source trees. + # (see #9767 for a regression where the logic was inverted). + return path not in self._confcutdir.parents + + def _try_load_conftest( + self, + anchor: Path, + importmode: Union[str, ImportMode], + rootpath: Path, + *, + consider_namespace_packages: bool, + ) -> None: + self._loadconftestmodules( + anchor, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + # let's also consider test* subdirs + if anchor.is_dir(): + for x in anchor.glob("test*"): + if x.is_dir(): + self._loadconftestmodules( + x, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + + def _loadconftestmodules( + self, + path: Path, + importmode: Union[str, ImportMode], + rootpath: Path, + *, + consider_namespace_packages: bool, + ) -> None: + if self._noconftest: + return + + directory = self._get_directory(path) + + # Optimization: avoid repeated searches in the same directory. 
+ # Assumes always called with same importmode and rootpath. + if directory in self._dirpath2confmods: + return + + clist = [] + for parent in reversed((directory, *directory.parents)): + if self._is_in_confcutdir(parent): + conftestpath = parent / "conftest.py" + if conftestpath.is_file(): + mod = self._importconftest( + conftestpath, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + clist.append(mod) + self._dirpath2confmods[directory] = clist + + def _getconftestmodules(self, path: Path) -> Sequence[types.ModuleType]: + directory = self._get_directory(path) + return self._dirpath2confmods.get(directory, ()) + + def _rget_with_confmod( + self, + name: str, + path: Path, + ) -> Tuple[types.ModuleType, Any]: + modules = self._getconftestmodules(path) + for mod in reversed(modules): + try: + return mod, getattr(mod, name) + except AttributeError: + continue + raise KeyError(name) + + def _importconftest( + self, + conftestpath: Path, + importmode: Union[str, ImportMode], + rootpath: Path, + *, + consider_namespace_packages: bool, + ) -> types.ModuleType: + conftestpath_plugin_name = str(conftestpath) + existing = self.get_plugin(conftestpath_plugin_name) + if existing is not None: + return cast(types.ModuleType, existing) + + # conftest.py files there are not in a Python package all have module + # name "conftest", and thus conflict with each other. Clear the existing + # before loading the new one, otherwise the existing one will be + # returned from the module cache. + pkgpath = resolve_package_path(conftestpath) + if pkgpath is None: + try: + del sys.modules[conftestpath.stem] + except KeyError: + pass + + try: + mod = import_path( + conftestpath, + mode=importmode, + root=rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + except Exception as e: + assert e.__traceback__ is not None + raise ConftestImportFailure(conftestpath, cause=e) from e + + self._check_non_top_pytest_plugins(mod, conftestpath) + + self._conftest_plugins.add(mod) + dirpath = conftestpath.parent + if dirpath in self._dirpath2confmods: + for path, mods in self._dirpath2confmods.items(): + if dirpath in path.parents or path == dirpath: + if mod in mods: + raise AssertionError( + f"While trying to load conftest path {conftestpath!s}, " + f"found that the module {mod} is already loaded with path {mod.__file__}. " + "This is not supposed to happen. Please report this issue to pytest." 
+ ) + mods.append(mod) + self.trace(f"loading conftestmodule {mod!r}") + self.consider_conftest(mod, registration_name=conftestpath_plugin_name) + return mod + + def _check_non_top_pytest_plugins( + self, + mod: types.ModuleType, + conftestpath: Path, + ) -> None: + if ( + hasattr(mod, "pytest_plugins") + and self._configured + and not self._using_pyargs + ): + msg = ( + "Defining 'pytest_plugins' in a non-top-level conftest is no longer supported:\n" + "It affects the entire test suite instead of just below the conftest as expected.\n" + " {}\n" + "Please move it to a top level conftest file at the rootdir:\n" + " {}\n" + "For more information, visit:\n" + " https://docs.pytest.org/en/stable/deprecations.html#pytest-plugins-in-non-top-level-conftest-files" + ) + fail(msg.format(conftestpath, self._confcutdir), pytrace=False) + + # + # API for bootstrapping plugin loading + # + # + + def consider_preparse( + self, args: Sequence[str], *, exclude_only: bool = False + ) -> None: + """:meta private:""" + i = 0 + n = len(args) + while i < n: + opt = args[i] + i += 1 + if isinstance(opt, str): + if opt == "-p": + try: + parg = args[i] + except IndexError: + return + i += 1 + elif opt.startswith("-p"): + parg = opt[2:] + else: + continue + parg = parg.strip() + if exclude_only and not parg.startswith("no:"): + continue + self.consider_pluginarg(parg) + + def consider_pluginarg(self, arg: str) -> None: + """:meta private:""" + if arg.startswith("no:"): + name = arg[3:] + if name in essential_plugins: + raise UsageError("plugin %s cannot be disabled" % name) + + # PR #4304: remove stepwise if cacheprovider is blocked. + if name == "cacheprovider": + self.set_blocked("stepwise") + self.set_blocked("pytest_stepwise") + + self.set_blocked(name) + if not name.startswith("pytest_"): + self.set_blocked("pytest_" + name) + else: + name = arg + # Unblock the plugin. + self.unblock(name) + if not name.startswith("pytest_"): + self.unblock("pytest_" + name) + self.import_plugin(arg, consider_entry_points=True) + + def consider_conftest( + self, conftestmodule: types.ModuleType, registration_name: str + ) -> None: + """:meta private:""" + self.register(conftestmodule, name=registration_name) + + def consider_env(self) -> None: + """:meta private:""" + self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS")) + + def consider_module(self, mod: types.ModuleType) -> None: + """:meta private:""" + self._import_plugin_specs(getattr(mod, "pytest_plugins", [])) + + def _import_plugin_specs( + self, spec: Union[None, types.ModuleType, str, Sequence[str]] + ) -> None: + plugins = _get_plugin_specs_as_list(spec) + for import_spec in plugins: + self.import_plugin(import_spec) + + def import_plugin(self, modname: str, consider_entry_points: bool = False) -> None: + """Import a plugin with ``modname``. + + If ``consider_entry_points`` is True, entry point names are also + considered to find a plugin. + """ + # Most often modname refers to builtin modules, e.g. "pytester", + # "terminal" or "capture". Those plugins are registered under their + # basename for historic purposes but must be imported with the + # _pytest prefix. + assert isinstance(modname, str), ( + "module name as text required, got %r" % modname + ) + if self.is_blocked(modname) or self.get_plugin(modname) is not None: + return + + importspec = "_pytest." 
+ modname if modname in builtin_plugins else modname + self.rewrite_hook.mark_rewrite(importspec) + + if consider_entry_points: + loaded = self.load_setuptools_entrypoints("pytest11", name=modname) + if loaded: + return + + try: + __import__(importspec) + except ImportError as e: + raise ImportError( + f'Error importing plugin "{modname}": {e.args[0]}' + ).with_traceback(e.__traceback__) from e + + except Skipped as e: + self.skipped_plugins.append((modname, e.msg or "")) + else: + mod = sys.modules[importspec] + self.register(mod, modname) + + +def _get_plugin_specs_as_list( + specs: Union[None, types.ModuleType, str, Sequence[str]], +) -> List[str]: + """Parse a plugins specification into a list of plugin names.""" + # None means empty. + if specs is None: + return [] + # Workaround for #3899 - a submodule which happens to be called "pytest_plugins". + if isinstance(specs, types.ModuleType): + return [] + # Comma-separated list. + if isinstance(specs, str): + return specs.split(",") if specs else [] + # Direct specification. + if isinstance(specs, collections.abc.Sequence): + return list(specs) + raise UsageError( + "Plugins may be specified as a sequence or a ','-separated string of plugin names. Got: %r" + % specs + ) + + +class Notset: + def __repr__(self): + return "" + + +notset = Notset() + + +def _iter_rewritable_modules(package_files: Iterable[str]) -> Iterator[str]: + """Given an iterable of file names in a source distribution, return the "names" that should + be marked for assertion rewrite. + + For example the package "pytest_mock/__init__.py" should be added as "pytest_mock" in + the assertion rewrite mechanism. + + This function has to deal with dist-info based distributions and egg based distributions + (which are still very much in use for "editable" installs). + + Here are the file names as seen in a dist-info based distribution: + + pytest_mock/__init__.py + pytest_mock/_version.py + pytest_mock/plugin.py + pytest_mock.egg-info/PKG-INFO + + Here are the file names as seen in an egg based distribution: + + src/pytest_mock/__init__.py + src/pytest_mock/_version.py + src/pytest_mock/plugin.py + src/pytest_mock.egg-info/PKG-INFO + LICENSE + setup.py + + We have to take in account those two distribution flavors in order to determine which + names should be considered for assertion rewriting. + + More information: + https://github.com/pytest-dev/pytest-mock/issues/167 + """ + package_files = list(package_files) + seen_some = False + for fn in package_files: + is_simple_module = "/" not in fn and fn.endswith(".py") + is_package = fn.count("/") == 1 and fn.endswith("__init__.py") + if is_simple_module: + module_name, _ = os.path.splitext(fn) + # we ignore "setup.py" at the root of the distribution + # as well as editable installation finder modules made by setuptools + if module_name != "setup" and not module_name.startswith("__editable__"): + seen_some = True + yield module_name + elif is_package: + package_name = os.path.dirname(fn) + seen_some = True + yield package_name + + if not seen_some: + # At this point we did not find any packages or modules suitable for assertion + # rewriting, so we try again by stripping the first path component (to account for + # "src" based source trees for example). + # This approach lets us have the common case continue to be fast, as egg-distributions + # are rarer. 
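+        # For example, "src/pytest_mock/__init__.py" becomes
+        # "pytest_mock/__init__.py" after stripping the leading component,
+        # which the recursive call below then yields as "pytest_mock".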
+ new_package_files = [] + for fn in package_files: + parts = fn.split("/") + new_fn = "/".join(parts[1:]) + if new_fn: + new_package_files.append(new_fn) + if new_package_files: + yield from _iter_rewritable_modules(new_package_files) + + +@final +class Config: + """Access to configuration values, pluginmanager and plugin hooks. + + :param PytestPluginManager pluginmanager: + A pytest PluginManager. + + :param InvocationParams invocation_params: + Object containing parameters regarding the :func:`pytest.main` + invocation. + """ + + @final + @dataclasses.dataclass(frozen=True) + class InvocationParams: + """Holds parameters passed during :func:`pytest.main`. + + The object attributes are read-only. + + .. versionadded:: 5.1 + + .. note:: + + Note that the environment variable ``PYTEST_ADDOPTS`` and the ``addopts`` + ini option are handled by pytest, not being included in the ``args`` attribute. + + Plugins accessing ``InvocationParams`` must be aware of that. + """ + + args: Tuple[str, ...] + """The command-line arguments as passed to :func:`pytest.main`.""" + plugins: Optional[Sequence[Union[str, _PluggyPlugin]]] + """Extra plugins, might be `None`.""" + dir: Path + """The directory from which :func:`pytest.main` was invoked.""" + + def __init__( + self, + *, + args: Iterable[str], + plugins: Optional[Sequence[Union[str, _PluggyPlugin]]], + dir: Path, + ) -> None: + object.__setattr__(self, "args", tuple(args)) + object.__setattr__(self, "plugins", plugins) + object.__setattr__(self, "dir", dir) + + class ArgsSource(enum.Enum): + """Indicates the source of the test arguments. + + .. versionadded:: 7.2 + """ + + #: Command line arguments. + ARGS = enum.auto() + #: Invocation directory. + INVOCATION_DIR = enum.auto() + INCOVATION_DIR = INVOCATION_DIR # backwards compatibility alias + #: 'testpaths' configuration value. + TESTPATHS = enum.auto() + + def __init__( + self, + pluginmanager: PytestPluginManager, + *, + invocation_params: Optional[InvocationParams] = None, + ) -> None: + from .argparsing import FILE_OR_DIR + from .argparsing import Parser + + if invocation_params is None: + invocation_params = self.InvocationParams( + args=(), plugins=None, dir=Path.cwd() + ) + + self.option = argparse.Namespace() + """Access to command line option as attributes. + + :type: argparse.Namespace + """ + + self.invocation_params = invocation_params + """The parameters with which pytest was invoked. + + :type: InvocationParams + """ + + _a = FILE_OR_DIR + self._parser = Parser( + usage=f"%(prog)s [options] [{_a}] [{_a}] [...]", + processopt=self._processopt, + _ispytest=True, + ) + self.pluginmanager = pluginmanager + """The plugin manager handles plugin registration and hook invocation. + + :type: PytestPluginManager + """ + + self.stash = Stash() + """A place where plugins can store information on the config for their + own use. + + :type: Stash + """ + # Deprecated alias. Was never public. Can be removed in a few releases. 
+ self._store = self.stash + + self.trace = self.pluginmanager.trace.root.get("config") + self.hook: pluggy.HookRelay = PathAwareHookProxy(self.pluginmanager.hook) # type: ignore[assignment] + self._inicache: Dict[str, Any] = {} + self._override_ini: Sequence[str] = () + self._opt2dest: Dict[str, str] = {} + self._cleanup: List[Callable[[], None]] = [] + self.pluginmanager.register(self, "pytestconfig") + self._configured = False + self.hook.pytest_addoption.call_historic( + kwargs=dict(parser=self._parser, pluginmanager=self.pluginmanager) + ) + self.args_source = Config.ArgsSource.ARGS + self.args: List[str] = [] + + if TYPE_CHECKING: + from _pytest.cacheprovider import Cache + + self.cache: Optional[Cache] = None + + @property + def rootpath(self) -> Path: + """The path to the :ref:`rootdir `. + + :type: pathlib.Path + + .. versionadded:: 6.1 + """ + return self._rootpath + + @property + def inipath(self) -> Optional[Path]: + """The path to the :ref:`configfile `. + + :type: Optional[pathlib.Path] + + .. versionadded:: 6.1 + """ + return self._inipath + + def add_cleanup(self, func: Callable[[], None]) -> None: + """Add a function to be called when the config object gets out of + use (usually coinciding with pytest_unconfigure).""" + self._cleanup.append(func) + + def _do_configure(self) -> None: + assert not self._configured + self._configured = True + with warnings.catch_warnings(): + warnings.simplefilter("default") + self.hook.pytest_configure.call_historic(kwargs=dict(config=self)) + + def _ensure_unconfigure(self) -> None: + if self._configured: + self._configured = False + self.hook.pytest_unconfigure(config=self) + self.hook.pytest_configure._call_history = [] + while self._cleanup: + fin = self._cleanup.pop() + fin() + + def get_terminal_writer(self) -> TerminalWriter: + terminalreporter: Optional[TerminalReporter] = self.pluginmanager.get_plugin( + "terminalreporter" + ) + assert terminalreporter is not None + return terminalreporter._tw + + def pytest_cmdline_parse( + self, pluginmanager: PytestPluginManager, args: List[str] + ) -> "Config": + try: + self.parse(args) + except UsageError: + # Handle --version and --help here in a minimal fashion. + # This gets done via helpconfig normally, but its + # pytest_cmdline_main is not called in case of errors. + if getattr(self.option, "version", False) or "--version" in args: + from _pytest.helpconfig import showversion + + showversion(self) + elif ( + getattr(self.option, "help", False) or "--help" in args or "-h" in args + ): + self._parser._getparser().print_help() + sys.stdout.write( + "\nNOTE: displaying only minimal help due to UsageError.\n\n" + ) + + raise + + return self + + def notify_exception( + self, + excinfo: ExceptionInfo[BaseException], + option: Optional[argparse.Namespace] = None, + ) -> None: + if option and getattr(option, "fulltrace", False): + style: _TracebackStyle = "long" + else: + style = "native" + excrepr = excinfo.getrepr( + funcargs=True, showlocals=getattr(option, "showlocals", False), style=style + ) + res = self.hook.pytest_internalerror(excrepr=excrepr, excinfo=excinfo) + if not any(res): + for line in str(excrepr).split("\n"): + sys.stderr.write("INTERNALERROR> %s\n" % line) + sys.stderr.flush() + + def cwd_relative_nodeid(self, nodeid: str) -> str: + # nodeid's are relative to the rootpath, compute relative to cwd. 
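+        # e.g. with rootpath /repo and invocation dir /repo/tests (hypothetical
+        # paths), nodeid "tests/test_x.py::test_a" is reported as
+        # "test_x.py::test_a".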
+ if self.invocation_params.dir != self.rootpath: + fullpath = self.rootpath / nodeid + nodeid = bestrelpath(self.invocation_params.dir, fullpath) + return nodeid + + @classmethod + def fromdictargs(cls, option_dict, args) -> "Config": + """Constructor usable for subprocesses.""" + config = get_config(args) + config.option.__dict__.update(option_dict) + config.parse(args, addopts=False) + for x in config.option.plugins: + config.pluginmanager.consider_pluginarg(x) + return config + + def _processopt(self, opt: "Argument") -> None: + for name in opt._short_opts + opt._long_opts: + self._opt2dest[name] = opt.dest + + if hasattr(opt, "default"): + if not hasattr(self.option, opt.dest): + setattr(self.option, opt.dest, opt.default) + + @hookimpl(trylast=True) + def pytest_load_initial_conftests(self, early_config: "Config") -> None: + # We haven't fully parsed the command line arguments yet, so + # early_config.args it not set yet. But we need it for + # discovering the initial conftests. So "pre-run" the logic here. + # It will be done for real in `parse()`. + args, args_source = early_config._decide_args( + args=early_config.known_args_namespace.file_or_dir, + pyargs=early_config.known_args_namespace.pyargs, + testpaths=early_config.getini("testpaths"), + invocation_dir=early_config.invocation_params.dir, + rootpath=early_config.rootpath, + warn=False, + ) + self.pluginmanager._set_initial_conftests( + args=args, + pyargs=early_config.known_args_namespace.pyargs, + noconftest=early_config.known_args_namespace.noconftest, + rootpath=early_config.rootpath, + confcutdir=early_config.known_args_namespace.confcutdir, + invocation_dir=early_config.invocation_params.dir, + importmode=early_config.known_args_namespace.importmode, + consider_namespace_packages=early_config.getini( + "consider_namespace_packages" + ), + ) + + def _initini(self, args: Sequence[str]) -> None: + ns, unknown_args = self._parser.parse_known_and_unknown_args( + args, namespace=copy.copy(self.option) + ) + rootpath, inipath, inicfg = determine_setup( + inifile=ns.inifilename, + args=ns.file_or_dir + unknown_args, + rootdir_cmd_arg=ns.rootdir or None, + invocation_dir=self.invocation_params.dir, + ) + self._rootpath = rootpath + self._inipath = inipath + self.inicfg = inicfg + self._parser.extra_info["rootdir"] = str(self.rootpath) + self._parser.extra_info["inifile"] = str(self.inipath) + self._parser.addini("addopts", "Extra command line options", "args") + self._parser.addini("minversion", "Minimally required pytest version") + self._parser.addini( + "required_plugins", + "Plugins that must be present for pytest to run", + type="args", + default=[], + ) + self._override_ini = ns.override_ini or () + + def _consider_importhook(self, args: Sequence[str]) -> None: + """Install the PEP 302 import hook if using assertion rewriting. + + Needs to parse the --assert= option from the commandline + and find all the installed plugins to mark them for rewriting + by the importhook. 
+ """ + ns, unknown_args = self._parser.parse_known_and_unknown_args(args) + mode = getattr(ns, "assertmode", "plain") + if mode == "rewrite": + import _pytest.assertion + + try: + hook = _pytest.assertion.install_importhook(self) + except SystemError: + mode = "plain" + else: + self._mark_plugins_for_rewrite(hook) + self._warn_about_missing_assertion(mode) + + def _mark_plugins_for_rewrite(self, hook) -> None: + """Given an importhook, mark for rewrite any top-level + modules or packages in the distribution package for + all pytest plugins.""" + self.pluginmanager.rewrite_hook = hook + + if os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD"): + # We don't autoload from setuptools entry points, no need to continue. + return + + package_files = ( + str(file) + for dist in importlib.metadata.distributions() + if any(ep.group == "pytest11" for ep in dist.entry_points) + for file in dist.files or [] + ) + + for name in _iter_rewritable_modules(package_files): + hook.mark_rewrite(name) + + def _validate_args(self, args: List[str], via: str) -> List[str]: + """Validate known args.""" + self._parser._config_source_hint = via # type: ignore + try: + self._parser.parse_known_and_unknown_args( + args, namespace=copy.copy(self.option) + ) + finally: + del self._parser._config_source_hint # type: ignore + + return args + + def _decide_args( + self, + *, + args: List[str], + pyargs: bool, + testpaths: List[str], + invocation_dir: Path, + rootpath: Path, + warn: bool, + ) -> Tuple[List[str], ArgsSource]: + """Decide the args (initial paths/nodeids) to use given the relevant inputs. + + :param warn: Whether can issue warnings. + + :returns: The args and the args source. Guaranteed to be non-empty. + """ + if args: + source = Config.ArgsSource.ARGS + result = args + else: + if invocation_dir == rootpath: + source = Config.ArgsSource.TESTPATHS + if pyargs: + result = testpaths + else: + result = [] + for path in testpaths: + result.extend(sorted(glob.iglob(path, recursive=True))) + if testpaths and not result: + if warn: + warning_text = ( + "No files were found in testpaths; " + "consider removing or adjusting your testpaths configuration. " + "Searching recursively from the current directory instead." + ) + self.issue_config_time_warning( + PytestConfigWarning(warning_text), stacklevel=3 + ) + else: + result = [] + if not result: + source = Config.ArgsSource.INVOCATION_DIR + result = [str(invocation_dir)] + return result, source + + def _preparse(self, args: List[str], addopts: bool = True) -> None: + if addopts: + env_addopts = os.environ.get("PYTEST_ADDOPTS", "") + if len(env_addopts): + args[:] = ( + self._validate_args(shlex.split(env_addopts), "via PYTEST_ADDOPTS") + + args + ) + self._initini(args) + if addopts: + args[:] = ( + self._validate_args(self.getini("addopts"), "via addopts config") + args + ) + + self.known_args_namespace = self._parser.parse_known_args( + args, namespace=copy.copy(self.option) + ) + self._checkversion() + self._consider_importhook(args) + self.pluginmanager.consider_preparse(args, exclude_only=False) + if not os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD"): + # Don't autoload from setuptools entry point. Only explicitly specified + # plugins are going to be loaded. 
+ self.pluginmanager.load_setuptools_entrypoints("pytest11") + self.pluginmanager.consider_env() + + self.known_args_namespace = self._parser.parse_known_args( + args, namespace=copy.copy(self.known_args_namespace) + ) + + self._validate_plugins() + self._warn_about_skipped_plugins() + + if self.known_args_namespace.confcutdir is None: + if self.inipath is not None: + confcutdir = str(self.inipath.parent) + else: + confcutdir = str(self.rootpath) + self.known_args_namespace.confcutdir = confcutdir + try: + self.hook.pytest_load_initial_conftests( + early_config=self, args=args, parser=self._parser + ) + except ConftestImportFailure as e: + if self.known_args_namespace.help or self.known_args_namespace.version: + # we don't want to prevent --help/--version to work + # so just let is pass and print a warning at the end + self.issue_config_time_warning( + PytestConfigWarning(f"could not load initial conftests: {e.path}"), + stacklevel=2, + ) + else: + raise + + @hookimpl(wrapper=True) + def pytest_collection(self) -> Generator[None, object, object]: + # Validate invalid ini keys after collection is done so we take in account + # options added by late-loading conftest files. + try: + return (yield) + finally: + self._validate_config_options() + + def _checkversion(self) -> None: + import pytest + + minver = self.inicfg.get("minversion", None) + if minver: + # Imported lazily to improve start-up time. + from packaging.version import Version + + if not isinstance(minver, str): + raise pytest.UsageError( + "%s: 'minversion' must be a single value" % self.inipath + ) + + if Version(minver) > Version(pytest.__version__): + raise pytest.UsageError( + f"{self.inipath}: 'minversion' requires pytest-{minver}, actual pytest-{pytest.__version__}'" + ) + + def _validate_config_options(self) -> None: + for key in sorted(self._get_unknown_ini_keys()): + self._warn_or_fail_if_strict(f"Unknown config option: {key}\n") + + def _validate_plugins(self) -> None: + required_plugins = sorted(self.getini("required_plugins")) + if not required_plugins: + return + + # Imported lazily to improve start-up time. + from packaging.requirements import InvalidRequirement + from packaging.requirements import Requirement + from packaging.version import Version + + plugin_info = self.pluginmanager.list_plugin_distinfo() + plugin_dist_info = {dist.project_name: dist.version for _, dist in plugin_info} + + missing_plugins = [] + for required_plugin in required_plugins: + try: + req = Requirement(required_plugin) + except InvalidRequirement: + missing_plugins.append(required_plugin) + continue + + if req.name not in plugin_dist_info: + missing_plugins.append(required_plugin) + elif not req.specifier.contains( + Version(plugin_dist_info[req.name]), prereleases=True + ): + missing_plugins.append(required_plugin) + + if missing_plugins: + raise UsageError( + "Missing required plugins: {}".format(", ".join(missing_plugins)), + ) + + def _warn_or_fail_if_strict(self, message: str) -> None: + if self.known_args_namespace.strict_config: + raise UsageError(message) + + self.issue_config_time_warning(PytestConfigWarning(message), stacklevel=3) + + def _get_unknown_ini_keys(self) -> List[str]: + parser_inicfg = self._parser._inidict + return [name for name in self.inicfg if name not in parser_inicfg] + + def parse(self, args: List[str], addopts: bool = True) -> None: + # Parse given cmdline arguments into this config object. 
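+        # Called at most once per Config instance (typically via
+        # pytest_cmdline_parse above); the assert below enforces this.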
+ assert ( + self.args == [] + ), "can only parse cmdline args at most once per Config object" + self.hook.pytest_addhooks.call_historic( + kwargs=dict(pluginmanager=self.pluginmanager) + ) + self._preparse(args, addopts=addopts) + self._parser.after_preparse = True # type: ignore + try: + args = self._parser.parse_setoption( + args, self.option, namespace=self.option + ) + self.args, self.args_source = self._decide_args( + args=args, + pyargs=self.known_args_namespace.pyargs, + testpaths=self.getini("testpaths"), + invocation_dir=self.invocation_params.dir, + rootpath=self.rootpath, + warn=True, + ) + except PrintHelp: + pass + + def issue_config_time_warning(self, warning: Warning, stacklevel: int) -> None: + """Issue and handle a warning during the "configure" stage. + + During ``pytest_configure`` we can't capture warnings using the ``catch_warnings_for_item`` + function because it is not possible to have hook wrappers around ``pytest_configure``. + + This function is mainly intended for plugins that need to issue warnings during + ``pytest_configure`` (or similar stages). + + :param warning: The warning instance. + :param stacklevel: stacklevel forwarded to warnings.warn. + """ + if self.pluginmanager.is_blocked("warnings"): + return + + cmdline_filters = self.known_args_namespace.pythonwarnings or [] + config_filters = self.getini("filterwarnings") + + with warnings.catch_warnings(record=True) as records: + warnings.simplefilter("always", type(warning)) + apply_warning_filters(config_filters, cmdline_filters) + warnings.warn(warning, stacklevel=stacklevel) + + if records: + frame = sys._getframe(stacklevel - 1) + location = frame.f_code.co_filename, frame.f_lineno, frame.f_code.co_name + self.hook.pytest_warning_recorded.call_historic( + kwargs=dict( + warning_message=records[0], + when="config", + nodeid="", + location=location, + ) + ) + + def addinivalue_line(self, name: str, line: str) -> None: + """Add a line to an ini-file option. The option must have been + declared but might not yet be set in which case the line becomes + the first line in its value.""" + x = self.getini(name) + assert isinstance(x, list) + x.append(line) # modifies the cached list inline + + def getini(self, name: str): + """Return configuration value from an :ref:`ini file `. + + If a configuration value is not defined in an + :ref:`ini file `, then the ``default`` value provided while + registering the configuration through + :func:`parser.addini ` will be returned. + Please note that you can even provide ``None`` as a valid + default value. + + If ``default`` is not provided while registering using + :func:`parser.addini `, then a default value + based on the ``type`` parameter passed to + :func:`parser.addini ` will be returned. + The default values based on ``type`` are: + ``paths``, ``pathlist``, ``args`` and ``linelist`` : empty list ``[]`` + ``bool`` : ``False`` + ``string`` : empty string ``""`` + + If neither the ``default`` nor the ``type`` parameter is passed + while registering the configuration through + :func:`parser.addini `, then the configuration + is treated as a string and a default empty string '' is returned. + + If the specified name hasn't been registered through a prior + :func:`parser.addini ` call (usually from a + plugin), a ValueError is raised. + """ + try: + return self._inicache[name] + except KeyError: + self._inicache[name] = val = self._getini(name) + return val + + # Meant for easy monkeypatching by legacypath plugin. 
+ # Can be inlined back (with no cover removed) once legacypath is gone. + def _getini_unknown_type(self, name: str, type: str, value: Union[str, List[str]]): + msg = f"unknown configuration type: {type}" + raise ValueError(msg, value) # pragma: no cover + + def _getini(self, name: str): + try: + description, type, default = self._parser._inidict[name] + except KeyError as e: + raise ValueError(f"unknown configuration value: {name!r}") from e + override_value = self._get_override_ini_value(name) + if override_value is None: + try: + value = self.inicfg[name] + except KeyError: + return default + else: + value = override_value + # Coerce the values based on types. + # + # Note: some coercions are only required if we are reading from .ini files, because + # the file format doesn't contain type information, but when reading from toml we will + # get either str or list of str values (see _parse_ini_config_from_pyproject_toml). + # For example: + # + # ini: + # a_line_list = "tests acceptance" + # in this case, we need to split the string to obtain a list of strings. + # + # toml: + # a_line_list = ["tests", "acceptance"] + # in this case, we already have a list ready to use. + # + if type == "paths": + dp = ( + self.inipath.parent + if self.inipath is not None + else self.invocation_params.dir + ) + input_values = shlex.split(value) if isinstance(value, str) else value + return [dp / x for x in input_values] + elif type == "args": + return shlex.split(value) if isinstance(value, str) else value + elif type == "linelist": + if isinstance(value, str): + return [t for t in map(lambda x: x.strip(), value.split("\n")) if t] + else: + return value + elif type == "bool": + return _strtobool(str(value).strip()) + elif type == "string": + return value + elif type is None: + return value + else: + return self._getini_unknown_type(name, type, value) + + def _getconftest_pathlist(self, name: str, path: Path) -> Optional[List[Path]]: + try: + mod, relroots = self.pluginmanager._rget_with_confmod(name, path) + except KeyError: + return None + assert mod.__file__ is not None + modpath = Path(mod.__file__).parent + values: List[Path] = [] + for relroot in relroots: + if isinstance(relroot, os.PathLike): + relroot = Path(relroot) + else: + relroot = relroot.replace("/", os.sep) + relroot = absolutepath(modpath / relroot) + values.append(relroot) + return values + + def _get_override_ini_value(self, name: str) -> Optional[str]: + value = None + # override_ini is a list of "ini=value" options. + # Always use the last item if multiple values are set for same ini-name, + # e.g. -o foo=bar1 -o foo=bar2 will set foo to bar2. + for ini_config in self._override_ini: + try: + key, user_ini_value = ini_config.split("=", 1) + except ValueError as e: + raise UsageError( + f"-o/--override-ini expects option=value style (got: {ini_config!r})." + ) from e + else: + if key == name: + value = user_ini_value + return value + + def getoption(self, name: str, default=notset, skip: bool = False): + """Return command line option value. + + :param name: Name of the option. You may also specify + the literal ``--OPT`` option instead of the "dest" option name. + :param default: Default value if no option of that name exists. + :param skip: If True, raise pytest.skip if option does not exists + or has a None value. 
+ """ + name = self._opt2dest.get(name, name) + try: + val = getattr(self.option, name) + if val is None and skip: + raise AttributeError(name) + return val + except AttributeError as e: + if default is not notset: + return default + if skip: + import pytest + + pytest.skip(f"no {name!r} option found") + raise ValueError(f"no option named {name!r}") from e + + def getvalue(self, name: str, path=None): + """Deprecated, use getoption() instead.""" + return self.getoption(name) + + def getvalueorskip(self, name: str, path=None): + """Deprecated, use getoption(skip=True) instead.""" + return self.getoption(name, skip=True) + + #: Verbosity type for failed assertions (see :confval:`verbosity_assertions`). + VERBOSITY_ASSERTIONS: Final = "assertions" + #: Verbosity type for test case execution (see :confval:`verbosity_test_cases`). + VERBOSITY_TEST_CASES: Final = "test_cases" + _VERBOSITY_INI_DEFAULT: Final = "auto" + + def get_verbosity(self, verbosity_type: Optional[str] = None) -> int: + r"""Retrieve the verbosity level for a fine-grained verbosity type. + + :param verbosity_type: Verbosity type to get level for. If a level is + configured for the given type, that value will be returned. If the + given type is not a known verbosity type, the global verbosity + level will be returned. If the given type is None (default), the + global verbosity level will be returned. + + To configure a level for a fine-grained verbosity type, the + configuration file should have a setting for the configuration name + and a numeric value for the verbosity level. A special value of "auto" + can be used to explicitly use the global verbosity level. + + Example: + .. code-block:: ini + + # content of pytest.ini + [pytest] + verbosity_assertions = 2 + + .. code-block:: console + + pytest -v + + .. code-block:: python + + print(config.get_verbosity()) # 1 + print(config.get_verbosity(Config.VERBOSITY_ASSERTIONS)) # 2 + """ + global_level = self.option.verbose + assert isinstance(global_level, int) + if verbosity_type is None: + return global_level + + ini_name = Config._verbosity_ini_name(verbosity_type) + if ini_name not in self._parser._inidict: + return global_level + + level = self.getini(ini_name) + if level == Config._VERBOSITY_INI_DEFAULT: + return global_level + + return int(level) + + @staticmethod + def _verbosity_ini_name(verbosity_type: str) -> str: + return f"verbosity_{verbosity_type}" + + @staticmethod + def _add_verbosity_ini(parser: "Parser", verbosity_type: str, help: str) -> None: + """Add a output verbosity configuration option for the given output type. + + :param parser: Parser for command line arguments and ini-file values. + :param verbosity_type: Fine-grained verbosity category. + :param help: Description of the output this type controls. + + The value should be retrieved via a call to + :py:func:`config.get_verbosity(type) `. + """ + parser.addini( + Config._verbosity_ini_name(verbosity_type), + help=help, + type="string", + default=Config._VERBOSITY_INI_DEFAULT, + ) + + def _warn_about_missing_assertion(self, mode: str) -> None: + if not _assertion_supported(): + if mode == "plain": + warning_text = ( + "ASSERTIONS ARE NOT EXECUTED" + " and FAILING TESTS WILL PASS. Are you" + " using python -O?" 
+ ) + else: + warning_text = ( + "assertions not in test modules or" + " plugins will be ignored" + " because assert statements are not executed " + "by the underlying Python interpreter " + "(are you using python -O?)\n" + ) + self.issue_config_time_warning( + PytestConfigWarning(warning_text), + stacklevel=3, + ) + + def _warn_about_skipped_plugins(self) -> None: + for module_name, msg in self.pluginmanager.skipped_plugins: + self.issue_config_time_warning( + PytestConfigWarning(f"skipped plugin {module_name!r}: {msg}"), + stacklevel=2, + ) + + +def _assertion_supported() -> bool: + try: + assert False + except AssertionError: + return True + else: + return False # type: ignore[unreachable] + + +def create_terminal_writer( + config: Config, file: Optional[TextIO] = None +) -> TerminalWriter: + """Create a TerminalWriter instance configured according to the options + in the config object. + + Every code which requires a TerminalWriter object and has access to a + config object should use this function. + """ + tw = TerminalWriter(file=file) + + if config.option.color == "yes": + tw.hasmarkup = True + elif config.option.color == "no": + tw.hasmarkup = False + + if config.option.code_highlight == "yes": + tw.code_highlight = True + elif config.option.code_highlight == "no": + tw.code_highlight = False + + return tw + + +def _strtobool(val: str) -> bool: + """Convert a string representation of truth to True or False. + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + + .. note:: Copied from distutils.util. + """ + val = val.lower() + if val in ("y", "yes", "t", "true", "on", "1"): + return True + elif val in ("n", "no", "f", "false", "off", "0"): + return False + else: + raise ValueError(f"invalid truth value {val!r}") + + +@lru_cache(maxsize=50) +def parse_warning_filter( + arg: str, *, escape: bool +) -> Tuple["warnings._ActionKind", str, Type[Warning], str, int]: + """Parse a warnings filter string. + + This is copied from warnings._setoption with the following changes: + + * Does not apply the filter. + * Escaping is optional. + * Raises UsageError so we get nice error messages on failure. 
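+
+    For example, ``"ignore::DeprecationWarning:mymod"`` (a hypothetical module
+    name) parses with ``escape=False`` to
+    ``("ignore", "", DeprecationWarning, "mymod", 0)``.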
+ """ + __tracebackhide__ = True + error_template = dedent( + f"""\ + while parsing the following warning configuration: + + {arg} + + This error occurred: + + {{error}} + """ + ) + + parts = arg.split(":") + if len(parts) > 5: + doc_url = ( + "https://docs.python.org/3/library/warnings.html#describing-warning-filters" + ) + error = dedent( + f"""\ + Too many fields ({len(parts)}), expected at most 5 separated by colons: + + action:message:category:module:line + + For more information please consult: {doc_url} + """ + ) + raise UsageError(error_template.format(error=error)) + + while len(parts) < 5: + parts.append("") + action_, message, category_, module, lineno_ = (s.strip() for s in parts) + try: + action: "warnings._ActionKind" = warnings._getaction(action_) # type: ignore[attr-defined] + except warnings._OptionError as e: + raise UsageError(error_template.format(error=str(e))) from None + try: + category: Type[Warning] = _resolve_warning_category(category_) + except Exception: + exc_info = ExceptionInfo.from_current() + exception_text = exc_info.getrepr(style="native") + raise UsageError(error_template.format(error=exception_text)) from None + if message and escape: + message = re.escape(message) + if module and escape: + module = re.escape(module) + r"\Z" + if lineno_: + try: + lineno = int(lineno_) + if lineno < 0: + raise ValueError("number is negative") + except ValueError as e: + raise UsageError( + error_template.format(error=f"invalid lineno {lineno_!r}: {e}") + ) from None + else: + lineno = 0 + return action, message, category, module, lineno + + +def _resolve_warning_category(category: str) -> Type[Warning]: + """ + Copied from warnings._getcategory, but changed so it lets exceptions (specially ImportErrors) + propagate so we can get access to their tracebacks (#9218). + """ + __tracebackhide__ = True + if not category: + return Warning + + if "." not in category: + import builtins as m + + klass = category + else: + module, _, klass = category.rpartition(".") + m = __import__(module, None, None, [klass]) + cat = getattr(m, klass) + if not issubclass(cat, Warning): + raise UsageError(f"{cat} is not a Warning subclass") + return cast(Type[Warning], cat) + + +def apply_warning_filters( + config_filters: Iterable[str], cmdline_filters: Iterable[str] +) -> None: + """Applies pytest-configured filters to the warnings module""" + # Filters should have this precedence: cmdline options, config. + # Filters should be applied in the inverse order of precedence. 
+ for arg in config_filters: + warnings.filterwarnings(*parse_warning_filter(arg, escape=False)) + + for arg in cmdline_filters: + warnings.filterwarnings(*parse_warning_filter(arg, escape=True)) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/config/argparsing.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/config/argparsing.py new file mode 100644 index 000000000..9006351af --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/config/argparsing.py @@ -0,0 +1,552 @@ +# mypy: allow-untyped-defs +import argparse +from gettext import gettext +import os +import sys +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import final +from typing import List +from typing import Literal +from typing import Mapping +from typing import NoReturn +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import Union + +import _pytest._io +from _pytest.config.exceptions import UsageError +from _pytest.deprecated import check_ispytest + + +FILE_OR_DIR = "file_or_dir" + + +class NotSet: + def __repr__(self) -> str: + return "" + + +NOT_SET = NotSet() + + +@final +class Parser: + """Parser for command line arguments and ini-file values. + + :ivar extra_info: Dict of generic param -> value to display in case + there's an error processing the command line arguments. + """ + + prog: Optional[str] = None + + def __init__( + self, + usage: Optional[str] = None, + processopt: Optional[Callable[["Argument"], None]] = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._anonymous = OptionGroup("Custom options", parser=self, _ispytest=True) + self._groups: List[OptionGroup] = [] + self._processopt = processopt + self._usage = usage + self._inidict: Dict[str, Tuple[str, Optional[str], Any]] = {} + self._ininames: List[str] = [] + self.extra_info: Dict[str, Any] = {} + + def processoption(self, option: "Argument") -> None: + if self._processopt: + if option.dest: + self._processopt(option) + + def getgroup( + self, name: str, description: str = "", after: Optional[str] = None + ) -> "OptionGroup": + """Get (or create) a named option Group. + + :param name: Name of the option group. + :param description: Long description for --help output. + :param after: Name of another group, used for ordering --help output. + :returns: The option group. + + The returned group object has an ``addoption`` method with the same + signature as :func:`parser.addoption ` but + will be shown in the respective group in the output of + ``pytest --help``. + """ + for group in self._groups: + if group.name == name: + return group + group = OptionGroup(name, description, parser=self, _ispytest=True) + i = 0 + for i, grp in enumerate(self._groups): + if grp.name == after: + break + self._groups.insert(i + 1, group) + return group + + def addoption(self, *opts: str, **attrs: Any) -> None: + """Register a command line option. + + :param opts: + Option names, can be short or long options. + :param attrs: + Same attributes as the argparse library's :meth:`add_argument() + ` function accepts. + + After command line parsing, options are available on the pytest config + object via ``config.option.NAME`` where ``NAME`` is usually set + by passing a ``dest`` attribute, for example + ``addoption("--long", dest="NAME", ...)``. 
+ """ + self._anonymous.addoption(*opts, **attrs) + + def parse( + self, + args: Sequence[Union[str, "os.PathLike[str]"]], + namespace: Optional[argparse.Namespace] = None, + ) -> argparse.Namespace: + from _pytest._argcomplete import try_argcomplete + + self.optparser = self._getparser() + try_argcomplete(self.optparser) + strargs = [os.fspath(x) for x in args] + return self.optparser.parse_args(strargs, namespace=namespace) + + def _getparser(self) -> "MyOptionParser": + from _pytest._argcomplete import filescompleter + + optparser = MyOptionParser(self, self.extra_info, prog=self.prog) + groups = [*self._groups, self._anonymous] + for group in groups: + if group.options: + desc = group.description or group.name + arggroup = optparser.add_argument_group(desc) + for option in group.options: + n = option.names() + a = option.attrs() + arggroup.add_argument(*n, **a) + file_or_dir_arg = optparser.add_argument(FILE_OR_DIR, nargs="*") + # bash like autocompletion for dirs (appending '/') + # Type ignored because typeshed doesn't know about argcomplete. + file_or_dir_arg.completer = filescompleter # type: ignore + return optparser + + def parse_setoption( + self, + args: Sequence[Union[str, "os.PathLike[str]"]], + option: argparse.Namespace, + namespace: Optional[argparse.Namespace] = None, + ) -> List[str]: + parsedoption = self.parse(args, namespace=namespace) + for name, value in parsedoption.__dict__.items(): + setattr(option, name, value) + return cast(List[str], getattr(parsedoption, FILE_OR_DIR)) + + def parse_known_args( + self, + args: Sequence[Union[str, "os.PathLike[str]"]], + namespace: Optional[argparse.Namespace] = None, + ) -> argparse.Namespace: + """Parse the known arguments at this point. + + :returns: An argparse namespace object. + """ + return self.parse_known_and_unknown_args(args, namespace=namespace)[0] + + def parse_known_and_unknown_args( + self, + args: Sequence[Union[str, "os.PathLike[str]"]], + namespace: Optional[argparse.Namespace] = None, + ) -> Tuple[argparse.Namespace, List[str]]: + """Parse the known arguments at this point, and also return the + remaining unknown arguments. + + :returns: + A tuple containing an argparse namespace object for the known + arguments, and a list of the unknown arguments. + """ + optparser = self._getparser() + strargs = [os.fspath(x) for x in args] + return optparser.parse_known_args(strargs, namespace=namespace) + + def addini( + self, + name: str, + help: str, + type: Optional[ + Literal["string", "paths", "pathlist", "args", "linelist", "bool"] + ] = None, + default: Any = NOT_SET, + ) -> None: + """Register an ini-file option. + + :param name: + Name of the ini-variable. + :param type: + Type of the variable. Can be: + + * ``string``: a string + * ``bool``: a boolean + * ``args``: a list of strings, separated as in a shell + * ``linelist``: a list of strings, separated by line breaks + * ``paths``: a list of :class:`pathlib.Path`, separated as in a shell + * ``pathlist``: a list of ``py.path``, separated as in a shell + + For ``paths`` and ``pathlist`` types, they are considered relative to the ini-file. + In case the execution is happening without an ini-file defined, + they will be considered relative to the current working directory (for example with ``--override-ini``). + + .. versionadded:: 7.0 + The ``paths`` variable type. + + .. versionadded:: 8.1 + Use the current working directory to resolve ``paths`` and ``pathlist`` in the absence of an ini-file. + + Defaults to ``string`` if ``None`` or not passed. 
+ :param default: + Default value if no ini-file option exists but is queried. + + The value of ini-variables can be retrieved via a call to + :py:func:`config.getini(name) `. + """ + assert type in (None, "string", "paths", "pathlist", "args", "linelist", "bool") + if default is NOT_SET: + default = get_ini_default_for_type(type) + + self._inidict[name] = (help, type, default) + self._ininames.append(name) + + +def get_ini_default_for_type( + type: Optional[Literal["string", "paths", "pathlist", "args", "linelist", "bool"]], +) -> Any: + """ + Used by addini to get the default value for a given ini-option type, when + default is not supplied. + """ + if type is None: + return "" + elif type in ("paths", "pathlist", "args", "linelist"): + return [] + elif type == "bool": + return False + else: + return "" + + +class ArgumentError(Exception): + """Raised if an Argument instance is created with invalid or + inconsistent arguments.""" + + def __init__(self, msg: str, option: Union["Argument", str]) -> None: + self.msg = msg + self.option_id = str(option) + + def __str__(self) -> str: + if self.option_id: + return f"option {self.option_id}: {self.msg}" + else: + return self.msg + + +class Argument: + """Class that mimics the necessary behaviour of optparse.Option. + + It's currently a least effort implementation and ignoring choices + and integer prefixes. + + https://docs.python.org/3/library/optparse.html#optparse-standard-option-types + """ + + def __init__(self, *names: str, **attrs: Any) -> None: + """Store params in private vars for use in add_argument.""" + self._attrs = attrs + self._short_opts: List[str] = [] + self._long_opts: List[str] = [] + try: + self.type = attrs["type"] + except KeyError: + pass + try: + # Attribute existence is tested in Config._processopt. + self.default = attrs["default"] + except KeyError: + pass + self._set_opt_strings(names) + dest: Optional[str] = attrs.get("dest") + if dest: + self.dest = dest + elif self._long_opts: + self.dest = self._long_opts[0][2:].replace("-", "_") + else: + try: + self.dest = self._short_opts[0][1:] + except IndexError as e: + self.dest = "???" # Needed for the error repr. + raise ArgumentError("need a long or short option", self) from e + + def names(self) -> List[str]: + return self._short_opts + self._long_opts + + def attrs(self) -> Mapping[str, Any]: + # Update any attributes set by processopt. + attrs = "default dest help".split() + attrs.append(self.dest) + for attr in attrs: + try: + self._attrs[attr] = getattr(self, attr) + except AttributeError: + pass + return self._attrs + + def _set_opt_strings(self, opts: Sequence[str]) -> None: + """Directly from optparse. + + Might not be necessary as this is passed to argparse later on. 
+ """ + for opt in opts: + if len(opt) < 2: + raise ArgumentError( + "invalid option string %r: " + "must be at least two characters long" % opt, + self, + ) + elif len(opt) == 2: + if not (opt[0] == "-" and opt[1] != "-"): + raise ArgumentError( + "invalid short option string %r: " + "must be of the form -x, (x any non-dash char)" % opt, + self, + ) + self._short_opts.append(opt) + else: + if not (opt[0:2] == "--" and opt[2] != "-"): + raise ArgumentError( + "invalid long option string %r: " + "must start with --, followed by non-dash" % opt, + self, + ) + self._long_opts.append(opt) + + def __repr__(self) -> str: + args: List[str] = [] + if self._short_opts: + args += ["_short_opts: " + repr(self._short_opts)] + if self._long_opts: + args += ["_long_opts: " + repr(self._long_opts)] + args += ["dest: " + repr(self.dest)] + if hasattr(self, "type"): + args += ["type: " + repr(self.type)] + if hasattr(self, "default"): + args += ["default: " + repr(self.default)] + return "Argument({})".format(", ".join(args)) + + +class OptionGroup: + """A group of options shown in its own section.""" + + def __init__( + self, + name: str, + description: str = "", + parser: Optional[Parser] = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self.name = name + self.description = description + self.options: List[Argument] = [] + self.parser = parser + + def addoption(self, *opts: str, **attrs: Any) -> None: + """Add an option to this group. + + If a shortened version of a long option is specified, it will + be suppressed in the help. ``addoption('--twowords', '--two-words')`` + results in help showing ``--two-words`` only, but ``--twowords`` gets + accepted **and** the automatic destination is in ``args.twowords``. + + :param opts: + Option names, can be short or long options. + :param attrs: + Same attributes as the argparse library's :meth:`add_argument() + ` function accepts. + """ + conflict = set(opts).intersection( + name for opt in self.options for name in opt.names() + ) + if conflict: + raise ValueError("option names %s already added" % conflict) + option = Argument(*opts, **attrs) + self._addoption_instance(option, shortupper=False) + + def _addoption(self, *opts: str, **attrs: Any) -> None: + option = Argument(*opts, **attrs) + self._addoption_instance(option, shortupper=True) + + def _addoption_instance(self, option: "Argument", shortupper: bool = False) -> None: + if not shortupper: + for opt in option._short_opts: + if opt[0] == "-" and opt[1].islower(): + raise ValueError("lowercase shortoptions reserved") + if self.parser: + self.parser.processoption(option) + self.options.append(option) + + +class MyOptionParser(argparse.ArgumentParser): + def __init__( + self, + parser: Parser, + extra_info: Optional[Dict[str, Any]] = None, + prog: Optional[str] = None, + ) -> None: + self._parser = parser + super().__init__( + prog=prog, + usage=parser._usage, + add_help=False, + formatter_class=DropShorterLongHelpFormatter, + allow_abbrev=False, + fromfile_prefix_chars="@", + ) + # extra_info is a dict of (param -> value) to display if there's + # an usage error to provide more contextual information to the user. 
+ self.extra_info = extra_info if extra_info else {} + + def error(self, message: str) -> NoReturn: + """Transform argparse error message into UsageError.""" + msg = f"{self.prog}: error: {message}" + + if hasattr(self._parser, "_config_source_hint"): + msg = f"{msg} ({self._parser._config_source_hint})" + + raise UsageError(self.format_usage() + msg) + + # Type ignored because typeshed has a very complex type in the superclass. + def parse_args( # type: ignore + self, + args: Optional[Sequence[str]] = None, + namespace: Optional[argparse.Namespace] = None, + ) -> argparse.Namespace: + """Allow splitting of positional arguments.""" + parsed, unrecognized = self.parse_known_args(args, namespace) + if unrecognized: + for arg in unrecognized: + if arg and arg[0] == "-": + lines = ["unrecognized arguments: %s" % (" ".join(unrecognized))] + for k, v in sorted(self.extra_info.items()): + lines.append(f" {k}: {v}") + self.error("\n".join(lines)) + getattr(parsed, FILE_OR_DIR).extend(unrecognized) + return parsed + + if sys.version_info < (3, 9): # pragma: no cover + # Backport of https://github.com/python/cpython/pull/14316 so we can + # disable long --argument abbreviations without breaking short flags. + def _parse_optional( + self, arg_string: str + ) -> Optional[Tuple[Optional[argparse.Action], str, Optional[str]]]: + if not arg_string: + return None + if arg_string[0] not in self.prefix_chars: + return None + if arg_string in self._option_string_actions: + action = self._option_string_actions[arg_string] + return action, arg_string, None + if len(arg_string) == 1: + return None + if "=" in arg_string: + option_string, explicit_arg = arg_string.split("=", 1) + if option_string in self._option_string_actions: + action = self._option_string_actions[option_string] + return action, option_string, explicit_arg + if self.allow_abbrev or not arg_string.startswith("--"): + option_tuples = self._get_option_tuples(arg_string) + if len(option_tuples) > 1: + msg = gettext( + "ambiguous option: %(option)s could match %(matches)s" + ) + options = ", ".join(option for _, option, _ in option_tuples) + self.error(msg % {"option": arg_string, "matches": options}) + elif len(option_tuples) == 1: + (option_tuple,) = option_tuples + return option_tuple + if self._negative_number_matcher.match(arg_string): + if not self._has_negative_number_optionals: + return None + if " " in arg_string: + return None + return None, arg_string, None + + +class DropShorterLongHelpFormatter(argparse.HelpFormatter): + """Shorten help for long options that differ only in extra hyphens. + + - Collapse **long** options that are the same except for extra hyphens. + - Shortcut if there are only two options and one of them is a short one. + - Cache result on the action object as this is called at least 2 times. + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + # Use more accurate terminal width. 
+ if "width" not in kwargs: + kwargs["width"] = _pytest._io.get_terminal_width() + super().__init__(*args, **kwargs) + + def _format_action_invocation(self, action: argparse.Action) -> str: + orgstr = super()._format_action_invocation(action) + if orgstr and orgstr[0] != "-": # only optional arguments + return orgstr + res: Optional[str] = getattr(action, "_formatted_action_invocation", None) + if res: + return res + options = orgstr.split(", ") + if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2): + # a shortcut for '-h, --help' or '--abc', '-a' + action._formatted_action_invocation = orgstr # type: ignore + return orgstr + return_list = [] + short_long: Dict[str, str] = {} + for option in options: + if len(option) == 2 or option[2] == " ": + continue + if not option.startswith("--"): + raise ArgumentError( + 'long optional argument without "--": [%s]' % (option), option + ) + xxoption = option[2:] + shortened = xxoption.replace("-", "") + if shortened not in short_long or len(short_long[shortened]) < len( + xxoption + ): + short_long[shortened] = xxoption + # now short_long has been filled out to the longest with dashes + # **and** we keep the right option ordering from add_argument + for option in options: + if len(option) == 2 or option[2] == " ": + return_list.append(option) + if option[2:] == short_long.get(option.replace("-", "")): + return_list.append(option.replace(" ", "=", 1)) + formatted_action_invocation = ", ".join(return_list) + action._formatted_action_invocation = formatted_action_invocation # type: ignore + return formatted_action_invocation + + def _split_lines(self, text, width): + """Wrap lines after splitting on original newlines. + + This allows to have explicit line breaks in the help text. + """ + import textwrap + + lines = [] + for line in text.splitlines(): + lines.extend(textwrap.wrap(line.strip(), width)) + return lines diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/config/compat.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/config/compat.py new file mode 100644 index 000000000..2856d85d1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/config/compat.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +import functools +from pathlib import Path +from typing import Any +from typing import Mapping +import warnings + +import pluggy + +from ..compat import LEGACY_PATH +from ..compat import legacy_path +from ..deprecated import HOOK_LEGACY_PATH_ARG + + +# hookname: (Path, LEGACY_PATH) +imply_paths_hooks: Mapping[str, tuple[str, str]] = { + "pytest_ignore_collect": ("collection_path", "path"), + "pytest_collect_file": ("file_path", "path"), + "pytest_pycollect_makemodule": ("module_path", "path"), + "pytest_report_header": ("start_path", "startdir"), + "pytest_report_collectionfinish": ("start_path", "startdir"), +} + + +def _check_path(path: Path, fspath: LEGACY_PATH) -> None: + if Path(fspath) != path: + raise ValueError( + f"Path({fspath!r}) != {path!r}\n" + "if both path and fspath are given they need to be equal" + ) + + +class PathAwareHookProxy: + """ + this helper wraps around hook callers + until pluggy supports fixingcalls, this one will do + + it currently doesn't return full hook caller proxies for fixed hooks, + this may have to be changed later depending on bugs + """ + + def __init__(self, hook_relay: pluggy.HookRelay) -> None: + self._hook_relay = hook_relay + + def __dir__(self) -> list[str]: + return dir(self._hook_relay) + + def __getattr__(self, key: 
str) -> pluggy.HookCaller: + hook: pluggy.HookCaller = getattr(self._hook_relay, key) + if key not in imply_paths_hooks: + self.__dict__[key] = hook + return hook + else: + path_var, fspath_var = imply_paths_hooks[key] + + @functools.wraps(hook) + def fixed_hook(**kw: Any) -> Any: + path_value: Path | None = kw.pop(path_var, None) + fspath_value: LEGACY_PATH | None = kw.pop(fspath_var, None) + if fspath_value is not None: + warnings.warn( + HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg=fspath_var, pathlib_path_arg=path_var + ), + stacklevel=2, + ) + if path_value is not None: + if fspath_value is not None: + _check_path(path_value, fspath_value) + else: + fspath_value = legacy_path(path_value) + else: + assert fspath_value is not None + path_value = Path(fspath_value) + + kw[path_var] = path_value + kw[fspath_var] = fspath_value + return hook(**kw) + + fixed_hook.name = hook.name # type: ignore[attr-defined] + fixed_hook.spec = hook.spec # type: ignore[attr-defined] + fixed_hook.__name__ = key + self.__dict__[key] = fixed_hook + return fixed_hook # type: ignore[return-value] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/config/exceptions.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/config/exceptions.py new file mode 100644 index 000000000..4031ea732 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/config/exceptions.py @@ -0,0 +1,11 @@ +from typing import final + + +@final +class UsageError(Exception): + """Error in pytest usage or invocation.""" + + +class PrintHelp(Exception): + """Raised when pytest should print its help to skip the rest of the + argument parsing and validation.""" diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/config/findpaths.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/config/findpaths.py new file mode 100644 index 000000000..9909376de --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/config/findpaths.py @@ -0,0 +1,231 @@ +import os +from pathlib import Path +import sys +from typing import Dict +from typing import Iterable +from typing import List +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import Union + +import iniconfig + +from .exceptions import UsageError +from _pytest.outcomes import fail +from _pytest.pathlib import absolutepath +from _pytest.pathlib import commonpath +from _pytest.pathlib import safe_exists + + +def _parse_ini_config(path: Path) -> iniconfig.IniConfig: + """Parse the given generic '.ini' file using legacy IniConfig parser, returning + the parsed object. + + Raise UsageError if the file cannot be parsed. + """ + try: + return iniconfig.IniConfig(str(path)) + except iniconfig.ParseError as exc: + raise UsageError(str(exc)) from exc + + +def load_config_dict_from_file( + filepath: Path, +) -> Optional[Dict[str, Union[str, List[str]]]]: + """Load pytest configuration from the given file path, if supported. + + Return None if the file does not contain valid pytest configuration. + """ + # Configuration from ini files are obtained from the [pytest] section, if present. + if filepath.suffix == ".ini": + iniconfig = _parse_ini_config(filepath) + + if "pytest" in iniconfig: + return dict(iniconfig["pytest"].items()) + else: + # "pytest.ini" files are always the source of configuration, even if empty. + if filepath.name == "pytest.ini": + return {} + + # '.cfg' files are considered if they contain a "[tool:pytest]" section. 
+ elif filepath.suffix == ".cfg": + iniconfig = _parse_ini_config(filepath) + + if "tool:pytest" in iniconfig.sections: + return dict(iniconfig["tool:pytest"].items()) + elif "pytest" in iniconfig.sections: + # If a setup.cfg contains a "[pytest]" section, we raise a failure to indicate users that + # plain "[pytest]" sections in setup.cfg files is no longer supported (#3086). + fail(CFG_PYTEST_SECTION.format(filename="setup.cfg"), pytrace=False) + + # '.toml' files are considered if they contain a [tool.pytest.ini_options] table. + elif filepath.suffix == ".toml": + if sys.version_info >= (3, 11): + import tomllib + else: + import tomli as tomllib + + toml_text = filepath.read_text(encoding="utf-8") + try: + config = tomllib.loads(toml_text) + except tomllib.TOMLDecodeError as exc: + raise UsageError(f"{filepath}: {exc}") from exc + + result = config.get("tool", {}).get("pytest", {}).get("ini_options", None) + if result is not None: + # TOML supports richer data types than ini files (strings, arrays, floats, ints, etc), + # however we need to convert all scalar values to str for compatibility with the rest + # of the configuration system, which expects strings only. + def make_scalar(v: object) -> Union[str, List[str]]: + return v if isinstance(v, list) else str(v) + + return {k: make_scalar(v) for k, v in result.items()} + + return None + + +def locate_config( + invocation_dir: Path, + args: Iterable[Path], +) -> Tuple[Optional[Path], Optional[Path], Dict[str, Union[str, List[str]]]]: + """Search in the list of arguments for a valid ini-file for pytest, + and return a tuple of (rootdir, inifile, cfg-dict).""" + config_names = [ + "pytest.ini", + ".pytest.ini", + "pyproject.toml", + "tox.ini", + "setup.cfg", + ] + args = [x for x in args if not str(x).startswith("-")] + if not args: + args = [invocation_dir] + found_pyproject_toml: Optional[Path] = None + for arg in args: + argpath = absolutepath(arg) + for base in (argpath, *argpath.parents): + for config_name in config_names: + p = base / config_name + if p.is_file(): + if p.name == "pyproject.toml" and found_pyproject_toml is None: + found_pyproject_toml = p + ini_config = load_config_dict_from_file(p) + if ini_config is not None: + return base, p, ini_config + if found_pyproject_toml is not None: + return found_pyproject_toml.parent, found_pyproject_toml, {} + return None, None, {} + + +def get_common_ancestor( + invocation_dir: Path, + paths: Iterable[Path], +) -> Path: + common_ancestor: Optional[Path] = None + for path in paths: + if not path.exists(): + continue + if common_ancestor is None: + common_ancestor = path + else: + if common_ancestor in path.parents or path == common_ancestor: + continue + elif path in common_ancestor.parents: + common_ancestor = path + else: + shared = commonpath(path, common_ancestor) + if shared is not None: + common_ancestor = shared + if common_ancestor is None: + common_ancestor = invocation_dir + elif common_ancestor.is_file(): + common_ancestor = common_ancestor.parent + return common_ancestor + + +def get_dirs_from_args(args: Iterable[str]) -> List[Path]: + def is_option(x: str) -> bool: + return x.startswith("-") + + def get_file_part_from_node_id(x: str) -> str: + return x.split("::")[0] + + def get_dir_from_path(path: Path) -> Path: + if path.is_dir(): + return path + return path.parent + + # These look like paths but may not exist + possible_paths = ( + absolutepath(get_file_part_from_node_id(arg)) + for arg in args + if not is_option(arg) + ) + + return [get_dir_from_path(path) for path in 
possible_paths if safe_exists(path)] + + +CFG_PYTEST_SECTION = "[pytest] section in {filename} files is no longer supported, change to [tool:pytest] instead." + + +def determine_setup( + *, + inifile: Optional[str], + args: Sequence[str], + rootdir_cmd_arg: Optional[str], + invocation_dir: Path, +) -> Tuple[Path, Optional[Path], Dict[str, Union[str, List[str]]]]: + """Determine the rootdir, inifile and ini configuration values from the + command line arguments. + + :param inifile: + The `--inifile` command line argument, if given. + :param args: + The free command line arguments. + :param rootdir_cmd_arg: + The `--rootdir` command line argument, if given. + :param invocation_dir: + The working directory when pytest was invoked. + """ + rootdir = None + dirs = get_dirs_from_args(args) + if inifile: + inipath_ = absolutepath(inifile) + inipath: Optional[Path] = inipath_ + inicfg = load_config_dict_from_file(inipath_) or {} + if rootdir_cmd_arg is None: + rootdir = inipath_.parent + else: + ancestor = get_common_ancestor(invocation_dir, dirs) + rootdir, inipath, inicfg = locate_config(invocation_dir, [ancestor]) + if rootdir is None and rootdir_cmd_arg is None: + for possible_rootdir in (ancestor, *ancestor.parents): + if (possible_rootdir / "setup.py").is_file(): + rootdir = possible_rootdir + break + else: + if dirs != [ancestor]: + rootdir, inipath, inicfg = locate_config(invocation_dir, dirs) + if rootdir is None: + rootdir = get_common_ancestor( + invocation_dir, [invocation_dir, ancestor] + ) + if is_fs_root(rootdir): + rootdir = ancestor + if rootdir_cmd_arg: + rootdir = absolutepath(os.path.expandvars(rootdir_cmd_arg)) + if not rootdir.is_dir(): + raise UsageError( + f"Directory '{rootdir}' not found. Check your '--rootdir' option." + ) + assert rootdir is not None + return rootdir, inipath, inicfg or {} + + +def is_fs_root(p: Path) -> bool: + r""" + Return True if the given path is pointing to the root of the + file system ("/" on Unix and "C:\\" on Windows for example). 
+ """ + return os.path.splitdrive(str(p))[1] == os.sep diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/debugging.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/debugging.py new file mode 100644 index 000000000..6ed0c5c7a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/debugging.py @@ -0,0 +1,392 @@ +# mypy: allow-untyped-defs +"""Interactive debugging with PDB, the Python Debugger.""" + +import argparse +import functools +import sys +import types +from typing import Any +from typing import Callable +from typing import Generator +from typing import List +from typing import Optional +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import Union +import unittest + +from _pytest import outcomes +from _pytest._code import ExceptionInfo +from _pytest.config import Config +from _pytest.config import ConftestImportFailure +from _pytest.config import hookimpl +from _pytest.config import PytestPluginManager +from _pytest.config.argparsing import Parser +from _pytest.config.exceptions import UsageError +from _pytest.nodes import Node +from _pytest.reports import BaseReport + + +if TYPE_CHECKING: + from _pytest.capture import CaptureManager + from _pytest.runner import CallInfo + + +def _validate_usepdb_cls(value: str) -> Tuple[str, str]: + """Validate syntax of --pdbcls option.""" + try: + modname, classname = value.split(":") + except ValueError as e: + raise argparse.ArgumentTypeError( + f"{value!r} is not in the format 'modname:classname'" + ) from e + return (modname, classname) + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group._addoption( + "--pdb", + dest="usepdb", + action="store_true", + help="Start the interactive Python debugger on errors or KeyboardInterrupt", + ) + group._addoption( + "--pdbcls", + dest="usepdb_cls", + metavar="modulename:classname", + type=_validate_usepdb_cls, + help="Specify a custom interactive Python debugger for use with --pdb." + "For example: --pdbcls=IPython.terminal.debugger:TerminalPdb", + ) + group._addoption( + "--trace", + dest="trace", + action="store_true", + help="Immediately break when running each test", + ) + + +def pytest_configure(config: Config) -> None: + import pdb + + if config.getvalue("trace"): + config.pluginmanager.register(PdbTrace(), "pdbtrace") + if config.getvalue("usepdb"): + config.pluginmanager.register(PdbInvoke(), "pdbinvoke") + + pytestPDB._saved.append( + (pdb.set_trace, pytestPDB._pluginmanager, pytestPDB._config) + ) + pdb.set_trace = pytestPDB.set_trace + pytestPDB._pluginmanager = config.pluginmanager + pytestPDB._config = config + + # NOTE: not using pytest_unconfigure, since it might get called although + # pytest_configure was not (if another plugin raises UsageError). 
+ def fin() -> None: + ( + pdb.set_trace, + pytestPDB._pluginmanager, + pytestPDB._config, + ) = pytestPDB._saved.pop() + + config.add_cleanup(fin) + + +class pytestPDB: + """Pseudo PDB that defers to the real pdb.""" + + _pluginmanager: Optional[PytestPluginManager] = None + _config: Optional[Config] = None + _saved: List[ + Tuple[Callable[..., None], Optional[PytestPluginManager], Optional[Config]] + ] = [] + _recursive_debug = 0 + _wrapped_pdb_cls: Optional[Tuple[Type[Any], Type[Any]]] = None + + @classmethod + def _is_capturing(cls, capman: Optional["CaptureManager"]) -> Union[str, bool]: + if capman: + return capman.is_capturing() + return False + + @classmethod + def _import_pdb_cls(cls, capman: Optional["CaptureManager"]): + if not cls._config: + import pdb + + # Happens when using pytest.set_trace outside of a test. + return pdb.Pdb + + usepdb_cls = cls._config.getvalue("usepdb_cls") + + if cls._wrapped_pdb_cls and cls._wrapped_pdb_cls[0] == usepdb_cls: + return cls._wrapped_pdb_cls[1] + + if usepdb_cls: + modname, classname = usepdb_cls + + try: + __import__(modname) + mod = sys.modules[modname] + + # Handle --pdbcls=pdb:pdb.Pdb (useful e.g. with pdbpp). + parts = classname.split(".") + pdb_cls = getattr(mod, parts[0]) + for part in parts[1:]: + pdb_cls = getattr(pdb_cls, part) + except Exception as exc: + value = ":".join((modname, classname)) + raise UsageError( + f"--pdbcls: could not import {value!r}: {exc}" + ) from exc + else: + import pdb + + pdb_cls = pdb.Pdb + + wrapped_cls = cls._get_pdb_wrapper_class(pdb_cls, capman) + cls._wrapped_pdb_cls = (usepdb_cls, wrapped_cls) + return wrapped_cls + + @classmethod + def _get_pdb_wrapper_class(cls, pdb_cls, capman: Optional["CaptureManager"]): + import _pytest.config + + class PytestPdbWrapper(pdb_cls): + _pytest_capman = capman + _continued = False + + def do_debug(self, arg): + cls._recursive_debug += 1 + ret = super().do_debug(arg) + cls._recursive_debug -= 1 + return ret + + def do_continue(self, arg): + ret = super().do_continue(arg) + if cls._recursive_debug == 0: + assert cls._config is not None + tw = _pytest.config.create_terminal_writer(cls._config) + tw.line() + + capman = self._pytest_capman + capturing = pytestPDB._is_capturing(capman) + if capturing: + if capturing == "global": + tw.sep(">", "PDB continue (IO-capturing resumed)") + else: + tw.sep( + ">", + "PDB continue (IO-capturing resumed for %s)" + % capturing, + ) + assert capman is not None + capman.resume() + else: + tw.sep(">", "PDB continue") + assert cls._pluginmanager is not None + cls._pluginmanager.hook.pytest_leave_pdb(config=cls._config, pdb=self) + self._continued = True + return ret + + do_c = do_cont = do_continue + + def do_quit(self, arg): + """Raise Exit outcome when quit command is used in pdb. + + This is a bit of a hack - it would be better if BdbQuit + could be handled, but this would require to wrap the + whole pytest run, and adjust the report etc. + """ + ret = super().do_quit(arg) + + if cls._recursive_debug == 0: + outcomes.exit("Quitting debugger") + + return ret + + do_q = do_quit + do_exit = do_quit + + def setup(self, f, tb): + """Suspend on setup(). + + Needed after do_continue resumed, and entering another + breakpoint again. + """ + ret = super().setup(f, tb) + if not ret and self._continued: + # pdb.setup() returns True if the command wants to exit + # from the interaction: do not suspend capturing then. 
+ if self._pytest_capman: + self._pytest_capman.suspend_global_capture(in_=True) + return ret + + def get_stack(self, f, t): + stack, i = super().get_stack(f, t) + if f is None: + # Find last non-hidden frame. + i = max(0, len(stack) - 1) + while i and stack[i][0].f_locals.get("__tracebackhide__", False): + i -= 1 + return stack, i + + return PytestPdbWrapper + + @classmethod + def _init_pdb(cls, method, *args, **kwargs): + """Initialize PDB debugging, dropping any IO capturing.""" + import _pytest.config + + if cls._pluginmanager is None: + capman: Optional[CaptureManager] = None + else: + capman = cls._pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend(in_=True) + + if cls._config: + tw = _pytest.config.create_terminal_writer(cls._config) + tw.line() + + if cls._recursive_debug == 0: + # Handle header similar to pdb.set_trace in py37+. + header = kwargs.pop("header", None) + if header is not None: + tw.sep(">", header) + else: + capturing = cls._is_capturing(capman) + if capturing == "global": + tw.sep(">", f"PDB {method} (IO-capturing turned off)") + elif capturing: + tw.sep( + ">", + f"PDB {method} (IO-capturing turned off for {capturing})", + ) + else: + tw.sep(">", f"PDB {method}") + + _pdb = cls._import_pdb_cls(capman)(**kwargs) + + if cls._pluginmanager: + cls._pluginmanager.hook.pytest_enter_pdb(config=cls._config, pdb=_pdb) + return _pdb + + @classmethod + def set_trace(cls, *args, **kwargs) -> None: + """Invoke debugging via ``Pdb.set_trace``, dropping any IO capturing.""" + frame = sys._getframe().f_back + _pdb = cls._init_pdb("set_trace", *args, **kwargs) + _pdb.set_trace(frame) + + +class PdbInvoke: + def pytest_exception_interact( + self, node: Node, call: "CallInfo[Any]", report: BaseReport + ) -> None: + capman = node.config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture(in_=True) + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stdout.write(err) + assert call.excinfo is not None + + if not isinstance(call.excinfo.value, unittest.SkipTest): + _enter_pdb(node, call.excinfo, report) + + def pytest_internalerror(self, excinfo: ExceptionInfo[BaseException]) -> None: + tb = _postmortem_traceback(excinfo) + post_mortem(tb) + + +class PdbTrace: + @hookimpl(wrapper=True) + def pytest_pyfunc_call(self, pyfuncitem) -> Generator[None, object, object]: + wrap_pytest_function_for_tracing(pyfuncitem) + return (yield) + + +def wrap_pytest_function_for_tracing(pyfuncitem): + """Change the Python function object of the given Function item by a + wrapper which actually enters pdb before calling the python function + itself, effectively leaving the user in the pdb prompt in the first + statement of the function.""" + _pdb = pytestPDB._init_pdb("runcall") + testfunction = pyfuncitem.obj + + # we can't just return `partial(pdb.runcall, testfunction)` because (on + # python < 3.7.4) runcall's first param is `func`, which means we'd get + # an exception if one of the kwargs to testfunction was called `func`. 
+ @functools.wraps(testfunction) + def wrapper(*args, **kwargs): + func = functools.partial(testfunction, *args, **kwargs) + _pdb.runcall(func) + + pyfuncitem.obj = wrapper + + +def maybe_wrap_pytest_function_for_tracing(pyfuncitem): + """Wrap the given pytestfunct item for tracing support if --trace was given in + the command line.""" + if pyfuncitem.config.getvalue("trace"): + wrap_pytest_function_for_tracing(pyfuncitem) + + +def _enter_pdb( + node: Node, excinfo: ExceptionInfo[BaseException], rep: BaseReport +) -> BaseReport: + # XXX we re-use the TerminalReporter's terminalwriter + # because this seems to avoid some encoding related troubles + # for not completely clear reasons. + tw = node.config.pluginmanager.getplugin("terminalreporter")._tw + tw.line() + + showcapture = node.config.option.showcapture + + for sectionname, content in ( + ("stdout", rep.capstdout), + ("stderr", rep.capstderr), + ("log", rep.caplog), + ): + if showcapture in (sectionname, "all") and content: + tw.sep(">", "captured " + sectionname) + if content[-1:] == "\n": + content = content[:-1] + tw.line(content) + + tw.sep(">", "traceback") + rep.toterminal(tw) + tw.sep(">", "entering PDB") + tb = _postmortem_traceback(excinfo) + rep._pdbshown = True # type: ignore[attr-defined] + post_mortem(tb) + return rep + + +def _postmortem_traceback(excinfo: ExceptionInfo[BaseException]) -> types.TracebackType: + from doctest import UnexpectedException + + if isinstance(excinfo.value, UnexpectedException): + # A doctest.UnexpectedException is not useful for post_mortem. + # Use the underlying exception instead: + return excinfo.value.exc_info[2] + elif isinstance(excinfo.value, ConftestImportFailure): + # A config.ConftestImportFailure is not useful for post_mortem. + # Use the underlying exception instead: + assert excinfo.value.cause.__traceback__ is not None + return excinfo.value.cause.__traceback__ + else: + assert excinfo._excinfo is not None + return excinfo._excinfo[2] + + +def post_mortem(t: types.TracebackType) -> None: + p = pytestPDB._init_pdb("post_mortem") + p.reset() + p.interaction(None, t) + if p.quitting: + outcomes.exit("Quitting debugger") diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/deprecated.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/deprecated.py new file mode 100644 index 000000000..10811d158 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/deprecated.py @@ -0,0 +1,89 @@ +"""Deprecation messages and bits of code used elsewhere in the codebase that +is planned to be removed in the next pytest release. + +Keeping it in a central location makes it easy to track what is deprecated and should +be removed when the time comes. + +All constants defined in this module should be either instances of +:class:`PytestWarning`, or :class:`UnformattedWarning` +in case of warnings which need to format their messages. +""" + +from warnings import warn + +from _pytest.warning_types import PytestDeprecationWarning +from _pytest.warning_types import PytestRemovedIn9Warning +from _pytest.warning_types import UnformattedWarning + + +# set of plugins which have been integrated into the core; we use this list to ignore +# them during registration to avoid conflicts +DEPRECATED_EXTERNAL_PLUGINS = { + "pytest_catchlog", + "pytest_capturelog", + "pytest_faulthandler", +} + + +# This can be* removed pytest 8, but it's harmless and common, so no rush to remove. +# * If you're in the future: "could have been". 
+YIELD_FIXTURE = PytestDeprecationWarning( + "@pytest.yield_fixture is deprecated.\n" + "Use @pytest.fixture instead; they are the same." +) + +# This deprecation is never really meant to be removed. +PRIVATE = PytestDeprecationWarning("A private pytest class or function was used.") + + +HOOK_LEGACY_PATH_ARG = UnformattedWarning( + PytestRemovedIn9Warning, + "The ({pylib_path_arg}: py.path.local) argument is deprecated, please use ({pathlib_path_arg}: pathlib.Path)\n" + "see https://docs.pytest.org/en/latest/deprecations.html" + "#py-path-local-arguments-for-hooks-replaced-with-pathlib-path", +) + +NODE_CTOR_FSPATH_ARG = UnformattedWarning( + PytestRemovedIn9Warning, + "The (fspath: py.path.local) argument to {node_type_name} is deprecated. " + "Please use the (path: pathlib.Path) argument instead.\n" + "See https://docs.pytest.org/en/latest/deprecations.html" + "#fspath-argument-for-node-constructors-replaced-with-pathlib-path", +) + +HOOK_LEGACY_MARKING = UnformattedWarning( + PytestDeprecationWarning, + "The hook{type} {fullname} uses old-style configuration options (marks or attributes).\n" + "Please use the pytest.hook{type}({hook_opts}) decorator instead\n" + " to configure the hooks.\n" + " See https://docs.pytest.org/en/latest/deprecations.html" + "#configuring-hook-specs-impls-using-markers", +) + +MARKED_FIXTURE = PytestRemovedIn9Warning( + "Marks applied to fixtures have no effect\n" + "See docs: https://docs.pytest.org/en/stable/deprecations.html#applying-a-mark-to-a-fixture-function" +) + +# You want to make some `__init__` or function "private". +# +# def my_private_function(some, args): +# ... +# +# Do this: +# +# def my_private_function(some, args, *, _ispytest: bool = False): +# check_ispytest(_ispytest) +# ... +# +# Change all internal/allowed calls to +# +# my_private_function(some, args, _ispytest=True) +# +# All other calls will get the default _ispytest=False and trigger +# the warning (possibly error in the future). 
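+# Illustrative sketch, not part of the vendored file: how constants defined
+# earlier in this module (e.g. YIELD_FIXTURE, NODE_CTOR_FSPATH_ARG) are
+# typically emitted. The call site below is hypothetical; the only behaviour
+# assumed is that warnings.warn() accepts a Warning instance and that
+# UnformattedWarning.format() returns one, as seen with HOOK_LEGACY_PATH_ARG
+# in _pytest/config/compat.py:
+#
+#     import warnings
+#
+#     # A ready-made warning instance is passed through unchanged.
+#     warnings.warn(YIELD_FIXTURE, stacklevel=2)
+#
+#     # An UnformattedWarning is formatted into an instance first.
+#     warnings.warn(
+#         NODE_CTOR_FSPATH_ARG.format(node_type_name="File"), stacklevel=2
+#     )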
+ + +def check_ispytest(ispytest: bool) -> None: + if not ispytest: + warn(PRIVATE, stacklevel=3) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/doctest.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/doctest.py new file mode 100644 index 000000000..7fff99f37 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/doctest.py @@ -0,0 +1,752 @@ +# mypy: allow-untyped-defs +"""Discover and run doctests in modules and test files.""" + +import bdb +from contextlib import contextmanager +import functools +import inspect +import os +from pathlib import Path +import platform +import sys +import traceback +import types +from typing import Any +from typing import Callable +from typing import Dict +from typing import Generator +from typing import Iterable +from typing import List +from typing import Optional +from typing import Pattern +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import Union +import warnings + +from _pytest import outcomes +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import ReprFileLocation +from _pytest._code.code import TerminalRepr +from _pytest._io import TerminalWriter +from _pytest.compat import safe_getattr +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.fixtures import fixture +from _pytest.fixtures import TopRequest +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import OutcomeException +from _pytest.outcomes import skip +from _pytest.pathlib import fnmatch_ex +from _pytest.python import Module +from _pytest.python_api import approx +from _pytest.warning_types import PytestWarning + + +if TYPE_CHECKING: + import doctest + from typing import Self + +DOCTEST_REPORT_CHOICE_NONE = "none" +DOCTEST_REPORT_CHOICE_CDIFF = "cdiff" +DOCTEST_REPORT_CHOICE_NDIFF = "ndiff" +DOCTEST_REPORT_CHOICE_UDIFF = "udiff" +DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = "only_first_failure" + +DOCTEST_REPORT_CHOICES = ( + DOCTEST_REPORT_CHOICE_NONE, + DOCTEST_REPORT_CHOICE_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF, + DOCTEST_REPORT_CHOICE_UDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE, +) + +# Lazy definition of runner class +RUNNER_CLASS = None +# Lazy definition of output checker class +CHECKER_CLASS: Optional[Type["doctest.OutputChecker"]] = None + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "doctest_optionflags", + "Option flags for doctests", + type="args", + default=["ELLIPSIS"], + ) + parser.addini( + "doctest_encoding", "Encoding used for doctest files", default="utf-8" + ) + group = parser.getgroup("collect") + group.addoption( + "--doctest-modules", + action="store_true", + default=False, + help="Run doctests in all .py modules", + dest="doctestmodules", + ) + group.addoption( + "--doctest-report", + type=str.lower, + default="udiff", + help="Choose another output format for diffs on doctest failure", + choices=DOCTEST_REPORT_CHOICES, + dest="doctestreport", + ) + group.addoption( + "--doctest-glob", + action="append", + default=[], + metavar="pat", + help="Doctests file matching pattern, default: test*.txt", + dest="doctestglob", + ) + group.addoption( + "--doctest-ignore-import-errors", + action="store_true", + default=False, + help="Ignore doctest collection errors", + dest="doctest_ignore_import_errors", + ) + group.addoption( + "--doctest-continue-on-failure", + action="store_true", + default=False, + help="For 
a given doctest, continue to run after the first failure", + dest="doctest_continue_on_failure", + ) + + +def pytest_unconfigure() -> None: + global RUNNER_CLASS + + RUNNER_CLASS = None + + +def pytest_collect_file( + file_path: Path, + parent: Collector, +) -> Optional[Union["DoctestModule", "DoctestTextfile"]]: + config = parent.config + if file_path.suffix == ".py": + if config.option.doctestmodules and not any( + (_is_setup_py(file_path), _is_main_py(file_path)) + ): + return DoctestModule.from_parent(parent, path=file_path) + elif _is_doctest(config, file_path, parent): + return DoctestTextfile.from_parent(parent, path=file_path) + return None + + +def _is_setup_py(path: Path) -> bool: + if path.name != "setup.py": + return False + contents = path.read_bytes() + return b"setuptools" in contents or b"distutils" in contents + + +def _is_doctest(config: Config, path: Path, parent: Collector) -> bool: + if path.suffix in (".txt", ".rst") and parent.session.isinitpath(path): + return True + globs = config.getoption("doctestglob") or ["test*.txt"] + return any(fnmatch_ex(glob, path) for glob in globs) + + +def _is_main_py(path: Path) -> bool: + return path.name == "__main__.py" + + +class ReprFailDoctest(TerminalRepr): + def __init__( + self, reprlocation_lines: Sequence[Tuple[ReprFileLocation, Sequence[str]]] + ) -> None: + self.reprlocation_lines = reprlocation_lines + + def toterminal(self, tw: TerminalWriter) -> None: + for reprlocation, lines in self.reprlocation_lines: + for line in lines: + tw.line(line) + reprlocation.toterminal(tw) + + +class MultipleDoctestFailures(Exception): + def __init__(self, failures: Sequence["doctest.DocTestFailure"]) -> None: + super().__init__() + self.failures = failures + + +def _init_runner_class() -> Type["doctest.DocTestRunner"]: + import doctest + + class PytestDoctestRunner(doctest.DebugRunner): + """Runner to collect failures. + + Note that the out variable in this case is a list instead of a + stdout-like object. 
+ """ + + def __init__( + self, + checker: Optional["doctest.OutputChecker"] = None, + verbose: Optional[bool] = None, + optionflags: int = 0, + continue_on_failure: bool = True, + ) -> None: + super().__init__(checker=checker, verbose=verbose, optionflags=optionflags) + self.continue_on_failure = continue_on_failure + + def report_failure( + self, + out, + test: "doctest.DocTest", + example: "doctest.Example", + got: str, + ) -> None: + failure = doctest.DocTestFailure(test, example, got) + if self.continue_on_failure: + out.append(failure) + else: + raise failure + + def report_unexpected_exception( + self, + out, + test: "doctest.DocTest", + example: "doctest.Example", + exc_info: Tuple[Type[BaseException], BaseException, types.TracebackType], + ) -> None: + if isinstance(exc_info[1], OutcomeException): + raise exc_info[1] + if isinstance(exc_info[1], bdb.BdbQuit): + outcomes.exit("Quitting debugger") + failure = doctest.UnexpectedException(test, example, exc_info) + if self.continue_on_failure: + out.append(failure) + else: + raise failure + + return PytestDoctestRunner + + +def _get_runner( + checker: Optional["doctest.OutputChecker"] = None, + verbose: Optional[bool] = None, + optionflags: int = 0, + continue_on_failure: bool = True, +) -> "doctest.DocTestRunner": + # We need this in order to do a lazy import on doctest + global RUNNER_CLASS + if RUNNER_CLASS is None: + RUNNER_CLASS = _init_runner_class() + # Type ignored because the continue_on_failure argument is only defined on + # PytestDoctestRunner, which is lazily defined so can't be used as a type. + return RUNNER_CLASS( # type: ignore + checker=checker, + verbose=verbose, + optionflags=optionflags, + continue_on_failure=continue_on_failure, + ) + + +class DoctestItem(Item): + def __init__( + self, + name: str, + parent: "Union[DoctestTextfile, DoctestModule]", + runner: "doctest.DocTestRunner", + dtest: "doctest.DocTest", + ) -> None: + super().__init__(name, parent) + self.runner = runner + self.dtest = dtest + + # Stuff needed for fixture support. + self.obj = None + fm = self.session._fixturemanager + fixtureinfo = fm.getfixtureinfo(node=self, func=None, cls=None) + self._fixtureinfo = fixtureinfo + self.fixturenames = fixtureinfo.names_closure + self._initrequest() + + @classmethod + def from_parent( # type: ignore[override] + cls, + parent: "Union[DoctestTextfile, DoctestModule]", + *, + name: str, + runner: "doctest.DocTestRunner", + dtest: "doctest.DocTest", + ) -> "Self": + # incompatible signature due to imposed limits on subclass + """The public named constructor.""" + return super().from_parent(name=name, parent=parent, runner=runner, dtest=dtest) + + def _initrequest(self) -> None: + self.funcargs: Dict[str, object] = {} + self._request = TopRequest(self, _ispytest=True) # type: ignore[arg-type] + + def setup(self) -> None: + self._request._fillfixtures() + globs = dict(getfixture=self._request.getfixturevalue) + for name, value in self._request.getfixturevalue("doctest_namespace").items(): + globs[name] = value + self.dtest.globs.update(globs) + + def runtest(self) -> None: + _check_all_skipped(self.dtest) + self._disable_output_capturing_for_darwin() + failures: List["doctest.DocTestFailure"] = [] + # Type ignored because we change the type of `out` from what + # doctest expects. + self.runner.run(self.dtest, out=failures) # type: ignore[arg-type] + if failures: + raise MultipleDoctestFailures(failures) + + def _disable_output_capturing_for_darwin(self) -> None: + """Disable output capturing. 
Otherwise, stdout is lost to doctest (#985).""" + if platform.system() != "Darwin": + return + capman = self.config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture(in_=True) + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stderr.write(err) + + # TODO: Type ignored -- breaks Liskov Substitution. + def repr_failure( # type: ignore[override] + self, + excinfo: ExceptionInfo[BaseException], + ) -> Union[str, TerminalRepr]: + import doctest + + failures: Optional[ + Sequence[Union[doctest.DocTestFailure, doctest.UnexpectedException]] + ] = None + if isinstance( + excinfo.value, (doctest.DocTestFailure, doctest.UnexpectedException) + ): + failures = [excinfo.value] + elif isinstance(excinfo.value, MultipleDoctestFailures): + failures = excinfo.value.failures + + if failures is None: + return super().repr_failure(excinfo) + + reprlocation_lines = [] + for failure in failures: + example = failure.example + test = failure.test + filename = test.filename + if test.lineno is None: + lineno = None + else: + lineno = test.lineno + example.lineno + 1 + message = type(failure).__name__ + # TODO: ReprFileLocation doesn't expect a None lineno. + reprlocation = ReprFileLocation(filename, lineno, message) # type: ignore[arg-type] + checker = _get_checker() + report_choice = _get_report_choice(self.config.getoption("doctestreport")) + if lineno is not None: + assert failure.test.docstring is not None + lines = failure.test.docstring.splitlines(False) + # add line numbers to the left of the error message + assert test.lineno is not None + lines = [ + "%03d %s" % (i + test.lineno + 1, x) for (i, x) in enumerate(lines) + ] + # trim docstring error lines to 10 + lines = lines[max(example.lineno - 9, 0) : example.lineno + 1] + else: + lines = [ + "EXAMPLE LOCATION UNKNOWN, not showing all tests of that example" + ] + indent = ">>>" + for line in example.source.splitlines(): + lines.append(f"??? {indent} {line}") + indent = "..." 
+ if isinstance(failure, doctest.DocTestFailure): + lines += checker.output_difference( + example, failure.got, report_choice + ).split("\n") + else: + inner_excinfo = ExceptionInfo.from_exc_info(failure.exc_info) + lines += ["UNEXPECTED EXCEPTION: %s" % repr(inner_excinfo.value)] + lines += [ + x.strip("\n") for x in traceback.format_exception(*failure.exc_info) + ] + reprlocation_lines.append((reprlocation, lines)) + return ReprFailDoctest(reprlocation_lines) + + def reportinfo(self) -> Tuple[Union["os.PathLike[str]", str], Optional[int], str]: + return self.path, self.dtest.lineno, "[doctest] %s" % self.name + + +def _get_flag_lookup() -> Dict[str, int]: + import doctest + + return dict( + DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1, + DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE, + NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE, + ELLIPSIS=doctest.ELLIPSIS, + IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL, + COMPARISON_FLAGS=doctest.COMPARISON_FLAGS, + ALLOW_UNICODE=_get_allow_unicode_flag(), + ALLOW_BYTES=_get_allow_bytes_flag(), + NUMBER=_get_number_flag(), + ) + + +def get_optionflags(config: Config) -> int: + optionflags_str = config.getini("doctest_optionflags") + flag_lookup_table = _get_flag_lookup() + flag_acc = 0 + for flag in optionflags_str: + flag_acc |= flag_lookup_table[flag] + return flag_acc + + +def _get_continue_on_failure(config: Config) -> bool: + continue_on_failure: bool = config.getvalue("doctest_continue_on_failure") + if continue_on_failure: + # We need to turn off this if we use pdb since we should stop at + # the first failure. + if config.getvalue("usepdb"): + continue_on_failure = False + return continue_on_failure + + +class DoctestTextfile(Module): + obj = None + + def collect(self) -> Iterable[DoctestItem]: + import doctest + + # Inspired by doctest.testfile; ideally we would use it directly, + # but it doesn't support passing a custom checker. 
+ encoding = self.config.getini("doctest_encoding") + text = self.path.read_text(encoding) + filename = str(self.path) + name = self.path.name + globs = {"__name__": "__main__"} + + optionflags = get_optionflags(self.config) + + runner = _get_runner( + verbose=False, + optionflags=optionflags, + checker=_get_checker(), + continue_on_failure=_get_continue_on_failure(self.config), + ) + + parser = doctest.DocTestParser() + test = parser.get_doctest(text, globs, name, filename, 0) + if test.examples: + yield DoctestItem.from_parent( + self, name=test.name, runner=runner, dtest=test + ) + + +def _check_all_skipped(test: "doctest.DocTest") -> None: + """Raise pytest.skip() if all examples in the given DocTest have the SKIP + option set.""" + import doctest + + all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples) + if all_skipped: + skip("all tests skipped by +SKIP option") + + +def _is_mocked(obj: object) -> bool: + """Return if an object is possibly a mock object by checking the + existence of a highly improbable attribute.""" + return ( + safe_getattr(obj, "pytest_mock_example_attribute_that_shouldnt_exist", None) + is not None + ) + + +@contextmanager +def _patch_unwrap_mock_aware() -> Generator[None, None, None]: + """Context manager which replaces ``inspect.unwrap`` with a version + that's aware of mock objects and doesn't recurse into them.""" + real_unwrap = inspect.unwrap + + def _mock_aware_unwrap( + func: Callable[..., Any], *, stop: Optional[Callable[[Any], Any]] = None + ) -> Any: + try: + if stop is None or stop is _is_mocked: + return real_unwrap(func, stop=_is_mocked) + _stop = stop + return real_unwrap(func, stop=lambda obj: _is_mocked(obj) or _stop(func)) + except Exception as e: + warnings.warn( + f"Got {e!r} when unwrapping {func!r}. This is usually caused " + "by a violation of Python's object protocol; see e.g. " + "https://github.com/pytest-dev/pytest/issues/5080", + PytestWarning, + ) + raise + + inspect.unwrap = _mock_aware_unwrap + try: + yield + finally: + inspect.unwrap = real_unwrap + + +class DoctestModule(Module): + def collect(self) -> Iterable[DoctestItem]: + import doctest + + class MockAwareDocTestFinder(doctest.DocTestFinder): + """A hackish doctest finder that overrides stdlib internals to fix a stdlib bug. + + https://github.com/pytest-dev/pytest/issues/3456 + https://bugs.python.org/issue25532 + """ + + def _find_lineno(self, obj, source_lines): + """Doctest code does not take into account `@property`, this + is a hackish way to fix it. https://bugs.python.org/issue17446 + + Wrapped Doctests will need to be unwrapped so the correct + line number is returned. This will be reported upstream. #8796 + """ + if isinstance(obj, property): + obj = getattr(obj, "fget", obj) + + if hasattr(obj, "__wrapped__"): + # Get the main obj in case of it being wrapped + obj = inspect.unwrap(obj) + + # Type ignored because this is a private function. + return super()._find_lineno( # type:ignore[misc] + obj, + source_lines, + ) + + def _find( + self, tests, obj, name, module, source_lines, globs, seen + ) -> None: + if _is_mocked(obj): + return + with _patch_unwrap_mock_aware(): + # Type ignored because this is a private function. + super()._find( # type:ignore[misc] + tests, obj, name, module, source_lines, globs, seen + ) + + if sys.version_info < (3, 13): + + def _from_module(self, module, object): + """`cached_property` objects are never considered a part + of the 'current module'. As such they are skipped by doctest. 
+                    Here we override `_from_module` to check the underlying
+                    function instead. https://github.com/python/cpython/issues/107995
+                    """
+                    if isinstance(object, functools.cached_property):
+                        object = object.func
+
+                    # Type ignored because this is a private function.
+                    return super()._from_module(module, object)  # type: ignore[misc]
+
+            else:  # pragma: no cover
+                pass
+
+        try:
+            module = self.obj
+        except Collector.CollectError:
+            if self.config.getvalue("doctest_ignore_import_errors"):
+                skip("unable to import module %r" % self.path)
+            else:
+                raise
+
+        # While doctests currently don't support fixtures directly, we still
+        # need to pick up autouse fixtures.
+        self.session._fixturemanager.parsefactories(self)
+
+        # Uses internal doctest module parsing mechanism.
+        finder = MockAwareDocTestFinder()
+        optionflags = get_optionflags(self.config)
+        runner = _get_runner(
+            verbose=False,
+            optionflags=optionflags,
+            checker=_get_checker(),
+            continue_on_failure=_get_continue_on_failure(self.config),
+        )
+
+        for test in finder.find(module, module.__name__):
+            if test.examples:  # skip empty doctests
+                yield DoctestItem.from_parent(
+                    self, name=test.name, runner=runner, dtest=test
+                )
+
+
+def _init_checker_class() -> Type["doctest.OutputChecker"]:
+    import doctest
+    import re
+
+    class LiteralsOutputChecker(doctest.OutputChecker):
+        # Based on doctest_nose_plugin.py from the nltk project
+        # (https://github.com/nltk/nltk) and on the "numtest" doctest extension
+        # by Sebastien Boisgerault (https://github.com/boisgera/numtest).
+
+        _unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE)
+        _bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE)
+        _number_re = re.compile(
+            r"""
+            (?P<number>
+              (?P<mantissa>
+                (?P<integer1> [+-]?\d*)\.(?P<fraction>\d+)
+                |
+                (?P<integer2> [+-]?\d+)\.
+              )
+              (?:
+                [Ee]
+                (?P<exponent1> [+-]?\d+)
+              )?
+              |
+              (?P<integer3> [+-]?\d+)
+              (?:
+                [Ee]
+                (?P<exponent2> [+-]?\d+)
+              )
+            )
+            """,
+            re.VERBOSE,
+        )
+
+        def check_output(self, want: str, got: str, optionflags: int) -> bool:
+            if super().check_output(want, got, optionflags):
+                return True
+
+            allow_unicode = optionflags & _get_allow_unicode_flag()
+            allow_bytes = optionflags & _get_allow_bytes_flag()
+            allow_number = optionflags & _get_number_flag()
+
+            if not allow_unicode and not allow_bytes and not allow_number:
+                return False
+
+            def remove_prefixes(regex: Pattern[str], txt: str) -> str:
+                return re.sub(regex, r"\1\2", txt)
+
+            if allow_unicode:
+                want = remove_prefixes(self._unicode_literal_re, want)
+                got = remove_prefixes(self._unicode_literal_re, got)
+
+            if allow_bytes:
+                want = remove_prefixes(self._bytes_literal_re, want)
+                got = remove_prefixes(self._bytes_literal_re, got)
+
+            if allow_number:
+                got = self._remove_unwanted_precision(want, got)
+
+            return super().check_output(want, got, optionflags)
+
+        def _remove_unwanted_precision(self, want: str, got: str) -> str:
+            wants = list(self._number_re.finditer(want))
+            gots = list(self._number_re.finditer(got))
+            if len(wants) != len(gots):
+                return got
+            offset = 0
+            for w, g in zip(wants, gots):
+                fraction: Optional[str] = w.group("fraction")
+                exponent: Optional[str] = w.group("exponent1")
+                if exponent is None:
+                    exponent = w.group("exponent2")
+                precision = 0 if fraction is None else len(fraction)
+                if exponent is not None:
+                    precision -= int(exponent)
+                if float(w.group()) == approx(float(g.group()), abs=10**-precision):
+                    # They're close enough. Replace the text we actually
+                    # got with the text we want, so that it will match when we
+                    # check the string literally.
+ got = ( + got[: g.start() + offset] + w.group() + got[g.end() + offset :] + ) + offset += w.end() - w.start() - (g.end() - g.start()) + return got + + return LiteralsOutputChecker + + +def _get_checker() -> "doctest.OutputChecker": + """Return a doctest.OutputChecker subclass that supports some + additional options: + + * ALLOW_UNICODE and ALLOW_BYTES options to ignore u'' and b'' + prefixes (respectively) in string literals. Useful when the same + doctest should run in Python 2 and Python 3. + + * NUMBER to ignore floating-point differences smaller than the + precision of the literal number in the doctest. + + An inner class is used to avoid importing "doctest" at the module + level. + """ + global CHECKER_CLASS + if CHECKER_CLASS is None: + CHECKER_CLASS = _init_checker_class() + return CHECKER_CLASS() + + +def _get_allow_unicode_flag() -> int: + """Register and return the ALLOW_UNICODE flag.""" + import doctest + + return doctest.register_optionflag("ALLOW_UNICODE") + + +def _get_allow_bytes_flag() -> int: + """Register and return the ALLOW_BYTES flag.""" + import doctest + + return doctest.register_optionflag("ALLOW_BYTES") + + +def _get_number_flag() -> int: + """Register and return the NUMBER flag.""" + import doctest + + return doctest.register_optionflag("NUMBER") + + +def _get_report_choice(key: str) -> int: + """Return the actual `doctest` module flag value. + + We want to do it as late as possible to avoid importing `doctest` and all + its dependencies when parsing options, as it adds overhead and breaks tests. + """ + import doctest + + return { + DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF, + DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE, + DOCTEST_REPORT_CHOICE_NONE: 0, + }[key] + + +@fixture(scope="session") +def doctest_namespace() -> Dict[str, Any]: + """Fixture that returns a :py:class:`dict` that will be injected into the + namespace of doctests. + + Usually this fixture is used in conjunction with another ``autouse`` fixture: + + .. code-block:: python + + @pytest.fixture(autouse=True) + def add_np(doctest_namespace): + doctest_namespace["np"] = numpy + + For more details: :ref:`doctest_namespace`. 
+ """ + return dict() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/faulthandler.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/faulthandler.py new file mode 100644 index 000000000..083bcb837 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/faulthandler.py @@ -0,0 +1,102 @@ +import os +import sys +from typing import Generator + +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.nodes import Item +from _pytest.stash import StashKey +import pytest + + +fault_handler_original_stderr_fd_key = StashKey[int]() +fault_handler_stderr_fd_key = StashKey[int]() + + +def pytest_addoption(parser: Parser) -> None: + help = ( + "Dump the traceback of all threads if a test takes " + "more than TIMEOUT seconds to finish" + ) + parser.addini("faulthandler_timeout", help, default=0.0) + + +def pytest_configure(config: Config) -> None: + import faulthandler + + # at teardown we want to restore the original faulthandler fileno + # but faulthandler has no api to return the original fileno + # so here we stash the stderr fileno to be used at teardown + # sys.stderr and sys.__stderr__ may be closed or patched during the session + # so we can't rely on their values being good at that point (#11572). + stderr_fileno = get_stderr_fileno() + if faulthandler.is_enabled(): + config.stash[fault_handler_original_stderr_fd_key] = stderr_fileno + config.stash[fault_handler_stderr_fd_key] = os.dup(stderr_fileno) + faulthandler.enable(file=config.stash[fault_handler_stderr_fd_key]) + + +def pytest_unconfigure(config: Config) -> None: + import faulthandler + + faulthandler.disable() + # Close the dup file installed during pytest_configure. + if fault_handler_stderr_fd_key in config.stash: + os.close(config.stash[fault_handler_stderr_fd_key]) + del config.stash[fault_handler_stderr_fd_key] + # Re-enable the faulthandler if it was originally enabled. + if fault_handler_original_stderr_fd_key in config.stash: + faulthandler.enable(config.stash[fault_handler_original_stderr_fd_key]) + del config.stash[fault_handler_original_stderr_fd_key] + + +def get_stderr_fileno() -> int: + try: + fileno = sys.stderr.fileno() + # The Twisted Logger will return an invalid file descriptor since it is not backed + # by an FD. So, let's also forward this to the same code path as with pytest-xdist. + if fileno == -1: + raise AttributeError() + return fileno + except (AttributeError, ValueError): + # pytest-xdist monkeypatches sys.stderr with an object that is not an actual file. + # https://docs.python.org/3/library/faulthandler.html#issue-with-file-descriptors + # This is potentially dangerous, but the best we can do. 
+ return sys.__stderr__.fileno() + + +def get_timeout_config_value(config: Config) -> float: + return float(config.getini("faulthandler_timeout") or 0.0) + + +@pytest.hookimpl(wrapper=True, trylast=True) +def pytest_runtest_protocol(item: Item) -> Generator[None, object, object]: + timeout = get_timeout_config_value(item.config) + if timeout > 0: + import faulthandler + + stderr = item.config.stash[fault_handler_stderr_fd_key] + faulthandler.dump_traceback_later(timeout, file=stderr) + try: + return (yield) + finally: + faulthandler.cancel_dump_traceback_later() + else: + return (yield) + + +@pytest.hookimpl(tryfirst=True) +def pytest_enter_pdb() -> None: + """Cancel any traceback dumping due to timeout before entering pdb.""" + import faulthandler + + faulthandler.cancel_dump_traceback_later() + + +@pytest.hookimpl(tryfirst=True) +def pytest_exception_interact() -> None: + """Cancel any traceback dumping due to an interactive exception being + raised.""" + import faulthandler + + faulthandler.cancel_dump_traceback_later() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/fixtures.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/fixtures.py new file mode 100644 index 000000000..09fd07422 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/fixtures.py @@ -0,0 +1,1912 @@ +# mypy: allow-untyped-defs +import abc +from collections import defaultdict +from collections import deque +import dataclasses +import functools +import inspect +import os +from pathlib import Path +import sys +from typing import AbstractSet +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import Final +from typing import final +from typing import Generator +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import List +from typing import MutableMapping +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union +import warnings + +import _pytest +from _pytest import nodes +from _pytest._code import getfslineno +from _pytest._code import Source +from _pytest._code.code import FormattedExcinfo +from _pytest._code.code import TerminalRepr +from _pytest._io import TerminalWriter +from _pytest.compat import _PytestWrapper +from _pytest.compat import assert_never +from _pytest.compat import get_real_func +from _pytest.compat import get_real_method +from _pytest.compat import getfuncargnames +from _pytest.compat import getimfunc +from _pytest.compat import getlocation +from _pytest.compat import is_generator +from _pytest.compat import NOTSET +from _pytest.compat import NotSetType +from _pytest.compat import safe_getattr +from _pytest.config import _PluggyPlugin +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.deprecated import MARKED_FIXTURE +from _pytest.deprecated import YIELD_FIXTURE +from _pytest.mark import Mark +from _pytest.mark import ParameterSet +from _pytest.mark.structures import MarkDecorator +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.outcomes import TEST_OUTCOME +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.scope import _ScopeName 
+from _pytest.scope import HIGH_SCOPES +from _pytest.scope import Scope + + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + + +if TYPE_CHECKING: + from typing import Deque + + from _pytest.main import Session + from _pytest.python import CallSpec2 + from _pytest.python import Function + from _pytest.python import Metafunc + + +# The value of the fixture -- return/yield of the fixture function (type variable). +FixtureValue = TypeVar("FixtureValue") +# The type of the fixture function (type variable). +FixtureFunction = TypeVar("FixtureFunction", bound=Callable[..., object]) +# The type of a fixture function (type alias generic in fixture value). +_FixtureFunc = Union[ + Callable[..., FixtureValue], Callable[..., Generator[FixtureValue, None, None]] +] +# The type of FixtureDef.cached_result (type alias generic in fixture value). +_FixtureCachedResult = Union[ + Tuple[ + # The result. + FixtureValue, + # Cache key. + object, + None, + ], + Tuple[ + None, + # Cache key. + object, + # Exception if raised. + BaseException, + ], +] + + +@dataclasses.dataclass(frozen=True) +class PseudoFixtureDef(Generic[FixtureValue]): + cached_result: "_FixtureCachedResult[FixtureValue]" + _scope: Scope + + +def pytest_sessionstart(session: "Session") -> None: + session._fixturemanager = FixtureManager(session) + + +def get_scope_package( + node: nodes.Item, + fixturedef: "FixtureDef[object]", +) -> Optional[nodes.Node]: + from _pytest.python import Package + + for parent in node.iter_parents(): + if isinstance(parent, Package) and parent.nodeid == fixturedef.baseid: + return parent + return node.session + + +def get_scope_node(node: nodes.Node, scope: Scope) -> Optional[nodes.Node]: + import _pytest.python + + if scope is Scope.Function: + # Type ignored because this is actually safe, see: + # https://github.com/python/mypy/issues/4717 + return node.getparent(nodes.Item) # type: ignore[type-abstract] + elif scope is Scope.Class: + return node.getparent(_pytest.python.Class) + elif scope is Scope.Module: + return node.getparent(_pytest.python.Module) + elif scope is Scope.Package: + return node.getparent(_pytest.python.Package) + elif scope is Scope.Session: + return node.getparent(_pytest.main.Session) + else: + assert_never(scope) + + +def getfixturemarker(obj: object) -> Optional["FixtureFunctionMarker"]: + """Return fixturemarker or None if it doesn't exist or raised + exceptions.""" + return cast( + Optional[FixtureFunctionMarker], + safe_getattr(obj, "_pytestfixturefunction", None), + ) + + +@dataclasses.dataclass(frozen=True) +class FixtureArgKey: + argname: str + param_index: int + scoped_item_path: Optional[Path] + item_cls: Optional[type] + + +def get_parametrized_fixture_keys( + item: nodes.Item, scope: Scope +) -> Iterator[FixtureArgKey]: + """Return list of keys for all parametrized arguments which match + the specified scope.""" + assert scope is not Scope.Function + try: + callspec: CallSpec2 = item.callspec # type: ignore[attr-defined] + except AttributeError: + return + for argname in callspec.indices: + if callspec._arg2scope[argname] != scope: + continue + + item_cls = None + if scope is Scope.Session: + scoped_item_path = None + elif scope is Scope.Package: + scoped_item_path = item.path + elif scope is Scope.Module: + scoped_item_path = item.path + elif scope is Scope.Class: + scoped_item_path = item.path + item_cls = item.cls # type: ignore[attr-defined] + else: + assert_never(scope) + + param_index = callspec.indices[argname] + yield 
FixtureArgKey(argname, param_index, scoped_item_path, item_cls) + + +# Algorithm for sorting on a per-parametrized resource setup basis. +# It is called for Session scope first and performs sorting +# down to the lower scopes such as to minimize number of "high scope" +# setups and teardowns. + + +def reorder_items(items: Sequence[nodes.Item]) -> List[nodes.Item]: + argkeys_cache: Dict[Scope, Dict[nodes.Item, Dict[FixtureArgKey, None]]] = {} + items_by_argkey: Dict[Scope, Dict[FixtureArgKey, Deque[nodes.Item]]] = {} + for scope in HIGH_SCOPES: + scoped_argkeys_cache = argkeys_cache[scope] = {} + scoped_items_by_argkey = items_by_argkey[scope] = defaultdict(deque) + for item in items: + keys = dict.fromkeys(get_parametrized_fixture_keys(item, scope), None) + if keys: + scoped_argkeys_cache[item] = keys + for key in keys: + scoped_items_by_argkey[key].append(item) + items_dict = dict.fromkeys(items, None) + return list( + reorder_items_atscope(items_dict, argkeys_cache, items_by_argkey, Scope.Session) + ) + + +def fix_cache_order( + item: nodes.Item, + argkeys_cache: Dict[Scope, Dict[nodes.Item, Dict[FixtureArgKey, None]]], + items_by_argkey: Dict[Scope, Dict[FixtureArgKey, "Deque[nodes.Item]"]], +) -> None: + for scope in HIGH_SCOPES: + for key in argkeys_cache[scope].get(item, []): + items_by_argkey[scope][key].appendleft(item) + + +def reorder_items_atscope( + items: Dict[nodes.Item, None], + argkeys_cache: Dict[Scope, Dict[nodes.Item, Dict[FixtureArgKey, None]]], + items_by_argkey: Dict[Scope, Dict[FixtureArgKey, "Deque[nodes.Item]"]], + scope: Scope, +) -> Dict[nodes.Item, None]: + if scope is Scope.Function or len(items) < 3: + return items + ignore: Set[Optional[FixtureArgKey]] = set() + items_deque = deque(items) + items_done: Dict[nodes.Item, None] = {} + scoped_items_by_argkey = items_by_argkey[scope] + scoped_argkeys_cache = argkeys_cache[scope] + while items_deque: + no_argkey_group: Dict[nodes.Item, None] = {} + slicing_argkey = None + while items_deque: + item = items_deque.popleft() + if item in items_done or item in no_argkey_group: + continue + argkeys = dict.fromkeys( + (k for k in scoped_argkeys_cache.get(item, []) if k not in ignore), None + ) + if not argkeys: + no_argkey_group[item] = None + else: + slicing_argkey, _ = argkeys.popitem() + # We don't have to remove relevant items from later in the + # deque because they'll just be ignored. + matching_items = [ + i for i in scoped_items_by_argkey[slicing_argkey] if i in items + ] + for i in reversed(matching_items): + fix_cache_order(i, argkeys_cache, items_by_argkey) + items_deque.appendleft(i) + break + if no_argkey_group: + no_argkey_group = reorder_items_atscope( + no_argkey_group, argkeys_cache, items_by_argkey, scope.next_lower() + ) + for item in no_argkey_group: + items_done[item] = None + ignore.add(slicing_argkey) + return items_done + + +@dataclasses.dataclass(frozen=True) +class FuncFixtureInfo: + """Fixture-related information for a fixture-requesting item (e.g. test + function). + + This is used to examine the fixtures which an item requests statically + (known during collection). This includes autouse fixtures, fixtures + requested by the `usefixtures` marker, fixtures requested in the function + parameters, and the transitive closure of these. + + An item may also request fixtures dynamically (using `request.getfixturevalue`); + these are not reflected here. 
+ """ + + __slots__ = ("argnames", "initialnames", "names_closure", "name2fixturedefs") + + # Fixture names that the item requests directly by function parameters. + argnames: Tuple[str, ...] + # Fixture names that the item immediately requires. These include + # argnames + fixture names specified via usefixtures and via autouse=True in + # fixture definitions. + initialnames: Tuple[str, ...] + # The transitive closure of the fixture names that the item requires. + # Note: can't include dynamic dependencies (`request.getfixturevalue` calls). + names_closure: List[str] + # A map from a fixture name in the transitive closure to the FixtureDefs + # matching the name which are applicable to this function. + # There may be multiple overriding fixtures with the same name. The + # sequence is ordered from furthest to closes to the function. + name2fixturedefs: Dict[str, Sequence["FixtureDef[Any]"]] + + def prune_dependency_tree(self) -> None: + """Recompute names_closure from initialnames and name2fixturedefs. + + Can only reduce names_closure, which means that the new closure will + always be a subset of the old one. The order is preserved. + + This method is needed because direct parametrization may shadow some + of the fixtures that were included in the originally built dependency + tree. In this way the dependency tree can get pruned, and the closure + of argnames may get reduced. + """ + closure: Set[str] = set() + working_set = set(self.initialnames) + while working_set: + argname = working_set.pop() + # Argname may be smth not included in the original names_closure, + # in which case we ignore it. This currently happens with pseudo + # FixtureDefs which wrap 'get_direct_param_fixture_func(request)'. + # So they introduce the new dependency 'request' which might have + # been missing in the original tree (closure). + if argname not in closure and argname in self.names_closure: + closure.add(argname) + if argname in self.name2fixturedefs: + working_set.update(self.name2fixturedefs[argname][-1].argnames) + + self.names_closure[:] = sorted(closure, key=self.names_closure.index) + + +class FixtureRequest(abc.ABC): + """The type of the ``request`` fixture. + + A request object gives access to the requesting test context and has a + ``param`` attribute in case the fixture is parametrized. + """ + + def __init__( + self, + pyfuncitem: "Function", + fixturename: Optional[str], + arg2fixturedefs: Dict[str, Sequence["FixtureDef[Any]"]], + fixture_defs: Dict[str, "FixtureDef[Any]"], + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + #: Fixture for which this request is being performed. + self.fixturename: Final = fixturename + self._pyfuncitem: Final = pyfuncitem + # The FixtureDefs for each fixture name requested by this item. + # Starts from the statically-known fixturedefs resolved during + # collection. Dynamically requested fixtures (using + # `request.getfixturevalue("foo")`) are added dynamically. + self._arg2fixturedefs: Final = arg2fixturedefs + # The evaluated argnames so far, mapping to the FixtureDef they resolved + # to. + self._fixture_defs: Final = fixture_defs + # Notes on the type of `param`: + # -`request.param` is only defined in parametrized fixtures, and will raise + # AttributeError otherwise. Python typing has no notion of "undefined", so + # this cannot be reflected in the type. + # - Technically `param` is only (possibly) defined on SubRequest, not + # FixtureRequest, but the typing of that is still in flux so this cheats. 
+ # - In the future we might consider using a generic for the param type, but + # for now just using Any. + self.param: Any + + @property + def _fixturemanager(self) -> "FixtureManager": + return self._pyfuncitem.session._fixturemanager + + @property + @abc.abstractmethod + def _scope(self) -> Scope: + raise NotImplementedError() + + @property + def scope(self) -> _ScopeName: + """Scope string, one of "function", "class", "module", "package", "session".""" + return self._scope.value + + @abc.abstractmethod + def _check_scope( + self, + requested_fixturedef: Union["FixtureDef[object]", PseudoFixtureDef[object]], + requested_scope: Scope, + ) -> None: + raise NotImplementedError() + + @property + def fixturenames(self) -> List[str]: + """Names of all active fixtures in this request.""" + result = list(self._pyfuncitem.fixturenames) + result.extend(set(self._fixture_defs).difference(result)) + return result + + @property + @abc.abstractmethod + def node(self): + """Underlying collection node (depends on current request scope).""" + raise NotImplementedError() + + @property + def config(self) -> Config: + """The pytest config object associated with this request.""" + return self._pyfuncitem.config + + @property + def function(self): + """Test function object if the request has a per-function scope.""" + if self.scope != "function": + raise AttributeError( + f"function not available in {self.scope}-scoped context" + ) + return self._pyfuncitem.obj + + @property + def cls(self): + """Class (can be None) where the test function was collected.""" + if self.scope not in ("class", "function"): + raise AttributeError(f"cls not available in {self.scope}-scoped context") + clscol = self._pyfuncitem.getparent(_pytest.python.Class) + if clscol: + return clscol.obj + + @property + def instance(self): + """Instance (can be None) on which test function was collected.""" + if self.scope != "function": + return None + return getattr(self._pyfuncitem, "instance", None) + + @property + def module(self): + """Python module object where the test function was collected.""" + if self.scope not in ("function", "class", "module"): + raise AttributeError(f"module not available in {self.scope}-scoped context") + mod = self._pyfuncitem.getparent(_pytest.python.Module) + assert mod is not None + return mod.obj + + @property + def path(self) -> Path: + """Path where the test function was collected.""" + if self.scope not in ("function", "class", "module", "package"): + raise AttributeError(f"path not available in {self.scope}-scoped context") + return self._pyfuncitem.path + + @property + def keywords(self) -> MutableMapping[str, Any]: + """Keywords/markers dictionary for the underlying node.""" + node: nodes.Node = self.node + return node.keywords + + @property + def session(self) -> "Session": + """Pytest session object.""" + return self._pyfuncitem.session + + @abc.abstractmethod + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + """Add finalizer/teardown function to be called without arguments after + the last test within the requesting test context finished execution.""" + raise NotImplementedError() + + def applymarker(self, marker: Union[str, MarkDecorator]) -> None: + """Apply a marker to a single test function invocation. + + This method is useful if you don't want to have a keyword/marker + on all function invocations. + + :param marker: + An object created by a call to ``pytest.mark.NAME(...)``. 
+ """ + self.node.add_marker(marker) + + def raiseerror(self, msg: Optional[str]) -> NoReturn: + """Raise a FixtureLookupError exception. + + :param msg: + An optional custom error message. + """ + raise FixtureLookupError(None, self, msg) + + def getfixturevalue(self, argname: str) -> Any: + """Dynamically run a named fixture function. + + Declaring fixtures via function argument is recommended where possible. + But if you can only decide whether to use another fixture at test + setup time, you may use this function to retrieve it inside a fixture + or test function body. + + This method can be used during the test setup phase or the test run + phase, but during the test teardown phase a fixture's value may not + be available. + + :param argname: + The fixture name. + :raises pytest.FixtureLookupError: + If the given fixture could not be found. + """ + # Note that in addition to the use case described in the docstring, + # getfixturevalue() is also called by pytest itself during item and fixture + # setup to evaluate the fixtures that are requested statically + # (using function parameters, autouse, etc). + + fixturedef = self._get_active_fixturedef(argname) + assert fixturedef.cached_result is not None, ( + f'The fixture value for "{argname}" is not available. ' + "This can happen when the fixture has already been torn down." + ) + return fixturedef.cached_result[0] + + def _iter_chain(self) -> Iterator["SubRequest"]: + """Yield all SubRequests in the chain, from self up. + + Note: does *not* yield the TopRequest. + """ + current = self + while isinstance(current, SubRequest): + yield current + current = current._parent_request + + def _get_active_fixturedef( + self, argname: str + ) -> Union["FixtureDef[object]", PseudoFixtureDef[object]]: + if argname == "request": + cached_result = (self, [0], None) + return PseudoFixtureDef(cached_result, Scope.Function) + + # If we already finished computing a fixture by this name in this item, + # return it. + fixturedef = self._fixture_defs.get(argname) + if fixturedef is not None: + self._check_scope(fixturedef, fixturedef._scope) + return fixturedef + + # Find the appropriate fixturedef. + fixturedefs = self._arg2fixturedefs.get(argname, None) + if fixturedefs is None: + # We arrive here because of a dynamic call to + # getfixturevalue(argname) which was naturally + # not known at parsing/collection time. + fixturedefs = self._fixturemanager.getfixturedefs(argname, self._pyfuncitem) + if fixturedefs is not None: + self._arg2fixturedefs[argname] = fixturedefs + # No fixtures defined with this name. + if fixturedefs is None: + raise FixtureLookupError(argname, self) + # The are no fixtures with this name applicable for the function. + if not fixturedefs: + raise FixtureLookupError(argname, self) + # A fixture may override another fixture with the same name, e.g. a + # fixture in a module can override a fixture in a conftest, a fixture in + # a class can override a fixture in the module, and so on. + # An overriding fixture can request its own name (possibly indirectly); + # in this case it gets the value of the fixture it overrides, one level + # up. + # Check how many `argname`s deep we are, and take the next one. + # `fixturedefs` is sorted from furthest to closest, so use negative + # indexing to go in reverse. + index = -1 + for request in self._iter_chain(): + if request.fixturename == argname: + index -= 1 + # If already consumed all of the available levels, fail. 
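# Illustrative sketch (annotation, not part of the vendored pytest code above).
# The getfixturevalue() docstring above describes deciding which fixture to
# use only at setup time. A minimal, hypothetical example of that pattern
# (fixture names here are made up):
import os

import pytest


@pytest.fixture
def sqlite_connection():
    return "sqlite://:memory:"


@pytest.fixture
def postgres_connection():
    return "postgresql://localhost/test"


@pytest.fixture
def db_connection(request):
    # Choose the concrete fixture dynamically, then resolve it by name.
    backend = os.environ.get("TEST_DB", "sqlite")
    return request.getfixturevalue(f"{backend}_connection")


def test_connection_string(db_connection):
    assert "://" in db_connection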
+ if -index > len(fixturedefs): + raise FixtureLookupError(argname, self) + fixturedef = fixturedefs[index] + + # Prepare a SubRequest object for calling the fixture. + try: + callspec = self._pyfuncitem.callspec + except AttributeError: + callspec = None + if callspec is not None and argname in callspec.params: + param = callspec.params[argname] + param_index = callspec.indices[argname] + # The parametrize invocation scope overrides the fixture's scope. + scope = callspec._arg2scope[argname] + else: + param = NOTSET + param_index = 0 + scope = fixturedef._scope + self._check_fixturedef_without_param(fixturedef) + self._check_scope(fixturedef, scope) + subrequest = SubRequest( + self, scope, param, param_index, fixturedef, _ispytest=True + ) + + # Make sure the fixture value is cached, running it if it isn't + fixturedef.execute(request=subrequest) + + self._fixture_defs[argname] = fixturedef + return fixturedef + + def _check_fixturedef_without_param(self, fixturedef: "FixtureDef[object]") -> None: + """Check that this request is allowed to execute this fixturedef without + a param.""" + funcitem = self._pyfuncitem + has_params = fixturedef.params is not None + fixtures_not_supported = getattr(funcitem, "nofuncargs", False) + if has_params and fixtures_not_supported: + msg = ( + f"{funcitem.name} does not support fixtures, maybe unittest.TestCase subclass?\n" + f"Node id: {funcitem.nodeid}\n" + f"Function type: {type(funcitem).__name__}" + ) + fail(msg, pytrace=False) + if has_params: + frame = inspect.stack()[3] + frameinfo = inspect.getframeinfo(frame[0]) + source_path = absolutepath(frameinfo.filename) + source_lineno = frameinfo.lineno + try: + source_path_str = str(source_path.relative_to(funcitem.config.rootpath)) + except ValueError: + source_path_str = str(source_path) + location = getlocation(fixturedef.func, funcitem.config.rootpath) + msg = ( + "The requested fixture has no parameter defined for test:\n" + f" {funcitem.nodeid}\n\n" + f"Requested fixture '{fixturedef.argname}' defined in:\n" + f"{location}\n\n" + f"Requested here:\n" + f"{source_path_str}:{source_lineno}" + ) + fail(msg, pytrace=False) + + def _get_fixturestack(self) -> List["FixtureDef[Any]"]: + values = [request._fixturedef for request in self._iter_chain()] + values.reverse() + return values + + +@final +class TopRequest(FixtureRequest): + """The type of the ``request`` fixture in a test function.""" + + def __init__(self, pyfuncitem: "Function", *, _ispytest: bool = False) -> None: + super().__init__( + fixturename=None, + pyfuncitem=pyfuncitem, + arg2fixturedefs=pyfuncitem._fixtureinfo.name2fixturedefs.copy(), + fixture_defs={}, + _ispytest=_ispytest, + ) + + @property + def _scope(self) -> Scope: + return Scope.Function + + def _check_scope( + self, + requested_fixturedef: Union["FixtureDef[object]", PseudoFixtureDef[object]], + requested_scope: Scope, + ) -> None: + # TopRequest always has function scope so always valid. 
+ pass + + @property + def node(self): + return self._pyfuncitem + + def __repr__(self) -> str: + return "" % (self.node) + + def _fillfixtures(self) -> None: + item = self._pyfuncitem + for argname in item.fixturenames: + if argname not in item.funcargs: + item.funcargs[argname] = self.getfixturevalue(argname) + + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + self.node.addfinalizer(finalizer) + + +@final +class SubRequest(FixtureRequest): + """The type of the ``request`` fixture in a fixture function requested + (transitively) by a test function.""" + + def __init__( + self, + request: FixtureRequest, + scope: Scope, + param: Any, + param_index: int, + fixturedef: "FixtureDef[object]", + *, + _ispytest: bool = False, + ) -> None: + super().__init__( + pyfuncitem=request._pyfuncitem, + fixturename=fixturedef.argname, + fixture_defs=request._fixture_defs, + arg2fixturedefs=request._arg2fixturedefs, + _ispytest=_ispytest, + ) + self._parent_request: Final[FixtureRequest] = request + self._scope_field: Final = scope + self._fixturedef: Final[FixtureDef[object]] = fixturedef + if param is not NOTSET: + self.param = param + self.param_index: Final = param_index + + def __repr__(self) -> str: + return f"" + + @property + def _scope(self) -> Scope: + return self._scope_field + + @property + def node(self): + scope = self._scope + if scope is Scope.Function: + # This might also be a non-function Item despite its attribute name. + node: Optional[nodes.Node] = self._pyfuncitem + elif scope is Scope.Package: + node = get_scope_package(self._pyfuncitem, self._fixturedef) + else: + node = get_scope_node(self._pyfuncitem, scope) + if node is None and scope is Scope.Class: + # Fallback to function item itself. + node = self._pyfuncitem + assert node, f'Could not obtain a node for scope "{scope}" for function {self._pyfuncitem!r}' + return node + + def _check_scope( + self, + requested_fixturedef: Union["FixtureDef[object]", PseudoFixtureDef[object]], + requested_scope: Scope, + ) -> None: + if isinstance(requested_fixturedef, PseudoFixtureDef): + return + if self._scope > requested_scope: + # Try to report something helpful. + argname = requested_fixturedef.argname + fixture_stack = "\n".join( + self._format_fixturedef_line(fixturedef) + for fixturedef in self._get_fixturestack() + ) + requested_fixture = self._format_fixturedef_line(requested_fixturedef) + fail( + f"ScopeMismatch: You tried to access the {requested_scope.value} scoped " + f"fixture {argname} with a {self._scope.value} scoped request object. 
" + f"Requesting fixture stack:\n{fixture_stack}\n" + f"Requested fixture:\n{requested_fixture}", + pytrace=False, + ) + + def _format_fixturedef_line(self, fixturedef: "FixtureDef[object]") -> str: + factory = fixturedef.func + path, lineno = getfslineno(factory) + if isinstance(path, Path): + path = bestrelpath(self._pyfuncitem.session.path, path) + signature = inspect.signature(factory) + return f"{path}:{lineno + 1}: def {factory.__name__}{signature}" + + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + self._fixturedef.addfinalizer(finalizer) + + +@final +class FixtureLookupError(LookupError): + """Could not return a requested fixture (missing or invalid).""" + + def __init__( + self, argname: Optional[str], request: FixtureRequest, msg: Optional[str] = None + ) -> None: + self.argname = argname + self.request = request + self.fixturestack = request._get_fixturestack() + self.msg = msg + + def formatrepr(self) -> "FixtureLookupErrorRepr": + tblines: List[str] = [] + addline = tblines.append + stack = [self.request._pyfuncitem.obj] + stack.extend(map(lambda x: x.func, self.fixturestack)) + msg = self.msg + if msg is not None: + # The last fixture raise an error, let's present + # it at the requesting side. + stack = stack[:-1] + for function in stack: + fspath, lineno = getfslineno(function) + try: + lines, _ = inspect.getsourcelines(get_real_func(function)) + except (OSError, IndexError, TypeError): + error_msg = "file %s, line %s: source code not available" + addline(error_msg % (fspath, lineno + 1)) + else: + addline(f"file {fspath}, line {lineno + 1}") + for i, line in enumerate(lines): + line = line.rstrip() + addline(" " + line) + if line.lstrip().startswith("def"): + break + + if msg is None: + fm = self.request._fixturemanager + available = set() + parent = self.request._pyfuncitem.parent + assert parent is not None + for name, fixturedefs in fm._arg2fixturedefs.items(): + faclist = list(fm._matchfactories(fixturedefs, parent)) + if faclist: + available.add(name) + if self.argname in available: + msg = ( + f" recursive dependency involving fixture '{self.argname}' detected" + ) + else: + msg = f"fixture '{self.argname}' not found" + msg += "\n available fixtures: {}".format(", ".join(sorted(available))) + msg += "\n use 'pytest --fixtures [testpath]' for help on them." 
+ + return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname) + + +class FixtureLookupErrorRepr(TerminalRepr): + def __init__( + self, + filename: Union[str, "os.PathLike[str]"], + firstlineno: int, + tblines: Sequence[str], + errorstring: str, + argname: Optional[str], + ) -> None: + self.tblines = tblines + self.errorstring = errorstring + self.filename = filename + self.firstlineno = firstlineno + self.argname = argname + + def toterminal(self, tw: TerminalWriter) -> None: + # tw.line("FixtureLookupError: %s" %(self.argname), red=True) + for tbline in self.tblines: + tw.line(tbline.rstrip()) + lines = self.errorstring.split("\n") + if lines: + tw.line( + f"{FormattedExcinfo.fail_marker} {lines[0].strip()}", + red=True, + ) + for line in lines[1:]: + tw.line( + f"{FormattedExcinfo.flow_marker} {line.strip()}", + red=True, + ) + tw.line() + tw.line("%s:%d" % (os.fspath(self.filename), self.firstlineno + 1)) + + +def call_fixture_func( + fixturefunc: "_FixtureFunc[FixtureValue]", request: FixtureRequest, kwargs +) -> FixtureValue: + if is_generator(fixturefunc): + fixturefunc = cast( + Callable[..., Generator[FixtureValue, None, None]], fixturefunc + ) + generator = fixturefunc(**kwargs) + try: + fixture_result = next(generator) + except StopIteration: + raise ValueError(f"{request.fixturename} did not yield a value") from None + finalizer = functools.partial(_teardown_yield_fixture, fixturefunc, generator) + request.addfinalizer(finalizer) + else: + fixturefunc = cast(Callable[..., FixtureValue], fixturefunc) + fixture_result = fixturefunc(**kwargs) + return fixture_result + + +def _teardown_yield_fixture(fixturefunc, it) -> None: + """Execute the teardown of a fixture function by advancing the iterator + after the yield and ensure the iteration ends (if not it means there is + more than one yield in the function).""" + try: + next(it) + except StopIteration: + pass + else: + fs, lineno = getfslineno(fixturefunc) + fail( + f"fixture function has more than one 'yield':\n\n" + f"{Source(fixturefunc).indent()}\n" + f"{fs}:{lineno + 1}", + pytrace=False, + ) + + +def _eval_scope_callable( + scope_callable: Callable[[str, Config], _ScopeName], + fixture_name: str, + config: Config, +) -> _ScopeName: + try: + # Type ignored because there is no typing mechanism to specify + # keyword arguments, currently. + result = scope_callable(fixture_name=fixture_name, config=config) # type: ignore[call-arg] + except Exception as e: + raise TypeError( + f"Error evaluating {scope_callable} while defining fixture '{fixture_name}'.\n" + "Expected a function with the signature (*, fixture_name, config)" + ) from e + if not isinstance(result, str): + fail( + f"Expected {scope_callable} to return a 'str' while defining fixture '{fixture_name}', but it returned:\n" + f"{result!r}", + pytrace=False, + ) + return result + + +@final +class FixtureDef(Generic[FixtureValue]): + """A container for a fixture definition. + + Note: At this time, only explicitly documented fields and methods are + considered public stable API. + """ + + def __init__( + self, + config: Config, + baseid: Optional[str], + argname: str, + func: "_FixtureFunc[FixtureValue]", + scope: Union[Scope, _ScopeName, Callable[[str, Config], _ScopeName], None], + params: Optional[Sequence[object]], + ids: Optional[ + Union[Tuple[Optional[object], ...], Callable[[Any], Optional[object]]] + ] = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + # The "base" node ID for the fixture. 
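# Illustrative sketch (annotation, not part of the vendored pytest code above).
# _eval_scope_callable() above supports "dynamic scope": passing a callable as
# the fixture scope, which is called with keyword arguments fixture_name and
# config and must return a scope string. Option and fixture names below are
# made up:
import pytest


def determine_scope(fixture_name, config):
    if config.getoption("--reuse-resources", default=False):
        return "session"
    return "function"


@pytest.fixture(scope=determine_scope)
def shared_resource():
    yield "resource-handle"  # placeholder value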
+ # + # This is a node ID prefix. A fixture is only available to a node (e.g. + # a `Function` item) if the fixture's baseid is a nodeid of a parent of + # node. + # + # For a fixture found in a Collector's object (e.g. a `Module`s module, + # a `Class`'s class), the baseid is the Collector's nodeid. + # + # For a fixture found in a conftest plugin, the baseid is the conftest's + # directory path relative to the rootdir. + # + # For other plugins, the baseid is the empty string (always matches). + self.baseid: Final = baseid or "" + # Whether the fixture was found from a node or a conftest in the + # collection tree. Will be false for fixtures defined in non-conftest + # plugins. + self.has_location: Final = baseid is not None + # The fixture factory function. + self.func: Final = func + # The name by which the fixture may be requested. + self.argname: Final = argname + if scope is None: + scope = Scope.Function + elif callable(scope): + scope = _eval_scope_callable(scope, argname, config) + if isinstance(scope, str): + scope = Scope.from_user( + scope, descr=f"Fixture '{func.__name__}'", where=baseid + ) + self._scope: Final = scope + # If the fixture is directly parametrized, the parameter values. + self.params: Final = params + # If the fixture is directly parametrized, a tuple of explicit IDs to + # assign to the parameter values, or a callable to generate an ID given + # a parameter value. + self.ids: Final = ids + # The names requested by the fixtures. + self.argnames: Final = getfuncargnames(func, name=argname) + # If the fixture was executed, the current value of the fixture. + # Can change if the fixture is executed with different parameters. + self.cached_result: Optional[_FixtureCachedResult[FixtureValue]] = None + self._finalizers: Final[List[Callable[[], object]]] = [] + + @property + def scope(self) -> _ScopeName: + """Scope string, one of "function", "class", "module", "package", "session".""" + return self._scope.value + + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + self._finalizers.append(finalizer) + + def finish(self, request: SubRequest) -> None: + exceptions: List[BaseException] = [] + while self._finalizers: + fin = self._finalizers.pop() + try: + fin() + except BaseException as e: + exceptions.append(e) + node = request.node + node.ihook.pytest_fixture_post_finalizer(fixturedef=self, request=request) + # Even if finalization fails, we invalidate the cached fixture + # value and remove all finalizers because they may be bound methods + # which will keep instances alive. + self.cached_result = None + self._finalizers.clear() + if len(exceptions) == 1: + raise exceptions[0] + elif len(exceptions) > 1: + msg = f'errors while tearing down fixture "{self.argname}" of {node}' + raise BaseExceptionGroup(msg, exceptions[::-1]) + + def execute(self, request: SubRequest) -> FixtureValue: + """Return the value of this fixture, executing it if not cached.""" + # Ensure that the dependent fixtures requested by this fixture are loaded. + # This needs to be done before checking if we have a cached value, since + # if a dependent fixture has their cache invalidated, e.g. due to + # parametrization, they finalize themselves and fixtures depending on it + # (which will likely include this fixture) setting `self.cached_result = None`. 
+ # See #4871 + requested_fixtures_that_should_finalize_us = [] + for argname in self.argnames: + fixturedef = request._get_active_fixturedef(argname) + # Saves requested fixtures in a list so we later can add our finalizer + # to them, ensuring that if a requested fixture gets torn down we get torn + # down first. This is generally handled by SetupState, but still currently + # needed when this fixture is not parametrized but depends on a parametrized + # fixture. + if not isinstance(fixturedef, PseudoFixtureDef): + requested_fixtures_that_should_finalize_us.append(fixturedef) + + # Check for (and return) cached value/exception. + my_cache_key = self.cache_key(request) + if self.cached_result is not None: + cache_key = self.cached_result[1] + # note: comparison with `==` can fail (or be expensive) for e.g. + # numpy arrays (#6497). + if my_cache_key is cache_key: + if self.cached_result[2] is not None: + exc = self.cached_result[2] + raise exc + else: + result = self.cached_result[0] + return result + # We have a previous but differently parametrized fixture instance + # so we need to tear it down before creating a new one. + self.finish(request) + assert self.cached_result is None + + # Add finalizer to requested fixtures we saved previously. + # We make sure to do this after checking for cached value to avoid + # adding our finalizer multiple times. (#12135) + finalizer = functools.partial(self.finish, request=request) + for parent_fixture in requested_fixtures_that_should_finalize_us: + parent_fixture.addfinalizer(finalizer) + + ihook = request.node.ihook + try: + # Setup the fixture, run the code in it, and cache the value + # in self.cached_result + result = ihook.pytest_fixture_setup(fixturedef=self, request=request) + finally: + # schedule our finalizer, even if the setup failed + request.node.addfinalizer(finalizer) + + return result + + def cache_key(self, request: SubRequest) -> object: + return getattr(request, "param", None) + + def __repr__(self) -> str: + return f"" + + +def resolve_fixture_function( + fixturedef: FixtureDef[FixtureValue], request: FixtureRequest +) -> "_FixtureFunc[FixtureValue]": + """Get the actual callable that can be called to obtain the fixture + value.""" + fixturefunc = fixturedef.func + # The fixture function needs to be bound to the actual + # request.instance so that code working with "fixturedef" behaves + # as expected. + instance = request.instance + if instance is not None: + # Handle the case where fixture is defined not in a test class, but some other class + # (for example a plugin class with a fixture), see #2270. + if hasattr(fixturefunc, "__self__") and not isinstance( + instance, + fixturefunc.__self__.__class__, + ): + return fixturefunc + fixturefunc = getimfunc(fixturedef.func) + if fixturefunc != fixturedef.func: + fixturefunc = fixturefunc.__get__(instance) + return fixturefunc + + +def pytest_fixture_setup( + fixturedef: FixtureDef[FixtureValue], request: SubRequest +) -> FixtureValue: + """Execution of fixture setup.""" + kwargs = {} + for argname in fixturedef.argnames: + kwargs[argname] = request.getfixturevalue(argname) + + fixturefunc = resolve_fixture_function(fixturedef, request) + my_cache_key = fixturedef.cache_key(request) + try: + result = call_fixture_func(fixturefunc, request, kwargs) + except TEST_OUTCOME as e: + if isinstance(e, skip.Exception): + # The test requested a fixture which caused a skip. + # Don't show the fixture as the skip location, as then the user + # wouldn't know which test skipped. 
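# Illustrative sketch (annotation, not part of the vendored pytest code above).
# cache_key() above is the parametrization value (request.param), so a
# parametrized fixture is executed once per parameter within its scope and
# torn down via finish() when the parameter changes. With module scope:
import pytest

SETUPS = []


@pytest.fixture(scope="module", params=["a", "b"])
def resource(request):
    SETUPS.append(request.param)
    yield request.param


def test_first(resource):
    assert resource in ("a", "b")


def test_second(resource):
    assert resource in ("a", "b")

# Running this module sets the fixture up twice in total (once per param,
# shared by both tests), so SETUPS ends up as ["a", "b"], not four entries.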
+ e._use_item_location = True + fixturedef.cached_result = (None, my_cache_key, e) + raise + fixturedef.cached_result = (result, my_cache_key, None) + return result + + +def wrap_function_to_error_out_if_called_directly( + function: FixtureFunction, + fixture_marker: "FixtureFunctionMarker", +) -> FixtureFunction: + """Wrap the given fixture function so we can raise an error about it being called directly, + instead of used as an argument in a test function.""" + name = fixture_marker.name or function.__name__ + message = ( + f'Fixture "{name}" called directly. Fixtures are not meant to be called directly,\n' + "but are created automatically when test functions request them as parameters.\n" + "See https://docs.pytest.org/en/stable/explanation/fixtures.html for more information about fixtures, and\n" + "https://docs.pytest.org/en/stable/deprecations.html#calling-fixtures-directly about how to update your code." + ) + + @functools.wraps(function) + def result(*args, **kwargs): + fail(message, pytrace=False) + + # Keep reference to the original function in our own custom attribute so we don't unwrap + # further than this point and lose useful wrappings like @mock.patch (#3774). + result.__pytest_wrapped__ = _PytestWrapper(function) # type: ignore[attr-defined] + + return cast(FixtureFunction, result) + + +@final +@dataclasses.dataclass(frozen=True) +class FixtureFunctionMarker: + scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" + params: Optional[Tuple[object, ...]] + autouse: bool = False + ids: Optional[ + Union[Tuple[Optional[object], ...], Callable[[Any], Optional[object]]] + ] = None + name: Optional[str] = None + + _ispytest: dataclasses.InitVar[bool] = False + + def __post_init__(self, _ispytest: bool) -> None: + check_ispytest(_ispytest) + + def __call__(self, function: FixtureFunction) -> FixtureFunction: + if inspect.isclass(function): + raise ValueError("class fixtures not supported (maybe in the future)") + + if getattr(function, "_pytestfixturefunction", False): + raise ValueError( + f"@pytest.fixture is being applied more than once to the same function {function.__name__!r}" + ) + + if hasattr(function, "pytestmark"): + warnings.warn(MARKED_FIXTURE, stacklevel=2) + + function = wrap_function_to_error_out_if_called_directly(function, self) + + name = self.name or function.__name__ + if name == "request": + location = getlocation(function) + fail( + f"'request' is a reserved word for fixtures, use another name:\n {location}", + pytrace=False, + ) + + # Type ignored because https://github.com/python/mypy/issues/2087. + function._pytestfixturefunction = self # type: ignore[attr-defined] + return function + + +@overload +def fixture( + fixture_function: FixtureFunction, + *, + scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" = ..., + params: Optional[Iterable[object]] = ..., + autouse: bool = ..., + ids: Optional[ + Union[Sequence[Optional[object]], Callable[[Any], Optional[object]]] + ] = ..., + name: Optional[str] = ..., +) -> FixtureFunction: ... + + +@overload +def fixture( + fixture_function: None = ..., + *, + scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" = ..., + params: Optional[Iterable[object]] = ..., + autouse: bool = ..., + ids: Optional[ + Union[Sequence[Optional[object]], Callable[[Any], Optional[object]]] + ] = ..., + name: Optional[str] = None, +) -> FixtureFunctionMarker: ... 
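# Illustrative sketch (annotation, not part of the vendored pytest code above).
# call_fixture_func()/_teardown_yield_fixture() earlier in this file drive
# generator fixtures: code before the single yield is setup, code after it is
# teardown. Typical usage of the fixture() decorator defined below:
import pytest


@pytest.fixture
def log_file(tmp_path):
    path = tmp_path / "run.log"
    handle = path.open("w")
    yield handle          # exactly one yield; a second yield is rejected
    handle.close()        # teardown runs regardless of the test outcome


def test_writes_log(log_file):
    log_file.write("hello\n")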
+ + +def fixture( + fixture_function: Optional[FixtureFunction] = None, + *, + scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" = "function", + params: Optional[Iterable[object]] = None, + autouse: bool = False, + ids: Optional[ + Union[Sequence[Optional[object]], Callable[[Any], Optional[object]]] + ] = None, + name: Optional[str] = None, +) -> Union[FixtureFunctionMarker, FixtureFunction]: + """Decorator to mark a fixture factory function. + + This decorator can be used, with or without parameters, to define a + fixture function. + + The name of the fixture function can later be referenced to cause its + invocation ahead of running tests: test modules or classes can use the + ``pytest.mark.usefixtures(fixturename)`` marker. + + Test functions can directly use fixture names as input arguments in which + case the fixture instance returned from the fixture function will be + injected. + + Fixtures can provide their values to test functions using ``return`` or + ``yield`` statements. When using ``yield`` the code block after the + ``yield`` statement is executed as teardown code regardless of the test + outcome, and must yield exactly once. + + :param scope: + The scope for which this fixture is shared; one of ``"function"`` + (default), ``"class"``, ``"module"``, ``"package"`` or ``"session"``. + + This parameter may also be a callable which receives ``(fixture_name, config)`` + as parameters, and must return a ``str`` with one of the values mentioned above. + + See :ref:`dynamic scope` in the docs for more information. + + :param params: + An optional list of parameters which will cause multiple invocations + of the fixture function and all of the tests using it. The current + parameter is available in ``request.param``. + + :param autouse: + If True, the fixture func is activated for all tests that can see it. + If False (the default), an explicit reference is needed to activate + the fixture. + + :param ids: + Sequence of ids each corresponding to the params so that they are + part of the test id. If no ids are provided they will be generated + automatically from the params. + + :param name: + The name of the fixture. This defaults to the name of the decorated + function. If a fixture is used in the same module in which it is + defined, the function name of the fixture will be shadowed by the + function arg that requests the fixture; one way to resolve this is to + name the decorated function ``fixture_`` and then use + ``@pytest.fixture(name='')``. + """ + fixture_marker = FixtureFunctionMarker( + scope=scope, + params=tuple(params) if params is not None else None, + autouse=autouse, + ids=None if ids is None else ids if callable(ids) else tuple(ids), + name=name, + _ispytest=True, + ) + + # Direct decoration. + if fixture_function: + return fixture_marker(fixture_function) + + return fixture_marker + + +def yield_fixture( + fixture_function=None, + *args, + scope="function", + params=None, + autouse=False, + ids=None, + name=None, +): + """(Return a) decorator to mark a yield-fixture factory function. + + .. deprecated:: 3.0 + Use :py:func:`pytest.fixture` directly instead. + """ + warnings.warn(YIELD_FIXTURE, stacklevel=2) + return fixture( + fixture_function, + *args, + scope=scope, + params=params, + autouse=autouse, + ids=ids, + name=name, + ) + + +@fixture(scope="session") +def pytestconfig(request: FixtureRequest) -> Config: + """Session-scoped fixture that returns the session's :class:`pytest.Config` + object. 
+ + Example:: + + def test_foo(pytestconfig): + if pytestconfig.getoption("verbose") > 0: + ... + + """ + return request.config + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "usefixtures", + type="args", + default=[], + help="List of default fixtures to be used with this project", + ) + group = parser.getgroup("general") + group.addoption( + "--fixtures", + "--funcargs", + action="store_true", + dest="showfixtures", + default=False, + help="Show available fixtures, sorted by plugin appearance " + "(fixtures with leading '_' are only shown with '-v')", + ) + group.addoption( + "--fixtures-per-test", + action="store_true", + dest="show_fixtures_per_test", + default=False, + help="Show fixtures per test", + ) + + +def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]: + if config.option.showfixtures: + showfixtures(config) + return 0 + if config.option.show_fixtures_per_test: + show_fixtures_per_test(config) + return 0 + return None + + +def _get_direct_parametrize_args(node: nodes.Node) -> Set[str]: + """Return all direct parametrization arguments of a node, so we don't + mistake them for fixtures. + + Check https://github.com/pytest-dev/pytest/issues/5036. + + These things are done later as well when dealing with parametrization + so this could be improved. + """ + parametrize_argnames: Set[str] = set() + for marker in node.iter_markers(name="parametrize"): + if not marker.kwargs.get("indirect", False): + p_argnames, _ = ParameterSet._parse_parametrize_args( + *marker.args, **marker.kwargs + ) + parametrize_argnames.update(p_argnames) + return parametrize_argnames + + +def deduplicate_names(*seqs: Iterable[str]) -> Tuple[str, ...]: + """De-duplicate the sequence of names while keeping the original order.""" + # Ideally we would use a set, but it does not preserve insertion order. + return tuple(dict.fromkeys(name for seq in seqs for name in seq)) + + +class FixtureManager: + """pytest fixture definitions and information is stored and managed + from this class. + + During collection fm.parsefactories() is called multiple times to parse + fixture function definitions into FixtureDef objects and internal + data structures. + + During collection of test functions, metafunc-mechanics instantiate + a FuncFixtureInfo object which is cached per node/func-name. + This FuncFixtureInfo object is later retrieved by Function nodes + which themselves offer a fixturenames attribute. + + The FuncFixtureInfo object holds information about fixtures and FixtureDefs + relevant for a particular function. An initial list of fixtures is + assembled like this: + + - ini-defined usefixtures + - autouse-marked fixtures along the collection chain up from the function + - usefixtures markers at module/class/function level + - test function funcargs + + Subsequently the funcfixtureinfo.fixturenames attribute is computed + as the closure of the fixtures needed to setup the initial fixtures, + i.e. fixtures needed by fixture functions themselves are appended + to the fixturenames list. + + Upon the test-setup phases all fixturenames are instantiated, retrieved + by a lookup of their FuncFixtureInfo. + """ + + def __init__(self, session: "Session") -> None: + self.session = session + self.config: Config = session.config + # Maps a fixture name (argname) to all of the FixtureDefs in the test + # suite/plugins defined with this name. Populated by parsefactories(). + # TODO: The order of the FixtureDefs list of each arg is significant, + # explain. 
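# Illustrative sketch (annotation, not part of the vendored pytest code above).
# The "usefixtures" ini option and the --fixtures/--fixtures-per-test flags
# registered above are typically exercised like this (names are made up):
#
#   # pytest.ini
#   [pytest]
#   usefixtures = cleanup_tmp_dirs
#
#   $ pytest --fixtures            # list available fixtures
#   $ pytest --fixtures-per-test   # show the fixtures each test uses
#
# The marker form of the same request, which FixtureManager folds into the
# initial fixture names as described in its docstring above:
import pytest


@pytest.fixture
def cleanup_tmp_dirs():
    yield
    # ... remove temporary directories here ...


@pytest.mark.usefixtures("cleanup_tmp_dirs")
def test_runs_with_cleanup():
    assert True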
+ self._arg2fixturedefs: Final[Dict[str, List[FixtureDef[Any]]]] = {} + self._holderobjseen: Final[Set[object]] = set() + # A mapping from a nodeid to a list of autouse fixtures it defines. + self._nodeid_autousenames: Final[Dict[str, List[str]]] = { + "": self.config.getini("usefixtures"), + } + session.config.pluginmanager.register(self, "funcmanage") + + def getfixtureinfo( + self, + node: nodes.Item, + func: Optional[Callable[..., object]], + cls: Optional[type], + ) -> FuncFixtureInfo: + """Calculate the :class:`FuncFixtureInfo` for an item. + + If ``func`` is None, or if the item sets an attribute + ``nofuncargs = True``, then ``func`` is not examined at all. + + :param node: + The item requesting the fixtures. + :param func: + The item's function. + :param cls: + If the function is a method, the method's class. + """ + if func is not None and not getattr(node, "nofuncargs", False): + argnames = getfuncargnames(func, name=node.name, cls=cls) + else: + argnames = () + usefixturesnames = self._getusefixturesnames(node) + autousenames = self._getautousenames(node) + initialnames = deduplicate_names(autousenames, usefixturesnames, argnames) + + direct_parametrize_args = _get_direct_parametrize_args(node) + + names_closure, arg2fixturedefs = self.getfixtureclosure( + parentnode=node, + initialnames=initialnames, + ignore_args=direct_parametrize_args, + ) + + return FuncFixtureInfo(argnames, initialnames, names_closure, arg2fixturedefs) + + def pytest_plugin_registered(self, plugin: _PluggyPlugin, plugin_name: str) -> None: + # Fixtures defined in conftest plugins are only visible to within the + # conftest's directory. This is unlike fixtures in non-conftest plugins + # which have global visibility. So for conftests, construct the base + # nodeid from the plugin name (which is the conftest path). + if plugin_name and plugin_name.endswith("conftest.py"): + # Note: we explicitly do *not* use `plugin.__file__` here -- The + # difference is that plugin_name has the correct capitalization on + # case-insensitive systems (Windows) and other normalization issues + # (issue #11816). + conftestpath = absolutepath(plugin_name) + try: + nodeid = str(conftestpath.parent.relative_to(self.config.rootpath)) + except ValueError: + nodeid = "" + if nodeid == ".": + nodeid = "" + if os.sep != nodes.SEP: + nodeid = nodeid.replace(os.sep, nodes.SEP) + else: + nodeid = None + + self.parsefactories(plugin, nodeid) + + def _getautousenames(self, node: nodes.Node) -> Iterator[str]: + """Return the names of autouse fixtures applicable to node.""" + for parentnode in node.listchain(): + basenames = self._nodeid_autousenames.get(parentnode.nodeid) + if basenames: + yield from basenames + + def _getusefixturesnames(self, node: nodes.Item) -> Iterator[str]: + """Return the names of usefixtures fixtures applicable to node.""" + for mark in node.iter_markers(name="usefixtures"): + yield from mark.args + + def getfixtureclosure( + self, + parentnode: nodes.Node, + initialnames: Tuple[str, ...], + ignore_args: AbstractSet[str], + ) -> Tuple[List[str], Dict[str, Sequence[FixtureDef[Any]]]]: + # Collect the closure of all fixtures, starting with the given + # fixturenames as the initial set. As we have to visit all + # factory definitions anyway, we also return an arg2fixturedefs + # mapping so that the caller can reuse it and does not have + # to re-discover fixturedefs again for each fixturename + # (discovering matching fixtures for a given name/node is expensive). 
+ + fixturenames_closure = list(initialnames) + + arg2fixturedefs: Dict[str, Sequence[FixtureDef[Any]]] = {} + lastlen = -1 + while lastlen != len(fixturenames_closure): + lastlen = len(fixturenames_closure) + for argname in fixturenames_closure: + if argname in ignore_args: + continue + if argname in arg2fixturedefs: + continue + fixturedefs = self.getfixturedefs(argname, parentnode) + if fixturedefs: + arg2fixturedefs[argname] = fixturedefs + for arg in fixturedefs[-1].argnames: + if arg not in fixturenames_closure: + fixturenames_closure.append(arg) + + def sort_by_scope(arg_name: str) -> Scope: + try: + fixturedefs = arg2fixturedefs[arg_name] + except KeyError: + return Scope.Function + else: + return fixturedefs[-1]._scope + + fixturenames_closure.sort(key=sort_by_scope, reverse=True) + return fixturenames_closure, arg2fixturedefs + + def pytest_generate_tests(self, metafunc: "Metafunc") -> None: + """Generate new tests based on parametrized fixtures used by the given metafunc""" + + def get_parametrize_mark_argnames(mark: Mark) -> Sequence[str]: + args, _ = ParameterSet._parse_parametrize_args(*mark.args, **mark.kwargs) + return args + + for argname in metafunc.fixturenames: + # Get the FixtureDefs for the argname. + fixture_defs = metafunc._arg2fixturedefs.get(argname) + if not fixture_defs: + # Will raise FixtureLookupError at setup time if not parametrized somewhere + # else (e.g @pytest.mark.parametrize) + continue + + # If the test itself parametrizes using this argname, give it + # precedence. + if any( + argname in get_parametrize_mark_argnames(mark) + for mark in metafunc.definition.iter_markers("parametrize") + ): + continue + + # In the common case we only look at the fixture def with the + # closest scope (last in the list). But if the fixture overrides + # another fixture, while requesting the super fixture, keep going + # in case the super fixture is parametrized (#1953). + for fixturedef in reversed(fixture_defs): + # Fixture is parametrized, apply it and stop. + if fixturedef.params is not None: + metafunc.parametrize( + argname, + fixturedef.params, + indirect=True, + scope=fixturedef.scope, + ids=fixturedef.ids, + ) + break + + # Not requesting the overridden super fixture, stop. + if argname not in fixturedef.argnames: + break + + # Try next super fixture, if any. + + def pytest_collection_modifyitems(self, items: List[nodes.Item]) -> None: + # Separate parametrized setups. + items[:] = reorder_items(items) + + def _register_fixture( + self, + *, + name: str, + func: "_FixtureFunc[object]", + nodeid: Optional[str], + scope: Union[ + Scope, _ScopeName, Callable[[str, Config], _ScopeName], None + ] = "function", + params: Optional[Sequence[object]] = None, + ids: Optional[ + Union[Tuple[Optional[object], ...], Callable[[Any], Optional[object]]] + ] = None, + autouse: bool = False, + ) -> None: + """Register a fixture + + :param name: + The fixture's name. + :param func: + The fixture's implementation function. + :param nodeid: + The visibility of the fixture. The fixture will be available to the + node with this nodeid and its children in the collection tree. + None means that the fixture is visible to the entire collection tree, + e.g. a fixture defined for general use in a plugin. + :param scope: + The fixture's scope. + :param params: + The fixture's parametrization params. + :param ids: + The fixture's IDs. + :param autouse: + Whether this is an autouse fixture. 
+ """ + fixture_def = FixtureDef( + config=self.config, + baseid=nodeid, + argname=name, + func=func, + scope=scope, + params=params, + ids=ids, + _ispytest=True, + ) + + faclist = self._arg2fixturedefs.setdefault(name, []) + if fixture_def.has_location: + faclist.append(fixture_def) + else: + # fixturedefs with no location are at the front + # so this inserts the current fixturedef after the + # existing fixturedefs from external plugins but + # before the fixturedefs provided in conftests. + i = len([f for f in faclist if not f.has_location]) + faclist.insert(i, fixture_def) + if autouse: + self._nodeid_autousenames.setdefault(nodeid or "", []).append(name) + + @overload + def parsefactories( + self, + node_or_obj: nodes.Node, + ) -> None: + raise NotImplementedError() + + @overload + def parsefactories( + self, + node_or_obj: object, + nodeid: Optional[str], + ) -> None: + raise NotImplementedError() + + def parsefactories( + self, + node_or_obj: Union[nodes.Node, object], + nodeid: Union[str, NotSetType, None] = NOTSET, + ) -> None: + """Collect fixtures from a collection node or object. + + Found fixtures are parsed into `FixtureDef`s and saved. + + If `node_or_object` is a collection node (with an underlying Python + object), the node's object is traversed and the node's nodeid is used to + determine the fixtures' visibilty. `nodeid` must not be specified in + this case. + + If `node_or_object` is an object (e.g. a plugin), the object is + traversed and the given `nodeid` is used to determine the fixtures' + visibility. `nodeid` must be specified in this case; None and "" mean + total visibility. + """ + if nodeid is not NOTSET: + holderobj = node_or_obj + else: + assert isinstance(node_or_obj, nodes.Node) + holderobj = cast(object, node_or_obj.obj) # type: ignore[attr-defined] + assert isinstance(node_or_obj.nodeid, str) + nodeid = node_or_obj.nodeid + if holderobj in self._holderobjseen: + return + + self._holderobjseen.add(holderobj) + for name in dir(holderobj): + # The attribute can be an arbitrary descriptor, so the attribute + # access below can raise. safe_getatt() ignores such exceptions. + obj = safe_getattr(holderobj, name, None) + marker = getfixturemarker(obj) + if not isinstance(marker, FixtureFunctionMarker): + # Magic globals with __getattr__ might have got us a wrong + # fixture attribute. + continue + + if marker.name: + name = marker.name + + # During fixture definition we wrap the original fixture function + # to issue a warning if called directly, so here we unwrap it in + # order to not emit the warning when pytest itself calls the + # fixture function. + func = get_real_method(obj, holderobj) + + self._register_fixture( + name=name, + nodeid=nodeid, + func=func, + scope=marker.scope, + params=marker.params, + ids=marker.ids, + autouse=marker.autouse, + ) + + def getfixturedefs( + self, argname: str, node: nodes.Node + ) -> Optional[Sequence[FixtureDef[Any]]]: + """Get FixtureDefs for a fixture name which are applicable + to a given node. + + Returns None if there are no fixtures at all defined with the given + name. (This is different from the case in which there are fixtures + with the given name, but none applicable to the node. In this case, + an empty result is returned). + + :param argname: Name of the fixture to search for. + :param node: The requesting Node. 
+ """ + try: + fixturedefs = self._arg2fixturedefs[argname] + except KeyError: + return None + return tuple(self._matchfactories(fixturedefs, node)) + + def _matchfactories( + self, fixturedefs: Iterable[FixtureDef[Any]], node: nodes.Node + ) -> Iterator[FixtureDef[Any]]: + parentnodeids = {n.nodeid for n in node.iter_parents()} + for fixturedef in fixturedefs: + if fixturedef.baseid in parentnodeids: + yield fixturedef + + +def show_fixtures_per_test(config: Config) -> Union[int, ExitCode]: + from _pytest.main import wrap_session + + return wrap_session(config, _show_fixtures_per_test) + + +_PYTEST_DIR = Path(_pytest.__file__).parent + + +def _pretty_fixture_path(invocation_dir: Path, func) -> str: + loc = Path(getlocation(func, invocation_dir)) + prefix = Path("...", "_pytest") + try: + return str(prefix / loc.relative_to(_PYTEST_DIR)) + except ValueError: + return bestrelpath(invocation_dir, loc) + + +def _show_fixtures_per_test(config: Config, session: "Session") -> None: + import _pytest.config + + session.perform_collect() + invocation_dir = config.invocation_params.dir + tw = _pytest.config.create_terminal_writer(config) + verbose = config.getvalue("verbose") + + def get_best_relpath(func) -> str: + loc = getlocation(func, invocation_dir) + return bestrelpath(invocation_dir, Path(loc)) + + def write_fixture(fixture_def: FixtureDef[object]) -> None: + argname = fixture_def.argname + if verbose <= 0 and argname.startswith("_"): + return + prettypath = _pretty_fixture_path(invocation_dir, fixture_def.func) + tw.write(f"{argname}", green=True) + tw.write(f" -- {prettypath}", yellow=True) + tw.write("\n") + fixture_doc = inspect.getdoc(fixture_def.func) + if fixture_doc: + write_docstring( + tw, fixture_doc.split("\n\n")[0] if verbose <= 0 else fixture_doc + ) + else: + tw.line(" no docstring available", red=True) + + def write_item(item: nodes.Item) -> None: + # Not all items have _fixtureinfo attribute. + info: Optional[FuncFixtureInfo] = getattr(item, "_fixtureinfo", None) + if info is None or not info.name2fixturedefs: + # This test item does not use any fixtures. + return + tw.line() + tw.sep("-", f"fixtures used by {item.name}") + # TODO: Fix this type ignore. + tw.sep("-", f"({get_best_relpath(item.function)})") # type: ignore[attr-defined] + # dict key not used in loop but needed for sorting. + for _, fixturedefs in sorted(info.name2fixturedefs.items()): + assert fixturedefs is not None + if not fixturedefs: + continue + # Last item is expected to be the one used by the test item. 
+ write_fixture(fixturedefs[-1]) + + for session_item in session.items: + write_item(session_item) + + +def showfixtures(config: Config) -> Union[int, ExitCode]: + from _pytest.main import wrap_session + + return wrap_session(config, _showfixtures_main) + + +def _showfixtures_main(config: Config, session: "Session") -> None: + import _pytest.config + + session.perform_collect() + invocation_dir = config.invocation_params.dir + tw = _pytest.config.create_terminal_writer(config) + verbose = config.getvalue("verbose") + + fm = session._fixturemanager + + available = [] + seen: Set[Tuple[str, str]] = set() + + for argname, fixturedefs in fm._arg2fixturedefs.items(): + assert fixturedefs is not None + if not fixturedefs: + continue + for fixturedef in fixturedefs: + loc = getlocation(fixturedef.func, invocation_dir) + if (fixturedef.argname, loc) in seen: + continue + seen.add((fixturedef.argname, loc)) + available.append( + ( + len(fixturedef.baseid), + fixturedef.func.__module__, + _pretty_fixture_path(invocation_dir, fixturedef.func), + fixturedef.argname, + fixturedef, + ) + ) + + available.sort() + currentmodule = None + for baseid, module, prettypath, argname, fixturedef in available: + if currentmodule != module: + if not module.startswith("_pytest."): + tw.line() + tw.sep("-", f"fixtures defined from {module}") + currentmodule = module + if verbose <= 0 and argname.startswith("_"): + continue + tw.write(f"{argname}", green=True) + if fixturedef.scope != "function": + tw.write(" [%s scope]" % fixturedef.scope, cyan=True) + tw.write(f" -- {prettypath}", yellow=True) + tw.write("\n") + doc = inspect.getdoc(fixturedef.func) + if doc: + write_docstring(tw, doc.split("\n\n")[0] if verbose <= 0 else doc) + else: + tw.line(" no docstring available", red=True) + tw.line() + + +def write_docstring(tw: TerminalWriter, doc: str, indent: str = " ") -> None: + for line in doc.split("\n"): + tw.line(indent + line) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/freeze_support.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/freeze_support.py new file mode 100644 index 000000000..e03a6d175 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/freeze_support.py @@ -0,0 +1,45 @@ +"""Provides a function to report all internal modules for using freezing +tools.""" + +import types +from typing import Iterator +from typing import List +from typing import Union + + +def freeze_includes() -> List[str]: + """Return a list of module names used by pytest that should be + included by cx_freeze.""" + import _pytest + + result = list(_iter_all_modules(_pytest)) + return result + + +def _iter_all_modules( + package: Union[str, types.ModuleType], + prefix: str = "", +) -> Iterator[str]: + """Iterate over the names of all modules that can be found in the given + package, recursively. + + >>> import _pytest + >>> list(_iter_all_modules(_pytest)) + ['_pytest._argcomplete', '_pytest._code.code', ...] + """ + import os + import pkgutil + + if isinstance(package, str): + path = package + else: + # Type ignored because typeshed doesn't define ModuleType.__path__ + # (only defined on packages). + package_path = package.__path__ + path, prefix = package_path[0], package.__name__ + "." 
+ for _, name, is_package in pkgutil.iter_modules([path]): + if is_package: + for m in _iter_all_modules(os.path.join(path, name), prefix=name + "."): + yield prefix + m + else: + yield prefix + name diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/helpconfig.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/helpconfig.py new file mode 100644 index 000000000..37fbdf04d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/helpconfig.py @@ -0,0 +1,272 @@ +# mypy: allow-untyped-defs +"""Version info, help messages, tracing configuration.""" + +from argparse import Action +import os +import sys +from typing import Generator +from typing import List +from typing import Optional +from typing import Union + +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import PrintHelp +from _pytest.config.argparsing import Parser +from _pytest.terminal import TerminalReporter +import pytest + + +class HelpAction(Action): + """An argparse Action that will raise an exception in order to skip the + rest of the argument parsing when --help is passed. + + This prevents argparse from quitting due to missing required arguments + when any are defined, for example by ``pytest_addoption``. + This is similar to the way that the builtin argparse --help option is + implemented by raising SystemExit. + """ + + def __init__(self, option_strings, dest=None, default=False, help=None): + super().__init__( + option_strings=option_strings, + dest=dest, + const=True, + default=default, + nargs=0, + help=help, + ) + + def __call__(self, parser, namespace, values, option_string=None): + setattr(namespace, self.dest, self.const) + + # We should only skip the rest of the parsing after preparse is done. + if getattr(parser._parser, "after_preparse", False): + raise PrintHelp + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--version", + "-V", + action="count", + default=0, + dest="version", + help="Display pytest version and information about plugins. " + "When given twice, also display information about plugins.", + ) + group._addoption( + "-h", + "--help", + action=HelpAction, + dest="help", + help="Show help message and configuration info", + ) + group._addoption( + "-p", + action="append", + dest="plugins", + default=[], + metavar="name", + help="Early-load given plugin module name or entry point (multi-allowed). " + "To avoid loading of plugins, use the `no:` prefix, e.g. " + "`no:doctest`.", + ) + group.addoption( + "--traceconfig", + "--trace-config", + action="store_true", + default=False, + help="Trace considerations of conftest.py files", + ) + group.addoption( + "--debug", + action="store", + nargs="?", + const="pytestdebug.log", + dest="debug", + metavar="DEBUG_FILE_NAME", + help="Store internal tracing debug information in this log file. " + "This file is opened with 'w' and truncated as a result, care advised. " + "Default: pytestdebug.log.", + ) + group._addoption( + "-o", + "--override-ini", + dest="override_ini", + action="append", + help='Override ini option with "option=value" style, ' + "e.g. `-o xfail_strict=True -o cache_dir=cache`.", + ) + + +@pytest.hookimpl(wrapper=True) +def pytest_cmdline_parse() -> Generator[None, Config, Config]: + config = yield + + if config.option.debug: + # --debug | --debug was provided. 
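``freeze_includes()`` above enumerates pytest's internal modules so that freezing tools can bundle them. A hedged sketch of feeding it to cx_Freeze (the project and script names are invented; consult cx_Freeze's documentation for its current option spelling):

.. code-block:: python

    # setup.py -- illustrative only; "app_main.py" is an invented script name.
    from cx_Freeze import Executable, setup

    import pytest

    setup(
        name="app_main",
        executables=[Executable("app_main.py")],
        options={"build_exe": {"includes": pytest.freeze_includes()}},
    )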
+ path = config.option.debug + debugfile = open(path, "w", encoding="utf-8") + debugfile.write( + "versions pytest-{}, " + "python-{}\ninvocation_dir={}\ncwd={}\nargs={}\n\n".format( + pytest.__version__, + ".".join(map(str, sys.version_info)), + config.invocation_params.dir, + os.getcwd(), + config.invocation_params.args, + ) + ) + config.trace.root.setwriter(debugfile.write) + undo_tracing = config.pluginmanager.enable_tracing() + sys.stderr.write("writing pytest debug information to %s\n" % path) + + def unset_tracing() -> None: + debugfile.close() + sys.stderr.write("wrote pytest debug information to %s\n" % debugfile.name) + config.trace.root.setwriter(None) + undo_tracing() + + config.add_cleanup(unset_tracing) + + return config + + +def showversion(config: Config) -> None: + if config.option.version > 1: + sys.stdout.write( + f"This is pytest version {pytest.__version__}, imported from {pytest.__file__}\n" + ) + plugininfo = getpluginversioninfo(config) + if plugininfo: + for line in plugininfo: + sys.stdout.write(line + "\n") + else: + sys.stdout.write(f"pytest {pytest.__version__}\n") + + +def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]: + if config.option.version > 0: + showversion(config) + return 0 + elif config.option.help: + config._do_configure() + showhelp(config) + config._ensure_unconfigure() + return 0 + return None + + +def showhelp(config: Config) -> None: + import textwrap + + reporter: Optional[TerminalReporter] = config.pluginmanager.get_plugin( + "terminalreporter" + ) + assert reporter is not None + tw = reporter._tw + tw.write(config._parser.optparser.format_help()) + tw.line() + tw.line( + "[pytest] ini-options in the first " + "pytest.ini|tox.ini|setup.cfg|pyproject.toml file found:" + ) + tw.line() + + columns = tw.fullwidth # costly call + indent_len = 24 # based on argparse's max_help_position=24 + indent = " " * indent_len + for name in config._parser._ininames: + help, type, default = config._parser._inidict[name] + if type is None: + type = "string" + if help is None: + raise TypeError(f"help argument cannot be None for {name}") + spec = f"{name} ({type}):" + tw.write(" %s" % spec) + spec_len = len(spec) + if spec_len > (indent_len - 3): + # Display help starting at a new line. + tw.line() + helplines = textwrap.wrap( + help, + columns, + initial_indent=indent, + subsequent_indent=indent, + break_on_hyphens=False, + ) + + for line in helplines: + tw.line(line) + else: + # Display help starting after the spec, following lines indented. 
+ tw.write(" " * (indent_len - spec_len - 2)) + wrapped = textwrap.wrap(help, columns - indent_len, break_on_hyphens=False) + + if wrapped: + tw.line(wrapped[0]) + for line in wrapped[1:]: + tw.line(indent + line) + + tw.line() + tw.line("Environment variables:") + vars = [ + ("PYTEST_ADDOPTS", "Extra command line options"), + ("PYTEST_PLUGINS", "Comma-separated plugins to load during startup"), + ("PYTEST_DISABLE_PLUGIN_AUTOLOAD", "Set to disable plugin auto-loading"), + ("PYTEST_DEBUG", "Set to enable debug tracing of pytest's internals"), + ] + for name, help in vars: + tw.line(f" {name:<24} {help}") + tw.line() + tw.line() + + tw.line("to see available markers type: pytest --markers") + tw.line("to see available fixtures type: pytest --fixtures") + tw.line( + "(shown according to specified file_or_dir or current dir " + "if not specified; fixtures with leading '_' are only shown " + "with the '-v' option" + ) + + for warningreport in reporter.stats.get("warnings", []): + tw.line("warning : " + warningreport.message, red=True) + return + + +conftest_options = [("pytest_plugins", "list of plugin names to load")] + + +def getpluginversioninfo(config: Config) -> List[str]: + lines = [] + plugininfo = config.pluginmanager.list_plugin_distinfo() + if plugininfo: + lines.append("setuptools registered plugins:") + for plugin, dist in plugininfo: + loc = getattr(plugin, "__file__", repr(plugin)) + content = f"{dist.project_name}-{dist.version} at {loc}" + lines.append(" " + content) + return lines + + +def pytest_report_header(config: Config) -> List[str]: + lines = [] + if config.option.debug or config.option.traceconfig: + lines.append(f"using: pytest-{pytest.__version__}") + + verinfo = getpluginversioninfo(config) + if verinfo: + lines.extend(verinfo) + + if config.option.traceconfig: + lines.append("active plugins:") + items = config.pluginmanager.list_name_plugin() + for name, plugin in items: + if hasattr(plugin, "__file__"): + r = plugin.__file__ + else: + r = repr(plugin) + lines.append(f" {name:<20}: {r}") + return lines diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/hookspec.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/hookspec.py new file mode 100644 index 000000000..acfe7eb95 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/hookspec.py @@ -0,0 +1,1321 @@ +# mypy: allow-untyped-defs +"""Hook specifications for pytest plugins which are invoked by pytest itself +and by builtin plugins.""" + +from pathlib import Path +from typing import Any +from typing import Dict +from typing import List +from typing import Mapping +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +from pluggy import HookspecMarker + +from .deprecated import HOOK_LEGACY_PATH_ARG + + +if TYPE_CHECKING: + import pdb + from typing import Literal + import warnings + + from _pytest._code.code import ExceptionInfo + from _pytest._code.code import ExceptionRepr + from _pytest.compat import LEGACY_PATH + from _pytest.config import _PluggyPlugin + from _pytest.config import Config + from _pytest.config import ExitCode + from _pytest.config import PytestPluginManager + from _pytest.config.argparsing import Parser + from _pytest.fixtures import FixtureDef + from _pytest.fixtures import SubRequest + from _pytest.main import Session + from _pytest.nodes import Collector + from _pytest.nodes import Item + from _pytest.outcomes import Exit + from _pytest.python import 
Class + from _pytest.python import Function + from _pytest.python import Metafunc + from _pytest.python import Module + from _pytest.reports import CollectReport + from _pytest.reports import TestReport + from _pytest.runner import CallInfo + from _pytest.terminal import TerminalReporter + from _pytest.terminal import TestShortLogReport + + +hookspec = HookspecMarker("pytest") + +# ------------------------------------------------------------------------- +# Initialization hooks called for every plugin +# ------------------------------------------------------------------------- + + +@hookspec(historic=True) +def pytest_addhooks(pluginmanager: "PytestPluginManager") -> None: + """Called at plugin registration time to allow adding new hooks via a call to + :func:`pluginmanager.add_hookspecs(module_or_class, prefix) `. + + :param pluginmanager: The pytest plugin manager. + + .. note:: + This hook is incompatible with hook wrappers. + + Use in conftest plugins + ======================= + + If a conftest plugin implements this hook, it will be called immediately + when the conftest is registered. + """ + + +@hookspec(historic=True) +def pytest_plugin_registered( + plugin: "_PluggyPlugin", + plugin_name: str, + manager: "PytestPluginManager", +) -> None: + """A new pytest plugin got registered. + + :param plugin: The plugin module or instance. + :param plugin_name: The name by which the plugin is registered. + :param manager: The pytest plugin manager. + + .. note:: + This hook is incompatible with hook wrappers. + + Use in conftest plugins + ======================= + + If a conftest plugin implements this hook, it will be called immediately + when the conftest is registered, once for each plugin registered thus far + (including itself!), and for all plugins thereafter when they are + registered. + """ + + +@hookspec(historic=True) +def pytest_addoption(parser: "Parser", pluginmanager: "PytestPluginManager") -> None: + """Register argparse-style options and ini-style config values, + called once at the beginning of a test run. + + :param parser: + To add command line options, call + :py:func:`parser.addoption(...) `. + To add ini-file values call :py:func:`parser.addini(...) + `. + + :param pluginmanager: + The pytest plugin manager, which can be used to install :py:func:`~pytest.hookspec`'s + or :py:func:`~pytest.hookimpl`'s and allow one plugin to call another plugin's hooks + to change how command line options are added. + + Options can later be accessed through the + :py:class:`config ` object, respectively: + + - :py:func:`config.getoption(name) ` to + retrieve the value of a command line option. + + - :py:func:`config.getini(name) ` to retrieve + a value read from an ini-style file. + + The config object is passed around on many internal objects via the ``.config`` + attribute or can be retrieved as the ``pytestconfig`` fixture. + + .. note:: + This hook is incompatible with hook wrappers. + + Use in conftest plugins + ======================= + + If a conftest plugin implements this hook, it will be called immediately + when the conftest is registered. + + This hook is only called for :ref:`initial conftests `. + """ + + +@hookspec(historic=True) +def pytest_configure(config: "Config") -> None: + """Allow plugins and conftest files to perform initial configuration. + + .. note:: + This hook is incompatible with hook wrappers. + + :param config: The pytest config object. 
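For the ``pytest_addoption`` hook documented above, a hedged conftest sketch; the ``--run-slow`` option name is invented, and the value is read back later through ``config.getoption()``:

.. code-block:: python

    # conftest.py -- illustrative only; "--run-slow" is an invented option.
    def pytest_addoption(parser):
        parser.addoption(
            "--run-slow",
            action="store_true",
            default=False,
            help="also run tests marked as slow",
        )


    def pytest_report_header(config):
        # Options registered above are retrievable once parsing is done.
        return f"run-slow: {config.getoption('--run-slow')}"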
+ + Use in conftest plugins + ======================= + + This hook is called for every :ref:`initial conftest ` file + after command line options have been parsed. After that, the hook is called + for other conftest files as they are registered. + """ + + +# ------------------------------------------------------------------------- +# Bootstrapping hooks called for plugins registered early enough: +# internal and 3rd party plugins. +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_cmdline_parse( + pluginmanager: "PytestPluginManager", args: List[str] +) -> Optional["Config"]: + """Return an initialized :class:`~pytest.Config`, parsing the specified args. + + Stops at first non-None result, see :ref:`firstresult`. + + .. note:: + This hook is only called for plugin classes passed to the + ``plugins`` arg when using `pytest.main`_ to perform an in-process + test run. + + :param pluginmanager: The pytest plugin manager. + :param args: List of arguments passed on the command line. + :returns: A pytest config object. + + Use in conftest plugins + ======================= + + This hook is not called for conftest files. + """ + + +def pytest_load_initial_conftests( + early_config: "Config", parser: "Parser", args: List[str] +) -> None: + """Called to implement the loading of :ref:`initial conftest files + ` ahead of command line option parsing. + + :param early_config: The pytest config object. + :param args: Arguments passed on the command line. + :param parser: To add command line options. + + Use in conftest plugins + ======================= + + This hook is not called for conftest files. + """ + + +@hookspec(firstresult=True) +def pytest_cmdline_main(config: "Config") -> Optional[Union["ExitCode", int]]: + """Called for performing the main command line action. + + The default implementation will invoke the configure hooks and + :hook:`pytest_runtestloop`. + + Stops at first non-None result, see :ref:`firstresult`. + + :param config: The pytest config object. + :returns: The exit code. + + Use in conftest plugins + ======================= + + This hook is only called for :ref:`initial conftests `. + """ + + +# ------------------------------------------------------------------------- +# collection hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_collection(session: "Session") -> Optional[object]: + """Perform the collection phase for the given session. + + Stops at first non-None result, see :ref:`firstresult`. + The return value is not used, but only stops further processing. + + The default collection phase is this (see individual hooks for full details): + + 1. Starting from ``session`` as the initial collector: + + 1. ``pytest_collectstart(collector)`` + 2. ``report = pytest_make_collect_report(collector)`` + 3. ``pytest_exception_interact(collector, call, report)`` if an interactive exception occurred + 4. For each collected node: + + 1. If an item, ``pytest_itemcollected(item)`` + 2. If a collector, recurse into it. + + 5. ``pytest_collectreport(report)`` + + 2. ``pytest_collection_modifyitems(session, config, items)`` + + 1. ``pytest_deselected(items)`` for any deselected items (may be called multiple times) + + 3. ``pytest_collection_finish(session)`` + 4. Set ``session.items`` to the list of collected items + 5. 
Set ``session.testscollected`` to the number of collected items + + You can implement this hook to only perform some action before collection, + for example the terminal plugin uses it to start displaying the collection + counter (and returns `None`). + + :param session: The pytest session object. + + Use in conftest plugins + ======================= + + This hook is only called for :ref:`initial conftests `. + """ + + +def pytest_collection_modifyitems( + session: "Session", config: "Config", items: List["Item"] +) -> None: + """Called after collection has been performed. May filter or re-order + the items in-place. + + :param session: The pytest session object. + :param config: The pytest config object. + :param items: List of item objects. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +def pytest_collection_finish(session: "Session") -> None: + """Called after collection has been performed and modified. + + :param session: The pytest session object. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +@hookspec( + firstresult=True, + warn_on_impl_args={ + "path": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="path", pathlib_path_arg="collection_path" + ), + }, +) +def pytest_ignore_collect( + collection_path: Path, path: "LEGACY_PATH", config: "Config" +) -> Optional[bool]: + """Return True to prevent considering this path for collection. + + This hook is consulted for all files and directories prior to calling + more specific hooks. + + Stops at first non-None result, see :ref:`firstresult`. + + :param collection_path: The path to analyze. + :param path: The path to analyze (deprecated). + :param config: The pytest config object. + + .. versionchanged:: 7.0.0 + The ``collection_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``path`` parameter. The ``path`` parameter + has been deprecated. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collection path, only + conftest files in parent directories of the collection path are consulted + (if the path is a directory, its own conftest file is *not* consulted - a + directory cannot ignore itself!). + """ + + +@hookspec(firstresult=True) +def pytest_collect_directory(path: Path, parent: "Collector") -> "Optional[Collector]": + """Create a :class:`~pytest.Collector` for the given directory, or None if + not relevant. + + .. versionadded:: 8.0 + + For best results, the returned collector should be a subclass of + :class:`~pytest.Directory`, but this is not required. + + The new node needs to have the specified ``parent`` as a parent. + + Stops at first non-None result, see :ref:`firstresult`. + + :param path: The path to analyze. + + See :ref:`custom directory collectors` for a simple example of use of this + hook. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collection path, only + conftest files in parent directories of the collection path are consulted + (if the path is a directory, its own conftest file is *not* consulted - a + directory cannot collect itself!). 
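As a hedged illustration of ``pytest_collection_modifyitems`` reordering items in place (the ``slow`` marker name is invented):

.. code-block:: python

    # conftest.py -- illustrative only; the "slow" marker is invented.
    def pytest_collection_modifyitems(session, config, items):
        # Stable sort: tests marked "slow" run after everything else,
        # otherwise the collected order is preserved.
        items.sort(key=lambda item: bool(item.get_closest_marker("slow")))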
+ """ + + +@hookspec( + warn_on_impl_args={ + "path": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="path", pathlib_path_arg="file_path" + ), + }, +) +def pytest_collect_file( + file_path: Path, path: "LEGACY_PATH", parent: "Collector" +) -> "Optional[Collector]": + """Create a :class:`~pytest.Collector` for the given path, or None if not relevant. + + For best results, the returned collector should be a subclass of + :class:`~pytest.File`, but this is not required. + + The new node needs to have the specified ``parent`` as a parent. + + :param file_path: The path to analyze. + :param path: The path to collect (deprecated). + + .. versionchanged:: 7.0.0 + The ``file_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``path`` parameter. The ``path`` parameter + has been deprecated. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given file path, only + conftest files in parent directories of the file path are consulted. + """ + + +# logging hooks for collection + + +def pytest_collectstart(collector: "Collector") -> None: + """Collector starts collecting. + + :param collector: + The collector. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collector, only + conftest files in the collector's directory and its parent directories are + consulted. + """ + + +def pytest_itemcollected(item: "Item") -> None: + """We just collected a test item. + + :param item: + The item. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_collectreport(report: "CollectReport") -> None: + """Collector finished collecting. + + :param report: + The collect report. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collector, only + conftest files in the collector's directory and its parent directories are + consulted. + """ + + +def pytest_deselected(items: Sequence["Item"]) -> None: + """Called for deselected test items, e.g. by keyword. + + May be called multiple times. + + :param items: + The items. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +@hookspec(firstresult=True) +def pytest_make_collect_report(collector: "Collector") -> "Optional[CollectReport]": + """Perform :func:`collector.collect() ` and return + a :class:`~pytest.CollectReport`. + + Stops at first non-None result, see :ref:`firstresult`. + + :param collector: + The collector. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collector, only + conftest files in the collector's directory and its parent directories are + consulted. + """ + + +# ------------------------------------------------------------------------- +# Python test function related hooks +# ------------------------------------------------------------------------- + + +@hookspec( + firstresult=True, + warn_on_impl_args={ + "path": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="path", pathlib_path_arg="module_path" + ), + }, +) +def pytest_pycollect_makemodule( + module_path: Path, path: "LEGACY_PATH", parent +) -> Optional["Module"]: + """Return a :class:`pytest.Module` collector or None for the given path. + + This hook will be called for each matching test module path. 
+ The :hook:`pytest_collect_file` hook needs to be used if you want to + create test modules for files that do not match as a test module. + + Stops at first non-None result, see :ref:`firstresult`. + + :param module_path: The path of the module to collect. + :param path: The path of the module to collect (deprecated). + + .. versionchanged:: 7.0.0 + The ``module_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``path`` parameter. + + The ``path`` parameter has been deprecated in favor of ``fspath``. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given parent collector, + only conftest files in the collector's directory and its parent directories + are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_pycollect_makeitem( + collector: Union["Module", "Class"], name: str, obj: object +) -> Union[None, "Item", "Collector", List[Union["Item", "Collector"]]]: + """Return a custom item/collector for a Python object in a module, or None. + + Stops at first non-None result, see :ref:`firstresult`. + + :param collector: + The module/class collector. + :param name: + The name of the object in the module/class. + :param obj: + The object. + :returns: + The created items/collectors. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collector, only + conftest files in the collector's directory and its parent directories + are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_pyfunc_call(pyfuncitem: "Function") -> Optional[object]: + """Call underlying test function. + + Stops at first non-None result, see :ref:`firstresult`. + + :param pyfuncitem: + The function item. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only + conftest files in the item's directory and its parent directories + are consulted. + """ + + +def pytest_generate_tests(metafunc: "Metafunc") -> None: + """Generate (multiple) parametrized calls to a test function. + + :param metafunc: + The :class:`~pytest.Metafunc` helper for the test function. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given function definition, + only conftest files in the functions's directory and its parent directories + are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_make_parametrize_id( + config: "Config", val: object, argname: str +) -> Optional[str]: + """Return a user-friendly string representation of the given ``val`` + that will be used by @pytest.mark.parametrize calls, or None if the hook + doesn't know about ``val``. + + The parameter name is available as ``argname``, if required. + + Stops at first non-None result, see :ref:`firstresult`. + + :param config: The pytest config object. + :param val: The parametrized value. + :param argname: The automatic parameter name produced by pytest. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +# ------------------------------------------------------------------------- +# runtest related hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_runtestloop(session: "Session") -> Optional[object]: + """Perform the main runtest loop (after collection finished). 
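A minimal, hedged sketch of ``pytest_generate_tests`` from above (the ``backend`` fixture name and its values are invented):

.. code-block:: python

    # conftest.py -- illustrative only; "backend" and its values are invented.
    def pytest_generate_tests(metafunc):
        if "backend" in metafunc.fixturenames:
            metafunc.parametrize("backend", ["sqlite", "postgres"])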
+ + The default hook implementation performs the runtest protocol for all items + collected in the session (``session.items``), unless the collection failed + or the ``collectonly`` pytest option is set. + + If at any point :py:func:`pytest.exit` is called, the loop is + terminated immediately. + + If at any point ``session.shouldfail`` or ``session.shouldstop`` are set, the + loop is terminated after the runtest protocol for the current item is finished. + + :param session: The pytest session object. + + Stops at first non-None result, see :ref:`firstresult`. + The return value is not used, but only stops further processing. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +@hookspec(firstresult=True) +def pytest_runtest_protocol( + item: "Item", nextitem: "Optional[Item]" +) -> Optional[object]: + """Perform the runtest protocol for a single test item. + + The default runtest protocol is this (see individual hooks for full details): + + - ``pytest_runtest_logstart(nodeid, location)`` + + - Setup phase: + - ``call = pytest_runtest_setup(item)`` (wrapped in ``CallInfo(when="setup")``) + - ``report = pytest_runtest_makereport(item, call)`` + - ``pytest_runtest_logreport(report)`` + - ``pytest_exception_interact(call, report)`` if an interactive exception occurred + + - Call phase, if the the setup passed and the ``setuponly`` pytest option is not set: + - ``call = pytest_runtest_call(item)`` (wrapped in ``CallInfo(when="call")``) + - ``report = pytest_runtest_makereport(item, call)`` + - ``pytest_runtest_logreport(report)`` + - ``pytest_exception_interact(call, report)`` if an interactive exception occurred + + - Teardown phase: + - ``call = pytest_runtest_teardown(item, nextitem)`` (wrapped in ``CallInfo(when="teardown")``) + - ``report = pytest_runtest_makereport(item, call)`` + - ``pytest_runtest_logreport(report)`` + - ``pytest_exception_interact(call, report)`` if an interactive exception occurred + + - ``pytest_runtest_logfinish(nodeid, location)`` + + :param item: Test item for which the runtest protocol is performed. + :param nextitem: The scheduled-to-be-next test item (or None if this is the end my friend). + + Stops at first non-None result, see :ref:`firstresult`. + The return value is not used, but only stops further processing. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +def pytest_runtest_logstart( + nodeid: str, location: Tuple[str, Optional[int], str] +) -> None: + """Called at the start of running the runtest protocol for a single item. + + See :hook:`pytest_runtest_protocol` for a description of the runtest protocol. + + :param nodeid: Full node ID of the item. + :param location: A tuple of ``(filename, lineno, testname)`` + where ``filename`` is a file path relative to ``config.rootpath`` + and ``lineno`` is 0-based. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_runtest_logfinish( + nodeid: str, location: Tuple[str, Optional[int], str] +) -> None: + """Called at the end of running the runtest protocol for a single item. + + See :hook:`pytest_runtest_protocol` for a description of the runtest protocol. + + :param nodeid: Full node ID of the item. 
+ :param location: A tuple of ``(filename, lineno, testname)`` + where ``filename`` is a file path relative to ``config.rootpath`` + and ``lineno`` is 0-based. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_runtest_setup(item: "Item") -> None: + """Called to perform the setup phase for a test item. + + The default implementation runs ``setup()`` on ``item`` and all of its + parents (which haven't been setup yet). This includes obtaining the + values of fixtures required by the item (which haven't been obtained + yet). + + :param item: + The item. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_runtest_call(item: "Item") -> None: + """Called to run the test for test item (the call phase). + + The default implementation calls ``item.runtest()``. + + :param item: + The item. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_runtest_teardown(item: "Item", nextitem: Optional["Item"]) -> None: + """Called to perform the teardown phase for a test item. + + The default implementation runs the finalizers and calls ``teardown()`` + on ``item`` and all of its parents (which need to be torn down). This + includes running the teardown phase of fixtures required by the item (if + they go out of scope). + + :param item: + The item. + :param nextitem: + The scheduled-to-be-next test item (None if no further test item is + scheduled). This argument is used to perform exact teardowns, i.e. + calling just enough finalizers so that nextitem only needs to call + setup functions. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_runtest_makereport( + item: "Item", call: "CallInfo[None]" +) -> Optional["TestReport"]: + """Called to create a :class:`~pytest.TestReport` for each of + the setup, call and teardown runtest phases of a test item. + + See :hook:`pytest_runtest_protocol` for a description of the runtest protocol. + + :param item: The item. + :param call: The :class:`~pytest.CallInfo` for the phase. + + Stops at first non-None result, see :ref:`firstresult`. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_runtest_logreport(report: "TestReport") -> None: + """Process the :class:`~pytest.TestReport` produced for each + of the setup, call and teardown runtest phases of an item. + + See :hook:`pytest_runtest_protocol` for a description of the runtest protocol. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. 
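A hedged sketch of wrapping ``pytest_runtest_makereport`` to stash each phase's report on the item (the attribute naming scheme is invented):

.. code-block:: python

    # conftest.py -- illustrative only; the "report_*" attribute names are invented.
    import pytest


    @pytest.hookimpl(wrapper=True)
    def pytest_runtest_makereport(item, call):
        report = yield
        # One report per phase: "setup", "call" or "teardown".
        setattr(item, f"report_{report.when}", report)
        return report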
+ """ + + +@hookspec(firstresult=True) +def pytest_report_to_serializable( + config: "Config", + report: Union["CollectReport", "TestReport"], +) -> Optional[Dict[str, Any]]: + """Serialize the given report object into a data structure suitable for + sending over the wire, e.g. converted to JSON. + + :param config: The pytest config object. + :param report: The report. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. The exact details may depend + on the plugin which calls the hook. + """ + + +@hookspec(firstresult=True) +def pytest_report_from_serializable( + config: "Config", + data: Dict[str, Any], +) -> Optional[Union["CollectReport", "TestReport"]]: + """Restore a report object previously serialized with + :hook:`pytest_report_to_serializable`. + + :param config: The pytest config object. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. The exact details may depend + on the plugin which calls the hook. + """ + + +# ------------------------------------------------------------------------- +# Fixture related hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_fixture_setup( + fixturedef: "FixtureDef[Any]", request: "SubRequest" +) -> Optional[object]: + """Perform fixture setup execution. + + :param fixturdef: + The fixture definition object. + :param request: + The fixture request object. + :returns: + The return value of the call to the fixture function. + + Stops at first non-None result, see :ref:`firstresult`. + + .. note:: + If the fixture function returns None, other implementations of + this hook function will continue to be called, according to the + behavior of the :ref:`firstresult` option. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given fixture, only + conftest files in the fixture scope's directory and its parent directories + are consulted. + """ + + +def pytest_fixture_post_finalizer( + fixturedef: "FixtureDef[Any]", request: "SubRequest" +) -> None: + """Called after fixture teardown, but before the cache is cleared, so + the fixture result ``fixturedef.cached_result`` is still available (not + ``None``). + + :param fixturdef: + The fixture definition object. + :param request: + The fixture request object. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given fixture, only + conftest files in the fixture scope's directory and its parent directories + are consulted. + """ + + +# ------------------------------------------------------------------------- +# test session related hooks +# ------------------------------------------------------------------------- + + +def pytest_sessionstart(session: "Session") -> None: + """Called after the ``Session`` object has been created and before performing collection + and entering the run test loop. + + :param session: The pytest session object. + + Use in conftest plugins + ======================= + + This hook is only called for :ref:`initial conftests `. + """ + + +def pytest_sessionfinish( + session: "Session", + exitstatus: Union[int, "ExitCode"], +) -> None: + """Called after whole test run finished, right before returning the exit status to the system. + + :param session: The pytest session object. + :param exitstatus: The status which pytest will return to the system. 
+ + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +def pytest_unconfigure(config: "Config") -> None: + """Called before test process is exited. + + :param config: The pytest config object. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +# ------------------------------------------------------------------------- +# hooks for customizing the assert methods +# ------------------------------------------------------------------------- + + +def pytest_assertrepr_compare( + config: "Config", op: str, left: object, right: object +) -> Optional[List[str]]: + """Return explanation for comparisons in failing assert expressions. + + Return None for no custom explanation, otherwise return a list + of strings. The strings will be joined by newlines but any newlines + *in* a string will be escaped. Note that all but the first line will + be indented slightly, the intention is for the first line to be a summary. + + :param config: The pytest config object. + :param op: The operator, e.g. `"=="`, `"!="`, `"not in"`. + :param left: The left operand. + :param right: The right operand. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_assertion_pass(item: "Item", lineno: int, orig: str, expl: str) -> None: + """Called whenever an assertion passes. + + .. versionadded:: 5.0 + + Use this hook to do some processing after a passing assertion. + The original assertion information is available in the `orig` string + and the pytest introspected assertion information is available in the + `expl` string. + + This hook must be explicitly enabled by the ``enable_assertion_pass_hook`` + ini-file option: + + .. code-block:: ini + + [pytest] + enable_assertion_pass_hook=true + + You need to **clean the .pyc** files in your project directory and interpreter libraries + when enabling this option, as assertions will require to be re-written. + + :param item: pytest item object of current test. + :param lineno: Line number of the assert statement. + :param orig: String with the original assertion. + :param expl: String with the assert explanation. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +# ------------------------------------------------------------------------- +# Hooks for influencing reporting (invoked from _pytest_terminal). +# ------------------------------------------------------------------------- + + +@hookspec( + warn_on_impl_args={ + "startdir": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="startdir", pathlib_path_arg="start_path" + ), + }, +) +def pytest_report_header( # type:ignore[empty-body] + config: "Config", start_path: Path, startdir: "LEGACY_PATH" +) -> Union[str, List[str]]: + """Return a string or list of strings to be displayed as header info for terminal reporting. + + :param config: The pytest config object. + :param start_path: The starting dir. + :param startdir: The starting dir (deprecated). + + .. note:: + + Lines returned by a plugin are displayed before those of plugins which + ran before it. + If you want to have your line(s) displayed first, use + :ref:`trylast=True `. + + .. 
versionchanged:: 7.0.0 + The ``start_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``startdir`` parameter. The ``startdir`` parameter + has been deprecated. + + Use in conftest plugins + ======================= + + This hook is only called for :ref:`initial conftests `. + """ + + +@hookspec( + warn_on_impl_args={ + "startdir": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="startdir", pathlib_path_arg="start_path" + ), + }, +) +def pytest_report_collectionfinish( # type:ignore[empty-body] + config: "Config", + start_path: Path, + startdir: "LEGACY_PATH", + items: Sequence["Item"], +) -> Union[str, List[str]]: + """Return a string or list of strings to be displayed after collection + has finished successfully. + + These strings will be displayed after the standard "collected X items" message. + + .. versionadded:: 3.2 + + :param config: The pytest config object. + :param start_path: The starting dir. + :param startdir: The starting dir (deprecated). + :param items: List of pytest items that are going to be executed; this list should not be modified. + + .. note:: + + Lines returned by a plugin are displayed before those of plugins which + ran before it. + If you want to have your line(s) displayed first, use + :ref:`trylast=True `. + + .. versionchanged:: 7.0.0 + The ``start_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``startdir`` parameter. The ``startdir`` parameter + has been deprecated. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +@hookspec(firstresult=True) +def pytest_report_teststatus( # type:ignore[empty-body] + report: Union["CollectReport", "TestReport"], config: "Config" +) -> "TestShortLogReport | Tuple[str, str, Union[str, Tuple[str, Mapping[str, bool]]]]": + """Return result-category, shortletter and verbose word for status + reporting. + + The result-category is a category in which to count the result, for + example "passed", "skipped", "error" or the empty string. + + The shortletter is shown as testing progresses, for example ".", "s", + "E" or the empty string. + + The verbose word is shown as testing progresses in verbose mode, for + example "PASSED", "SKIPPED", "ERROR" or the empty string. + + pytest may style these implicitly according to the report outcome. + To provide explicit styling, return a tuple for the verbose word, + for example ``"rerun", "R", ("RERUN", {"yellow": True})``. + + :param report: The report object whose status is to be returned. + :param config: The pytest config object. + :returns: The test status. + + Stops at first non-None result, see :ref:`firstresult`. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +def pytest_terminal_summary( + terminalreporter: "TerminalReporter", + exitstatus: "ExitCode", + config: "Config", +) -> None: + """Add a section to terminal summary reporting. + + :param terminalreporter: The internal terminal reporter object. + :param exitstatus: The exit status that will be reported back to the OS. + :param config: The pytest config object. + + .. versionadded:: 4.2 + The ``config`` parameter. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. 
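A hedged sketch of ``pytest_terminal_summary`` adding a custom section (the section title and text are invented):

.. code-block:: python

    # conftest.py -- illustrative only; section title and text are invented.
    def pytest_terminal_summary(terminalreporter, exitstatus, config):
        terminalreporter.section("custom summary")
        terminalreporter.line(f"pytest will exit with status {exitstatus!r}")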
+ """ + + +@hookspec(historic=True) +def pytest_warning_recorded( + warning_message: "warnings.WarningMessage", + when: "Literal['config', 'collect', 'runtest']", + nodeid: str, + location: Optional[Tuple[str, int, str]], +) -> None: + """Process a warning captured by the internal pytest warnings plugin. + + :param warning_message: + The captured warning. This is the same object produced by :class:`warnings.catch_warnings`, + and contains the same attributes as the parameters of :py:func:`warnings.showwarning`. + + :param when: + Indicates when the warning was captured. Possible values: + + * ``"config"``: during pytest configuration/initialization stage. + * ``"collect"``: during test collection. + * ``"runtest"``: during test execution. + + :param nodeid: + Full id of the item. Empty string for warnings that are not specific to + a particular node. + + :param location: + When available, holds information about the execution context of the captured + warning (filename, linenumber, function). ``function`` evaluates to + when the execution context is at the module level. + + .. versionadded:: 6.0 + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. If the warning is specific to a + particular node, only conftest files in parent directories of the node are + consulted. + """ + + +# ------------------------------------------------------------------------- +# Hooks for influencing skipping +# ------------------------------------------------------------------------- + + +def pytest_markeval_namespace( # type:ignore[empty-body] + config: "Config", +) -> Dict[str, Any]: + """Called when constructing the globals dictionary used for + evaluating string conditions in xfail/skipif markers. + + This is useful when the condition for a marker requires + objects that are expensive or impossible to obtain during + collection time, which is required by normal boolean + conditions. + + .. versionadded:: 6.2 + + :param config: The pytest config object. + :returns: A dictionary of additional globals to add. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in parent directories of the item are consulted. + """ + + +# ------------------------------------------------------------------------- +# error handling and internal debugging hooks +# ------------------------------------------------------------------------- + + +def pytest_internalerror( + excrepr: "ExceptionRepr", + excinfo: "ExceptionInfo[BaseException]", +) -> Optional[bool]: + """Called for internal errors. + + Return True to suppress the fallback handling of printing an + INTERNALERROR message directly to sys.stderr. + + :param excrepr: The exception repr object. + :param excinfo: The exception info. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +def pytest_keyboard_interrupt( + excinfo: "ExceptionInfo[Union[KeyboardInterrupt, Exit]]", +) -> None: + """Called for keyboard interrupt. + + :param excinfo: The exception info. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +def pytest_exception_interact( + node: Union["Item", "Collector"], + call: "CallInfo[Any]", + report: Union["CollectReport", "TestReport"], +) -> None: + """Called when an exception was raised which can potentially be + interactively handled. 
+ + May be called during collection (see :hook:`pytest_make_collect_report`), + in which case ``report`` is a :class:`~pytest.CollectReport`. + + May be called during runtest of an item (see :hook:`pytest_runtest_protocol`), + in which case ``report`` is a :class:`~pytest.TestReport`. + + This hook is not called if the exception that was raised is an internal + exception like ``skip.Exception``. + + :param node: + The item or collector. + :param call: + The call information. Contains the exception. + :param report: + The collection or test report. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given node, only conftest + files in parent directories of the node are consulted. + """ + + +def pytest_enter_pdb(config: "Config", pdb: "pdb.Pdb") -> None: + """Called upon pdb.set_trace(). + + Can be used by plugins to take special action just before the python + debugger enters interactive mode. + + :param config: The pytest config object. + :param pdb: The Pdb instance. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +def pytest_leave_pdb(config: "Config", pdb: "pdb.Pdb") -> None: + """Called when leaving pdb (e.g. with continue after pdb.set_trace()). + + Can be used by plugins to take special action just after the python + debugger leaves interactive mode. + + :param config: The pytest config object. + :param pdb: The Pdb instance. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/junitxml.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/junitxml.py new file mode 100644 index 000000000..13fc9277a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/junitxml.py @@ -0,0 +1,701 @@ +# mypy: allow-untyped-defs +"""Report test results in JUnit-XML format, for use with Jenkins and build +integration servers. + +Based on initial code from Ross Lawley. + +Output conforms to +https://github.com/jenkinsci/xunit-plugin/blob/master/src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd +""" + +from datetime import datetime +import functools +import os +import platform +import re +from typing import Callable +from typing import Dict +from typing import List +from typing import Match +from typing import Optional +from typing import Tuple +from typing import Union +import xml.etree.ElementTree as ET + +from _pytest import nodes +from _pytest import timing +from _pytest._code.code import ExceptionRepr +from _pytest._code.code import ReprFileLocation +from _pytest.config import Config +from _pytest.config import filename_arg +from _pytest.config.argparsing import Parser +from _pytest.fixtures import FixtureRequest +from _pytest.reports import TestReport +from _pytest.stash import StashKey +from _pytest.terminal import TerminalReporter +import pytest + + +xml_key = StashKey["LogXML"]() + + +def bin_xml_escape(arg: object) -> str: + r"""Visually escape invalid XML characters. + + For example, transforms + 'hello\aworld\b' + into + 'hello#x07world#x08' + Note that the #xABs are *not* XML escapes - missing the ampersand «. + The idea is to escape visually for the user rather than for XML itself. 
+ """ + + def repl(matchobj: Match[str]) -> str: + i = ord(matchobj.group()) + if i <= 0xFF: + return "#x%02X" % i + else: + return "#x%04X" % i + + # The spec range of valid chars is: + # Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF] + # For an unknown(?) reason, we disallow #x7F (DEL) as well. + illegal_xml_re = ( + "[^\u0009\u000a\u000d\u0020-\u007e\u0080-\ud7ff\ue000-\ufffd\u10000-\u10ffff]" + ) + return re.sub(illegal_xml_re, repl, str(arg)) + + +def merge_family(left, right) -> None: + result = {} + for kl, vl in left.items(): + for kr, vr in right.items(): + if not isinstance(vl, list): + raise TypeError(type(vl)) + result[kl] = vl + vr + left.update(result) + + +families = {} +families["_base"] = {"testcase": ["classname", "name"]} +families["_base_legacy"] = {"testcase": ["file", "line", "url"]} + +# xUnit 1.x inherits legacy attributes. +families["xunit1"] = families["_base"].copy() +merge_family(families["xunit1"], families["_base_legacy"]) + +# xUnit 2.x uses strict base attributes. +families["xunit2"] = families["_base"] + + +class _NodeReporter: + def __init__(self, nodeid: Union[str, TestReport], xml: "LogXML") -> None: + self.id = nodeid + self.xml = xml + self.add_stats = self.xml.add_stats + self.family = self.xml.family + self.duration = 0.0 + self.properties: List[Tuple[str, str]] = [] + self.nodes: List[ET.Element] = [] + self.attrs: Dict[str, str] = {} + + def append(self, node: ET.Element) -> None: + self.xml.add_stats(node.tag) + self.nodes.append(node) + + def add_property(self, name: str, value: object) -> None: + self.properties.append((str(name), bin_xml_escape(value))) + + def add_attribute(self, name: str, value: object) -> None: + self.attrs[str(name)] = bin_xml_escape(value) + + def make_properties_node(self) -> Optional[ET.Element]: + """Return a Junit node containing custom properties, if any.""" + if self.properties: + properties = ET.Element("properties") + for name, value in self.properties: + properties.append(ET.Element("property", name=name, value=value)) + return properties + return None + + def record_testreport(self, testreport: TestReport) -> None: + names = mangle_test_address(testreport.nodeid) + existing_attrs = self.attrs + classnames = names[:-1] + if self.xml.prefix: + classnames.insert(0, self.xml.prefix) + attrs: Dict[str, str] = { + "classname": ".".join(classnames), + "name": bin_xml_escape(names[-1]), + "file": testreport.location[0], + } + if testreport.location[1] is not None: + attrs["line"] = str(testreport.location[1]) + if hasattr(testreport, "url"): + attrs["url"] = testreport.url + self.attrs = attrs + self.attrs.update(existing_attrs) # Restore any user-defined attributes. + + # Preserve legacy testcase behavior. + if self.family == "xunit1": + return + + # Filter out attributes not permitted by this test family. + # Including custom attributes because they are not valid here. 
+ temp_attrs = {} + for key in self.attrs: + if key in families[self.family]["testcase"]: + temp_attrs[key] = self.attrs[key] + self.attrs = temp_attrs + + def to_xml(self) -> ET.Element: + testcase = ET.Element("testcase", self.attrs, time="%.3f" % self.duration) + properties = self.make_properties_node() + if properties is not None: + testcase.append(properties) + testcase.extend(self.nodes) + return testcase + + def _add_simple(self, tag: str, message: str, data: Optional[str] = None) -> None: + node = ET.Element(tag, message=message) + node.text = bin_xml_escape(data) + self.append(node) + + def write_captured_output(self, report: TestReport) -> None: + if not self.xml.log_passing_tests and report.passed: + return + + content_out = report.capstdout + content_log = report.caplog + content_err = report.capstderr + if self.xml.logging == "no": + return + content_all = "" + if self.xml.logging in ["log", "all"]: + content_all = self._prepare_content(content_log, " Captured Log ") + if self.xml.logging in ["system-out", "out-err", "all"]: + content_all += self._prepare_content(content_out, " Captured Out ") + self._write_content(report, content_all, "system-out") + content_all = "" + if self.xml.logging in ["system-err", "out-err", "all"]: + content_all += self._prepare_content(content_err, " Captured Err ") + self._write_content(report, content_all, "system-err") + content_all = "" + if content_all: + self._write_content(report, content_all, "system-out") + + def _prepare_content(self, content: str, header: str) -> str: + return "\n".join([header.center(80, "-"), content, ""]) + + def _write_content(self, report: TestReport, content: str, jheader: str) -> None: + tag = ET.Element(jheader) + tag.text = bin_xml_escape(content) + self.append(tag) + + def append_pass(self, report: TestReport) -> None: + self.add_stats("passed") + + def append_failure(self, report: TestReport) -> None: + # msg = str(report.longrepr.reprtraceback.extraline) + if hasattr(report, "wasxfail"): + self._add_simple("skipped", "xfail-marked test passes unexpectedly") + else: + assert report.longrepr is not None + reprcrash: Optional[ReprFileLocation] = getattr( + report.longrepr, "reprcrash", None + ) + if reprcrash is not None: + message = reprcrash.message + else: + message = str(report.longrepr) + message = bin_xml_escape(message) + self._add_simple("failure", message, str(report.longrepr)) + + def append_collect_error(self, report: TestReport) -> None: + # msg = str(report.longrepr.reprtraceback.extraline) + assert report.longrepr is not None + self._add_simple("error", "collection failure", str(report.longrepr)) + + def append_collect_skipped(self, report: TestReport) -> None: + self._add_simple("skipped", "collection skipped", str(report.longrepr)) + + def append_error(self, report: TestReport) -> None: + assert report.longrepr is not None + reprcrash: Optional[ReprFileLocation] = getattr( + report.longrepr, "reprcrash", None + ) + if reprcrash is not None: + reason = reprcrash.message + else: + reason = str(report.longrepr) + + if report.when == "teardown": + msg = f'failed on teardown with "{reason}"' + else: + msg = f'failed on setup with "{reason}"' + self._add_simple("error", bin_xml_escape(msg), str(report.longrepr)) + + def append_skipped(self, report: TestReport) -> None: + if hasattr(report, "wasxfail"): + xfailreason = report.wasxfail + if xfailreason.startswith("reason: "): + xfailreason = xfailreason[8:] + xfailreason = bin_xml_escape(xfailreason) + skipped = ET.Element("skipped", type="pytest.xfail", 
message=xfailreason) + self.append(skipped) + else: + assert isinstance(report.longrepr, tuple) + filename, lineno, skipreason = report.longrepr + if skipreason.startswith("Skipped: "): + skipreason = skipreason[9:] + details = f"{filename}:{lineno}: {skipreason}" + + skipped = ET.Element( + "skipped", type="pytest.skip", message=bin_xml_escape(skipreason) + ) + skipped.text = bin_xml_escape(details) + self.append(skipped) + self.write_captured_output(report) + + def finalize(self) -> None: + data = self.to_xml() + self.__dict__.clear() + # Type ignored because mypy doesn't like overriding a method. + # Also the return value doesn't match... + self.to_xml = lambda: data # type: ignore[method-assign] + + +def _warn_incompatibility_with_xunit2( + request: FixtureRequest, fixture_name: str +) -> None: + """Emit a PytestWarning about the given fixture being incompatible with newer xunit revisions.""" + from _pytest.warning_types import PytestWarning + + xml = request.config.stash.get(xml_key, None) + if xml is not None and xml.family not in ("xunit1", "legacy"): + request.node.warn( + PytestWarning( + f"{fixture_name} is incompatible with junit_family '{xml.family}' (use 'legacy' or 'xunit1')" + ) + ) + + +@pytest.fixture +def record_property(request: FixtureRequest) -> Callable[[str, object], None]: + """Add extra properties to the calling test. + + User properties become part of the test report and are available to the + configured reporters, like JUnit XML. + + The fixture is callable with ``name, value``. The value is automatically + XML-encoded. + + Example:: + + def test_function(record_property): + record_property("example_key", 1) + """ + _warn_incompatibility_with_xunit2(request, "record_property") + + def append_property(name: str, value: object) -> None: + request.node.user_properties.append((name, value)) + + return append_property + + +@pytest.fixture +def record_xml_attribute(request: FixtureRequest) -> Callable[[str, object], None]: + """Add extra xml attributes to the tag for the calling test. + + The fixture is callable with ``name, value``. The value is + automatically XML-encoded. + """ + from _pytest.warning_types import PytestExperimentalApiWarning + + request.node.warn( + PytestExperimentalApiWarning("record_xml_attribute is an experimental feature") + ) + + _warn_incompatibility_with_xunit2(request, "record_xml_attribute") + + # Declare noop + def add_attr_noop(name: str, value: object) -> None: + pass + + attr_func = add_attr_noop + + xml = request.config.stash.get(xml_key, None) + if xml is not None: + node_reporter = xml.node_reporter(request.node.nodeid) + attr_func = node_reporter.add_attribute + + return attr_func + + +def _check_record_param_type(param: str, v: str) -> None: + """Used by record_testsuite_property to check that the given parameter name is of the proper + type.""" + __tracebackhide__ = True + if not isinstance(v, str): + msg = "{param} parameter needs to be a string, but {g} given" # type: ignore[unreachable] + raise TypeError(msg.format(param=param, g=type(v).__name__)) + + +@pytest.fixture(scope="session") +def record_testsuite_property(request: FixtureRequest) -> Callable[[str, object], None]: + """Record a new ```` tag as child of the root ````. + + This is suitable to writing global information regarding the entire test + suite, and is compatible with ``xunit2`` JUnit family. + + This is a ``session``-scoped fixture which is called with ``(name, value)``. Example: + + .. 
code-block:: python + + def test_foo(record_testsuite_property): + record_testsuite_property("ARCH", "PPC") + record_testsuite_property("STORAGE_TYPE", "CEPH") + + :param name: + The property name. + :param value: + The property value. Will be converted to a string. + + .. warning:: + + Currently this fixture **does not work** with the + `pytest-xdist `__ plugin. See + :issue:`7767` for details. + """ + __tracebackhide__ = True + + def record_func(name: str, value: object) -> None: + """No-op function in case --junit-xml was not passed in the command-line.""" + __tracebackhide__ = True + _check_record_param_type("name", name) + + xml = request.config.stash.get(xml_key, None) + if xml is not None: + record_func = xml.add_global_property + return record_func + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting") + group.addoption( + "--junitxml", + "--junit-xml", + action="store", + dest="xmlpath", + metavar="path", + type=functools.partial(filename_arg, optname="--junitxml"), + default=None, + help="Create junit-xml style report file at given path", + ) + group.addoption( + "--junitprefix", + "--junit-prefix", + action="store", + metavar="str", + default=None, + help="Prepend prefix to classnames in junit-xml output", + ) + parser.addini( + "junit_suite_name", "Test suite name for JUnit report", default="pytest" + ) + parser.addini( + "junit_logging", + "Write captured log messages to JUnit report: " + "one of no|log|system-out|system-err|out-err|all", + default="no", + ) + parser.addini( + "junit_log_passing_tests", + "Capture log information for passing tests to JUnit report: ", + type="bool", + default=True, + ) + parser.addini( + "junit_duration_report", + "Duration time to report: one of total|call", + default="total", + ) # choices=['total', 'call']) + parser.addini( + "junit_family", + "Emit XML for schema: one of legacy|xunit1|xunit2", + default="xunit2", + ) + + +def pytest_configure(config: Config) -> None: + xmlpath = config.option.xmlpath + # Prevent opening xmllog on worker nodes (xdist). + if xmlpath and not hasattr(config, "workerinput"): + junit_family = config.getini("junit_family") + config.stash[xml_key] = LogXML( + xmlpath, + config.option.junitprefix, + config.getini("junit_suite_name"), + config.getini("junit_logging"), + config.getini("junit_duration_report"), + junit_family, + config.getini("junit_log_passing_tests"), + ) + config.pluginmanager.register(config.stash[xml_key]) + + +def pytest_unconfigure(config: Config) -> None: + xml = config.stash.get(xml_key, None) + if xml: + del config.stash[xml_key] + config.pluginmanager.unregister(xml) + + +def mangle_test_address(address: str) -> List[str]: + path, possible_open_bracket, params = address.partition("[") + names = path.split("::") + # Convert file path to dotted path. + names[0] = names[0].replace(nodes.SEP, ".") + names[0] = re.sub(r"\.py$", "", names[0]) + # Put any params back. 
+ names[-1] += possible_open_bracket + params + return names + + +class LogXML: + def __init__( + self, + logfile, + prefix: Optional[str], + suite_name: str = "pytest", + logging: str = "no", + report_duration: str = "total", + family="xunit1", + log_passing_tests: bool = True, + ) -> None: + logfile = os.path.expanduser(os.path.expandvars(logfile)) + self.logfile = os.path.normpath(os.path.abspath(logfile)) + self.prefix = prefix + self.suite_name = suite_name + self.logging = logging + self.log_passing_tests = log_passing_tests + self.report_duration = report_duration + self.family = family + self.stats: Dict[str, int] = dict.fromkeys( + ["error", "passed", "failure", "skipped"], 0 + ) + self.node_reporters: Dict[ + Tuple[Union[str, TestReport], object], _NodeReporter + ] = {} + self.node_reporters_ordered: List[_NodeReporter] = [] + self.global_properties: List[Tuple[str, str]] = [] + + # List of reports that failed on call but teardown is pending. + self.open_reports: List[TestReport] = [] + self.cnt_double_fail_tests = 0 + + # Replaces convenience family with real family. + if self.family == "legacy": + self.family = "xunit1" + + def finalize(self, report: TestReport) -> None: + nodeid = getattr(report, "nodeid", report) + # Local hack to handle xdist report order. + workernode = getattr(report, "node", None) + reporter = self.node_reporters.pop((nodeid, workernode)) + + for propname, propvalue in report.user_properties: + reporter.add_property(propname, str(propvalue)) + + if reporter is not None: + reporter.finalize() + + def node_reporter(self, report: Union[TestReport, str]) -> _NodeReporter: + nodeid: Union[str, TestReport] = getattr(report, "nodeid", report) + # Local hack to handle xdist report order. + workernode = getattr(report, "node", None) + + key = nodeid, workernode + + if key in self.node_reporters: + # TODO: breaks for --dist=each + return self.node_reporters[key] + + reporter = _NodeReporter(nodeid, self) + + self.node_reporters[key] = reporter + self.node_reporters_ordered.append(reporter) + + return reporter + + def add_stats(self, key: str) -> None: + if key in self.stats: + self.stats[key] += 1 + + def _opentestcase(self, report: TestReport) -> _NodeReporter: + reporter = self.node_reporter(report) + reporter.record_testreport(report) + return reporter + + def pytest_runtest_logreport(self, report: TestReport) -> None: + """Handle a setup/call/teardown report, generating the appropriate + XML tags as necessary. + + Note: due to plugins like xdist, this hook may be called in interlaced + order with reports from other nodes. For example: + + Usual call order: + -> setup node1 + -> call node1 + -> teardown node1 + -> setup node2 + -> call node2 + -> teardown node2 + + Possible call order in xdist: + -> setup node1 + -> call node1 + -> setup node2 + -> call node2 + -> teardown node2 + -> teardown node1 + """ + close_report = None + if report.passed: + if report.when == "call": # ignore setup/teardown + reporter = self._opentestcase(report) + reporter.append_pass(report) + elif report.failed: + if report.when == "teardown": + # The following vars are needed when xdist plugin is used. 
+ report_wid = getattr(report, "worker_id", None) + report_ii = getattr(report, "item_index", None) + close_report = next( + ( + rep + for rep in self.open_reports + if ( + rep.nodeid == report.nodeid + and getattr(rep, "item_index", None) == report_ii + and getattr(rep, "worker_id", None) == report_wid + ) + ), + None, + ) + if close_report: + # We need to open new testcase in case we have failure in + # call and error in teardown in order to follow junit + # schema. + self.finalize(close_report) + self.cnt_double_fail_tests += 1 + reporter = self._opentestcase(report) + if report.when == "call": + reporter.append_failure(report) + self.open_reports.append(report) + if not self.log_passing_tests: + reporter.write_captured_output(report) + else: + reporter.append_error(report) + elif report.skipped: + reporter = self._opentestcase(report) + reporter.append_skipped(report) + self.update_testcase_duration(report) + if report.when == "teardown": + reporter = self._opentestcase(report) + reporter.write_captured_output(report) + + self.finalize(report) + report_wid = getattr(report, "worker_id", None) + report_ii = getattr(report, "item_index", None) + close_report = next( + ( + rep + for rep in self.open_reports + if ( + rep.nodeid == report.nodeid + and getattr(rep, "item_index", None) == report_ii + and getattr(rep, "worker_id", None) == report_wid + ) + ), + None, + ) + if close_report: + self.open_reports.remove(close_report) + + def update_testcase_duration(self, report: TestReport) -> None: + """Accumulate total duration for nodeid from given report and update + the Junit.testcase with the new total if already created.""" + if self.report_duration in {"total", report.when}: + reporter = self.node_reporter(report) + reporter.duration += getattr(report, "duration", 0.0) + + def pytest_collectreport(self, report: TestReport) -> None: + if not report.passed: + reporter = self._opentestcase(report) + if report.failed: + reporter.append_collect_error(report) + else: + reporter.append_collect_skipped(report) + + def pytest_internalerror(self, excrepr: ExceptionRepr) -> None: + reporter = self.node_reporter("internal") + reporter.attrs.update(classname="pytest", name="internal") + reporter._add_simple("error", "internal error", str(excrepr)) + + def pytest_sessionstart(self) -> None: + self.suite_start_time = timing.time() + + def pytest_sessionfinish(self) -> None: + dirname = os.path.dirname(os.path.abspath(self.logfile)) + # exist_ok avoids filesystem race conditions between checking path existence and requesting creation + os.makedirs(dirname, exist_ok=True) + + with open(self.logfile, "w", encoding="utf-8") as logfile: + suite_stop_time = timing.time() + suite_time_delta = suite_stop_time - self.suite_start_time + + numtests = ( + self.stats["passed"] + + self.stats["failure"] + + self.stats["skipped"] + + self.stats["error"] + - self.cnt_double_fail_tests + ) + logfile.write('') + + suite_node = ET.Element( + "testsuite", + name=self.suite_name, + errors=str(self.stats["error"]), + failures=str(self.stats["failure"]), + skipped=str(self.stats["skipped"]), + tests=str(numtests), + time="%.3f" % suite_time_delta, + timestamp=datetime.fromtimestamp(self.suite_start_time).isoformat(), + hostname=platform.node(), + ) + global_properties = self._get_global_properties_node() + if global_properties is not None: + suite_node.append(global_properties) + for node_reporter in self.node_reporters_ordered: + suite_node.append(node_reporter.to_xml()) + testsuites = ET.Element("testsuites") + 
testsuites.append(suite_node) + logfile.write(ET.tostring(testsuites, encoding="unicode")) + + def pytest_terminal_summary(self, terminalreporter: TerminalReporter) -> None: + terminalreporter.write_sep("-", f"generated xml file: {self.logfile}") + + def add_global_property(self, name: str, value: object) -> None: + __tracebackhide__ = True + _check_record_param_type("name", name) + self.global_properties.append((name, bin_xml_escape(value))) + + def _get_global_properties_node(self) -> Optional[ET.Element]: + """Return a Junit node containing custom properties, if any.""" + if self.global_properties: + properties = ET.Element("properties") + for name, value in self.global_properties: + properties.append(ET.Element("property", name=name, value=value)) + return properties + return None diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/legacypath.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/legacypath.py new file mode 100644 index 000000000..b28c89767 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/legacypath.py @@ -0,0 +1,480 @@ +# mypy: allow-untyped-defs +"""Add backward compatibility support for the legacy py path type.""" + +import dataclasses +from pathlib import Path +import shlex +import subprocess +from typing import Final +from typing import final +from typing import List +from typing import Optional +from typing import TYPE_CHECKING +from typing import Union + +from iniconfig import SectionWrapper + +from _pytest.cacheprovider import Cache +from _pytest.compat import LEGACY_PATH +from _pytest.compat import legacy_path +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config import PytestPluginManager +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.monkeypatch import MonkeyPatch +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.nodes import Node +from _pytest.pytester import HookRecorder +from _pytest.pytester import Pytester +from _pytest.pytester import RunResult +from _pytest.terminal import TerminalReporter +from _pytest.tmpdir import TempPathFactory + + +if TYPE_CHECKING: + import pexpect + + +@final +class Testdir: + """ + Similar to :class:`Pytester`, but this class works with legacy legacy_path objects instead. + + All methods just forward to an internal :class:`Pytester` instance, converting results + to `legacy_path` objects as necessary. 
+ """ + + __test__ = False + + CLOSE_STDIN: "Final" = Pytester.CLOSE_STDIN + TimeoutExpired: "Final" = Pytester.TimeoutExpired + + def __init__(self, pytester: Pytester, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + self._pytester = pytester + + @property + def tmpdir(self) -> LEGACY_PATH: + """Temporary directory where tests are executed.""" + return legacy_path(self._pytester.path) + + @property + def test_tmproot(self) -> LEGACY_PATH: + return legacy_path(self._pytester._test_tmproot) + + @property + def request(self): + return self._pytester._request + + @property + def plugins(self): + return self._pytester.plugins + + @plugins.setter + def plugins(self, plugins): + self._pytester.plugins = plugins + + @property + def monkeypatch(self) -> MonkeyPatch: + return self._pytester._monkeypatch + + def make_hook_recorder(self, pluginmanager) -> HookRecorder: + """See :meth:`Pytester.make_hook_recorder`.""" + return self._pytester.make_hook_recorder(pluginmanager) + + def chdir(self) -> None: + """See :meth:`Pytester.chdir`.""" + return self._pytester.chdir() + + def finalize(self) -> None: + return self._pytester._finalize() + + def makefile(self, ext, *args, **kwargs) -> LEGACY_PATH: + """See :meth:`Pytester.makefile`.""" + if ext and not ext.startswith("."): + # pytester.makefile is going to throw a ValueError in a way that + # testdir.makefile did not, because + # pathlib.Path is stricter suffixes than py.path + # This ext arguments is likely user error, but since testdir has + # allowed this, we will prepend "." as a workaround to avoid breaking + # testdir usage that worked before + ext = "." + ext + return legacy_path(self._pytester.makefile(ext, *args, **kwargs)) + + def makeconftest(self, source) -> LEGACY_PATH: + """See :meth:`Pytester.makeconftest`.""" + return legacy_path(self._pytester.makeconftest(source)) + + def makeini(self, source) -> LEGACY_PATH: + """See :meth:`Pytester.makeini`.""" + return legacy_path(self._pytester.makeini(source)) + + def getinicfg(self, source: str) -> SectionWrapper: + """See :meth:`Pytester.getinicfg`.""" + return self._pytester.getinicfg(source) + + def makepyprojecttoml(self, source) -> LEGACY_PATH: + """See :meth:`Pytester.makepyprojecttoml`.""" + return legacy_path(self._pytester.makepyprojecttoml(source)) + + def makepyfile(self, *args, **kwargs) -> LEGACY_PATH: + """See :meth:`Pytester.makepyfile`.""" + return legacy_path(self._pytester.makepyfile(*args, **kwargs)) + + def maketxtfile(self, *args, **kwargs) -> LEGACY_PATH: + """See :meth:`Pytester.maketxtfile`.""" + return legacy_path(self._pytester.maketxtfile(*args, **kwargs)) + + def syspathinsert(self, path=None) -> None: + """See :meth:`Pytester.syspathinsert`.""" + return self._pytester.syspathinsert(path) + + def mkdir(self, name) -> LEGACY_PATH: + """See :meth:`Pytester.mkdir`.""" + return legacy_path(self._pytester.mkdir(name)) + + def mkpydir(self, name) -> LEGACY_PATH: + """See :meth:`Pytester.mkpydir`.""" + return legacy_path(self._pytester.mkpydir(name)) + + def copy_example(self, name=None) -> LEGACY_PATH: + """See :meth:`Pytester.copy_example`.""" + return legacy_path(self._pytester.copy_example(name)) + + def getnode(self, config: Config, arg) -> Optional[Union[Item, Collector]]: + """See :meth:`Pytester.getnode`.""" + return self._pytester.getnode(config, arg) + + def getpathnode(self, path): + """See :meth:`Pytester.getpathnode`.""" + return self._pytester.getpathnode(path) + + def genitems(self, colitems: List[Union[Item, Collector]]) -> List[Item]: + 
"""See :meth:`Pytester.genitems`.""" + return self._pytester.genitems(colitems) + + def runitem(self, source): + """See :meth:`Pytester.runitem`.""" + return self._pytester.runitem(source) + + def inline_runsource(self, source, *cmdlineargs): + """See :meth:`Pytester.inline_runsource`.""" + return self._pytester.inline_runsource(source, *cmdlineargs) + + def inline_genitems(self, *args): + """See :meth:`Pytester.inline_genitems`.""" + return self._pytester.inline_genitems(*args) + + def inline_run(self, *args, plugins=(), no_reraise_ctrlc: bool = False): + """See :meth:`Pytester.inline_run`.""" + return self._pytester.inline_run( + *args, plugins=plugins, no_reraise_ctrlc=no_reraise_ctrlc + ) + + def runpytest_inprocess(self, *args, **kwargs) -> RunResult: + """See :meth:`Pytester.runpytest_inprocess`.""" + return self._pytester.runpytest_inprocess(*args, **kwargs) + + def runpytest(self, *args, **kwargs) -> RunResult: + """See :meth:`Pytester.runpytest`.""" + return self._pytester.runpytest(*args, **kwargs) + + def parseconfig(self, *args) -> Config: + """See :meth:`Pytester.parseconfig`.""" + return self._pytester.parseconfig(*args) + + def parseconfigure(self, *args) -> Config: + """See :meth:`Pytester.parseconfigure`.""" + return self._pytester.parseconfigure(*args) + + def getitem(self, source, funcname="test_func"): + """See :meth:`Pytester.getitem`.""" + return self._pytester.getitem(source, funcname) + + def getitems(self, source): + """See :meth:`Pytester.getitems`.""" + return self._pytester.getitems(source) + + def getmodulecol(self, source, configargs=(), withinit=False): + """See :meth:`Pytester.getmodulecol`.""" + return self._pytester.getmodulecol( + source, configargs=configargs, withinit=withinit + ) + + def collect_by_name( + self, modcol: Collector, name: str + ) -> Optional[Union[Item, Collector]]: + """See :meth:`Pytester.collect_by_name`.""" + return self._pytester.collect_by_name(modcol, name) + + def popen( + self, + cmdargs, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + stdin=CLOSE_STDIN, + **kw, + ): + """See :meth:`Pytester.popen`.""" + return self._pytester.popen(cmdargs, stdout, stderr, stdin, **kw) + + def run(self, *cmdargs, timeout=None, stdin=CLOSE_STDIN) -> RunResult: + """See :meth:`Pytester.run`.""" + return self._pytester.run(*cmdargs, timeout=timeout, stdin=stdin) + + def runpython(self, script) -> RunResult: + """See :meth:`Pytester.runpython`.""" + return self._pytester.runpython(script) + + def runpython_c(self, command): + """See :meth:`Pytester.runpython_c`.""" + return self._pytester.runpython_c(command) + + def runpytest_subprocess(self, *args, timeout=None) -> RunResult: + """See :meth:`Pytester.runpytest_subprocess`.""" + return self._pytester.runpytest_subprocess(*args, timeout=timeout) + + def spawn_pytest( + self, string: str, expect_timeout: float = 10.0 + ) -> "pexpect.spawn": + """See :meth:`Pytester.spawn_pytest`.""" + return self._pytester.spawn_pytest(string, expect_timeout=expect_timeout) + + def spawn(self, cmd: str, expect_timeout: float = 10.0) -> "pexpect.spawn": + """See :meth:`Pytester.spawn`.""" + return self._pytester.spawn(cmd, expect_timeout=expect_timeout) + + def __repr__(self) -> str: + return f"" + + def __str__(self) -> str: + return str(self.tmpdir) + + +class LegacyTestdirPlugin: + @staticmethod + @fixture + def testdir(pytester: Pytester) -> Testdir: + """ + Identical to :fixture:`pytester`, and provides an instance whose methods return + legacy ``LEGACY_PATH`` objects instead when applicable. 
+ + New code should avoid using :fixture:`testdir` in favor of :fixture:`pytester`. + """ + return Testdir(pytester, _ispytest=True) + + +@final +@dataclasses.dataclass +class TempdirFactory: + """Backward compatibility wrapper that implements ``py.path.local`` + for :class:`TempPathFactory`. + + .. note:: + These days, it is preferred to use ``tmp_path_factory``. + + :ref:`About the tmpdir and tmpdir_factory fixtures`. + + """ + + _tmppath_factory: TempPathFactory + + def __init__( + self, tmppath_factory: TempPathFactory, *, _ispytest: bool = False + ) -> None: + check_ispytest(_ispytest) + self._tmppath_factory = tmppath_factory + + def mktemp(self, basename: str, numbered: bool = True) -> LEGACY_PATH: + """Same as :meth:`TempPathFactory.mktemp`, but returns a ``py.path.local`` object.""" + return legacy_path(self._tmppath_factory.mktemp(basename, numbered).resolve()) + + def getbasetemp(self) -> LEGACY_PATH: + """Same as :meth:`TempPathFactory.getbasetemp`, but returns a ``py.path.local`` object.""" + return legacy_path(self._tmppath_factory.getbasetemp().resolve()) + + +class LegacyTmpdirPlugin: + @staticmethod + @fixture(scope="session") + def tmpdir_factory(request: FixtureRequest) -> TempdirFactory: + """Return a :class:`pytest.TempdirFactory` instance for the test session.""" + # Set dynamically by pytest_configure(). + return request.config._tmpdirhandler # type: ignore + + @staticmethod + @fixture + def tmpdir(tmp_path: Path) -> LEGACY_PATH: + """Return a temporary directory path object which is unique to each test + function invocation, created as a sub directory of the base temporary + directory. + + By default, a new base temporary directory is created each test session, + and old bases are removed after 3 sessions, to aid in debugging. If + ``--basetemp`` is used then it is cleared each session. See + :ref:`temporary directory location and retention`. + + The returned object is a `legacy_path`_ object. + + .. note:: + These days, it is preferred to use ``tmp_path``. + + :ref:`About the tmpdir and tmpdir_factory fixtures`. + + .. _legacy_path: https://py.readthedocs.io/en/latest/path.html + """ + return legacy_path(tmp_path) + + +def Cache_makedir(self: Cache, name: str) -> LEGACY_PATH: + """Return a directory path object with the given name. + + Same as :func:`mkdir`, but returns a legacy py path instance. + """ + return legacy_path(self.mkdir(name)) + + +def FixtureRequest_fspath(self: FixtureRequest) -> LEGACY_PATH: + """(deprecated) The file system path of the test module which collected this test.""" + return legacy_path(self.path) + + +def TerminalReporter_startdir(self: TerminalReporter) -> LEGACY_PATH: + """The directory from which pytest was invoked. + + Prefer to use ``startpath`` which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(self.startpath) + + +def Config_invocation_dir(self: Config) -> LEGACY_PATH: + """The directory from which pytest was invoked. + + Prefer to use :attr:`invocation_params.dir `, + which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(str(self.invocation_params.dir)) + + +def Config_rootdir(self: Config) -> LEGACY_PATH: + """The path to the :ref:`rootdir `. + + Prefer to use :attr:`rootpath`, which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(str(self.rootpath)) + + +def Config_inifile(self: Config) -> Optional[LEGACY_PATH]: + """The path to the :ref:`configfile `. + + Prefer to use :attr:`inipath`, which is a :class:`pathlib.Path`. 
+ + :type: Optional[LEGACY_PATH] + """ + return legacy_path(str(self.inipath)) if self.inipath else None + + +def Session_stardir(self: Session) -> LEGACY_PATH: + """The path from which pytest was invoked. + + Prefer to use ``startpath`` which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(self.startpath) + + +def Config__getini_unknown_type( + self, name: str, type: str, value: Union[str, List[str]] +): + if type == "pathlist": + # TODO: This assert is probably not valid in all cases. + assert self.inipath is not None + dp = self.inipath.parent + input_values = shlex.split(value) if isinstance(value, str) else value + return [legacy_path(str(dp / x)) for x in input_values] + else: + raise ValueError(f"unknown configuration type: {type}", value) + + +def Node_fspath(self: Node) -> LEGACY_PATH: + """(deprecated) returns a legacy_path copy of self.path""" + return legacy_path(self.path) + + +def Node_fspath_set(self: Node, value: LEGACY_PATH) -> None: + self.path = Path(value) + + +@hookimpl(tryfirst=True) +def pytest_load_initial_conftests(early_config: Config) -> None: + """Monkeypatch legacy path attributes in several classes, as early as possible.""" + mp = MonkeyPatch() + early_config.add_cleanup(mp.undo) + + # Add Cache.makedir(). + mp.setattr(Cache, "makedir", Cache_makedir, raising=False) + + # Add FixtureRequest.fspath property. + mp.setattr(FixtureRequest, "fspath", property(FixtureRequest_fspath), raising=False) + + # Add TerminalReporter.startdir property. + mp.setattr( + TerminalReporter, "startdir", property(TerminalReporter_startdir), raising=False + ) + + # Add Config.{invocation_dir,rootdir,inifile} properties. + mp.setattr(Config, "invocation_dir", property(Config_invocation_dir), raising=False) + mp.setattr(Config, "rootdir", property(Config_rootdir), raising=False) + mp.setattr(Config, "inifile", property(Config_inifile), raising=False) + + # Add Session.startdir property. + mp.setattr(Session, "startdir", property(Session_stardir), raising=False) + + # Add pathlist configuration type. + mp.setattr(Config, "_getini_unknown_type", Config__getini_unknown_type) + + # Add Node.fspath property. + mp.setattr(Node, "fspath", property(Node_fspath, Node_fspath_set), raising=False) + + +@hookimpl +def pytest_configure(config: Config) -> None: + """Installs the LegacyTmpdirPlugin if the ``tmpdir`` plugin is also installed.""" + if config.pluginmanager.has_plugin("tmpdir"): + mp = MonkeyPatch() + config.add_cleanup(mp.undo) + # Create TmpdirFactory and attach it to the config object. + # + # This is to comply with existing plugins which expect the handler to be + # available at pytest_configure time, but ideally should be moved entirely + # to the tmpdir_factory session fixture. + try: + tmp_path_factory = config._tmp_path_factory # type: ignore[attr-defined] + except AttributeError: + # tmpdir plugin is blocked. + pass + else: + _tmpdirhandler = TempdirFactory(tmp_path_factory, _ispytest=True) + mp.setattr(config, "_tmpdirhandler", _tmpdirhandler, raising=False) + + config.pluginmanager.register(LegacyTmpdirPlugin, "legacypath-tmpdir") + + +@hookimpl +def pytest_plugin_registered(plugin: object, manager: PytestPluginManager) -> None: + # pytester is not loaded by default and is commonly loaded from a conftest, + # so checking for it in `pytest_configure` is not enough. 
+ is_pytester = plugin is manager.get_plugin("pytester") + if is_pytester and not manager.is_registered(LegacyTestdirPlugin): + manager.register(LegacyTestdirPlugin, "legacypath-pytester") diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/logging.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/logging.py new file mode 100644 index 000000000..af5e443ce --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/logging.py @@ -0,0 +1,959 @@ +# mypy: allow-untyped-defs +"""Access and control log capturing.""" + +from contextlib import contextmanager +from contextlib import nullcontext +from datetime import datetime +from datetime import timedelta +from datetime import timezone +import io +from io import StringIO +import logging +from logging import LogRecord +import os +from pathlib import Path +import re +from types import TracebackType +from typing import AbstractSet +from typing import Dict +from typing import final +from typing import Generator +from typing import Generic +from typing import List +from typing import Literal +from typing import Mapping +from typing import Optional +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from _pytest import nodes +from _pytest._io import TerminalWriter +from _pytest.capture import CaptureManager +from _pytest.config import _strtobool +from _pytest.config import Config +from _pytest.config import create_terminal_writer +from _pytest.config import hookimpl +from _pytest.config import UsageError +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.stash import StashKey +from _pytest.terminal import TerminalReporter + + +if TYPE_CHECKING: + logging_StreamHandler = logging.StreamHandler[StringIO] +else: + logging_StreamHandler = logging.StreamHandler + +DEFAULT_LOG_FORMAT = "%(levelname)-8s %(name)s:%(filename)s:%(lineno)d %(message)s" +DEFAULT_LOG_DATE_FORMAT = "%H:%M:%S" +_ANSI_ESCAPE_SEQ = re.compile(r"\x1b\[[\d;]+m") +caplog_handler_key = StashKey["LogCaptureHandler"]() +caplog_records_key = StashKey[Dict[str, List[logging.LogRecord]]]() + + +def _remove_ansi_escape_sequences(text: str) -> str: + return _ANSI_ESCAPE_SEQ.sub("", text) + + +class DatetimeFormatter(logging.Formatter): + """A logging formatter which formats record with + :func:`datetime.datetime.strftime` formatter instead of + :func:`time.strftime` in case of microseconds in format string. + """ + + def formatTime(self, record: LogRecord, datefmt: Optional[str] = None) -> str: + if datefmt and "%f" in datefmt: + ct = self.converter(record.created) + tz = timezone(timedelta(seconds=ct.tm_gmtoff), ct.tm_zone) + # Construct `datetime.datetime` object from `struct_time` + # and msecs information from `record` + # Using int() instead of round() to avoid it exceeding 1_000_000 and causing a ValueError (#11861). 
+ dt = datetime(*ct[0:6], microsecond=int(record.msecs * 1000), tzinfo=tz) + return dt.strftime(datefmt) + # Use `logging.Formatter` for non-microsecond formats + return super().formatTime(record, datefmt) + + +class ColoredLevelFormatter(DatetimeFormatter): + """A logging formatter which colorizes the %(levelname)..s part of the + log format passed to __init__.""" + + LOGLEVEL_COLOROPTS: Mapping[int, AbstractSet[str]] = { + logging.CRITICAL: {"red"}, + logging.ERROR: {"red", "bold"}, + logging.WARNING: {"yellow"}, + logging.WARN: {"yellow"}, + logging.INFO: {"green"}, + logging.DEBUG: {"purple"}, + logging.NOTSET: set(), + } + LEVELNAME_FMT_REGEX = re.compile(r"%\(levelname\)([+-.]?\d*(?:\.\d+)?s)") + + def __init__(self, terminalwriter: TerminalWriter, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self._terminalwriter = terminalwriter + self._original_fmt = self._style._fmt + self._level_to_fmt_mapping: Dict[int, str] = {} + + for level, color_opts in self.LOGLEVEL_COLOROPTS.items(): + self.add_color_level(level, *color_opts) + + def add_color_level(self, level: int, *color_opts: str) -> None: + """Add or update color opts for a log level. + + :param level: + Log level to apply a style to, e.g. ``logging.INFO``. + :param color_opts: + ANSI escape sequence color options. Capitalized colors indicates + background color, i.e. ``'green', 'Yellow', 'bold'`` will give bold + green text on yellow background. + + .. warning:: + This is an experimental API. + """ + assert self._fmt is not None + levelname_fmt_match = self.LEVELNAME_FMT_REGEX.search(self._fmt) + if not levelname_fmt_match: + return + levelname_fmt = levelname_fmt_match.group() + + formatted_levelname = levelname_fmt % {"levelname": logging.getLevelName(level)} + + # add ANSI escape sequences around the formatted levelname + color_kwargs = {name: True for name in color_opts} + colorized_formatted_levelname = self._terminalwriter.markup( + formatted_levelname, **color_kwargs + ) + self._level_to_fmt_mapping[level] = self.LEVELNAME_FMT_REGEX.sub( + colorized_formatted_levelname, self._fmt + ) + + def format(self, record: logging.LogRecord) -> str: + fmt = self._level_to_fmt_mapping.get(record.levelno, self._original_fmt) + self._style._fmt = fmt + return super().format(record) + + +class PercentStyleMultiline(logging.PercentStyle): + """A logging style with special support for multiline messages. + + If the message of a record consists of multiple lines, this style + formats the message as if each line were logged separately. + """ + + def __init__(self, fmt: str, auto_indent: Union[int, str, bool, None]) -> None: + super().__init__(fmt) + self._auto_indent = self._get_auto_indent(auto_indent) + + @staticmethod + def _get_auto_indent(auto_indent_option: Union[int, str, bool, None]) -> int: + """Determine the current auto indentation setting. + + Specify auto indent behavior (on/off/fixed) by passing in + extra={"auto_indent": [value]} to the call to logging.log() or + using a --log-auto-indent [value] command line or the + log_auto_indent [value] config option. + + Default behavior is auto-indent off. + + Using the string "True" or "on" or the boolean True as the value + turns auto indent on, using the string "False" or "off" or the + boolean False or the int 0 turns it off, and specifying a + positive integer fixes the indentation position to the value + specified. + + Any other values for the option are invalid, and will silently be + converted to the default. 
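    For example, a single log call can request a fixed four-column indent when
    pytest's formatter is active (an illustrative call, not part of this
    helper)::

        import logging

        logging.getLogger(__name__).warning(
            "first line\nsecond line", extra={"auto_indent": 4}
        )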
+ + :param None|bool|int|str auto_indent_option: + User specified option for indentation from command line, config + or extra kwarg. Accepts int, bool or str. str option accepts the + same range of values as boolean config options, as well as + positive integers represented in str form. + + :returns: + Indentation value, which can be + -1 (automatically determine indentation) or + 0 (auto-indent turned off) or + >0 (explicitly set indentation position). + """ + if auto_indent_option is None: + return 0 + elif isinstance(auto_indent_option, bool): + if auto_indent_option: + return -1 + else: + return 0 + elif isinstance(auto_indent_option, int): + return int(auto_indent_option) + elif isinstance(auto_indent_option, str): + try: + return int(auto_indent_option) + except ValueError: + pass + try: + if _strtobool(auto_indent_option): + return -1 + except ValueError: + return 0 + + return 0 + + def format(self, record: logging.LogRecord) -> str: + if "\n" in record.message: + if hasattr(record, "auto_indent"): + # Passed in from the "extra={}" kwarg on the call to logging.log(). + auto_indent = self._get_auto_indent(record.auto_indent) + else: + auto_indent = self._auto_indent + + if auto_indent: + lines = record.message.splitlines() + formatted = self._fmt % {**record.__dict__, "message": lines[0]} + + if auto_indent < 0: + indentation = _remove_ansi_escape_sequences(formatted).find( + lines[0] + ) + else: + # Optimizes logging by allowing a fixed indentation. + indentation = auto_indent + lines[0] = formatted + return ("\n" + " " * indentation).join(lines) + return self._fmt % record.__dict__ + + +def get_option_ini(config: Config, *names: str): + for name in names: + ret = config.getoption(name) # 'default' arg won't work as expected + if ret is None: + ret = config.getini(name) + if ret: + return ret + + +def pytest_addoption(parser: Parser) -> None: + """Add options to control log capturing.""" + group = parser.getgroup("logging") + + def add_option_ini(option, dest, default=None, type=None, **kwargs): + parser.addini( + dest, default=default, type=type, help="Default value for " + option + ) + group.addoption(option, dest=dest, **kwargs) + + add_option_ini( + "--log-level", + dest="log_level", + default=None, + metavar="LEVEL", + help=( + "Level of messages to catch/display." + " Not set by default, so it depends on the root/parent log handler's" + ' effective level, where it is "WARNING" by default.' 
+ ), + ) + add_option_ini( + "--log-format", + dest="log_format", + default=DEFAULT_LOG_FORMAT, + help="Log format used by the logging module", + ) + add_option_ini( + "--log-date-format", + dest="log_date_format", + default=DEFAULT_LOG_DATE_FORMAT, + help="Log date format used by the logging module", + ) + parser.addini( + "log_cli", + default=False, + type="bool", + help='Enable log display during test run (also known as "live logging")', + ) + add_option_ini( + "--log-cli-level", dest="log_cli_level", default=None, help="CLI logging level" + ) + add_option_ini( + "--log-cli-format", + dest="log_cli_format", + default=None, + help="Log format used by the logging module", + ) + add_option_ini( + "--log-cli-date-format", + dest="log_cli_date_format", + default=None, + help="Log date format used by the logging module", + ) + add_option_ini( + "--log-file", + dest="log_file", + default=None, + help="Path to a file when logging will be written to", + ) + add_option_ini( + "--log-file-mode", + dest="log_file_mode", + default="w", + choices=["w", "a"], + help="Log file open mode", + ) + add_option_ini( + "--log-file-level", + dest="log_file_level", + default=None, + help="Log file logging level", + ) + add_option_ini( + "--log-file-format", + dest="log_file_format", + default=None, + help="Log format used by the logging module", + ) + add_option_ini( + "--log-file-date-format", + dest="log_file_date_format", + default=None, + help="Log date format used by the logging module", + ) + add_option_ini( + "--log-auto-indent", + dest="log_auto_indent", + default=None, + help="Auto-indent multiline messages passed to the logging module. Accepts true|on, false|off or an integer.", + ) + group.addoption( + "--log-disable", + action="append", + default=[], + dest="logger_disable", + help="Disable a logger by name. Can be passed multiple times.", + ) + + +_HandlerType = TypeVar("_HandlerType", bound=logging.Handler) + + +# Not using @contextmanager for performance reasons. 
+class catching_logs(Generic[_HandlerType]): + """Context manager that prepares the whole logging machinery properly.""" + + __slots__ = ("handler", "level", "orig_level") + + def __init__(self, handler: _HandlerType, level: Optional[int] = None) -> None: + self.handler = handler + self.level = level + + def __enter__(self) -> _HandlerType: + root_logger = logging.getLogger() + if self.level is not None: + self.handler.setLevel(self.level) + root_logger.addHandler(self.handler) + if self.level is not None: + self.orig_level = root_logger.level + root_logger.setLevel(min(self.orig_level, self.level)) + return self.handler + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + root_logger = logging.getLogger() + if self.level is not None: + root_logger.setLevel(self.orig_level) + root_logger.removeHandler(self.handler) + + +class LogCaptureHandler(logging_StreamHandler): + """A logging handler that stores log records and the log text.""" + + def __init__(self) -> None: + """Create a new log handler.""" + super().__init__(StringIO()) + self.records: List[logging.LogRecord] = [] + + def emit(self, record: logging.LogRecord) -> None: + """Keep the log records in a list in addition to the log text.""" + self.records.append(record) + super().emit(record) + + def reset(self) -> None: + self.records = [] + self.stream = StringIO() + + def clear(self) -> None: + self.records.clear() + self.stream = StringIO() + + def handleError(self, record: logging.LogRecord) -> None: + if logging.raiseExceptions: + # Fail the test if the log message is bad (emit failed). + # The default behavior of logging is to print "Logging error" + # to stderr with the call stack and some extra details. + # pytest wants to make such mistakes visible during testing. + raise + + +@final +class LogCaptureFixture: + """Provides access and control of log capturing.""" + + def __init__(self, item: nodes.Node, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + self._item = item + self._initial_handler_level: Optional[int] = None + # Dict of log name -> log level. + self._initial_logger_levels: Dict[Optional[str], int] = {} + self._initial_disabled_logging_level: Optional[int] = None + + def _finalize(self) -> None: + """Finalize the fixture. + + This restores the log levels and the disabled logging levels changed by :meth:`set_level`. + """ + # Restore log levels. + if self._initial_handler_level is not None: + self.handler.setLevel(self._initial_handler_level) + for logger_name, level in self._initial_logger_levels.items(): + logger = logging.getLogger(logger_name) + logger.setLevel(level) + # Disable logging at the original disabled logging level. + if self._initial_disabled_logging_level is not None: + logging.disable(self._initial_disabled_logging_level) + self._initial_disabled_logging_level = None + + @property + def handler(self) -> LogCaptureHandler: + """Get the logging handler used by the fixture.""" + return self._item.stash[caplog_handler_key] + + def get_records( + self, when: Literal["setup", "call", "teardown"] + ) -> List[logging.LogRecord]: + """Get the logging records for one of the possible test phases. + + :param when: + Which test phase to obtain the records from. + Valid values are: "setup", "call" and "teardown". + + :returns: The list of captured records at the given stage. + + .. 
versionadded:: 3.4 + """ + return self._item.stash[caplog_records_key].get(when, []) + + @property + def text(self) -> str: + """The formatted log text.""" + return _remove_ansi_escape_sequences(self.handler.stream.getvalue()) + + @property + def records(self) -> List[logging.LogRecord]: + """The list of log records.""" + return self.handler.records + + @property + def record_tuples(self) -> List[Tuple[str, int, str]]: + """A list of a stripped down version of log records intended + for use in assertion comparison. + + The format of the tuple is: + + (logger_name, log_level, message) + """ + return [(r.name, r.levelno, r.getMessage()) for r in self.records] + + @property + def messages(self) -> List[str]: + """A list of format-interpolated log messages. + + Unlike 'records', which contains the format string and parameters for + interpolation, log messages in this list are all interpolated. + + Unlike 'text', which contains the output from the handler, log + messages in this list are unadorned with levels, timestamps, etc, + making exact comparisons more reliable. + + Note that traceback or stack info (from :func:`logging.exception` or + the `exc_info` or `stack_info` arguments to the logging functions) is + not included, as this is added by the formatter in the handler. + + .. versionadded:: 3.7 + """ + return [r.getMessage() for r in self.records] + + def clear(self) -> None: + """Reset the list of log records and the captured log text.""" + self.handler.clear() + + def _force_enable_logging( + self, level: Union[int, str], logger_obj: logging.Logger + ) -> int: + """Enable the desired logging level if the global level was disabled via ``logging.disabled``. + + Only enables logging levels greater than or equal to the requested ``level``. + + Does nothing if the desired ``level`` wasn't disabled. + + :param level: + The logger level caplog should capture. + All logging is enabled if a non-standard logging level string is supplied. + Valid level strings are in :data:`logging._nameToLevel`. + :param logger_obj: The logger object to check. + + :return: The original disabled logging level. + """ + original_disable_level: int = logger_obj.manager.disable + + if isinstance(level, str): + # Try to translate the level string to an int for `logging.disable()` + level = logging.getLevelName(level) + + if not isinstance(level, int): + # The level provided was not valid, so just un-disable all logging. + logging.disable(logging.NOTSET) + elif not logger_obj.isEnabledFor(level): + # Each level is `10` away from other levels. + # https://docs.python.org/3/library/logging.html#logging-levels + disable_level = max(level - 10, logging.NOTSET) + logging.disable(disable_level) + + return original_disable_level + + def set_level(self, level: Union[int, str], logger: Optional[str] = None) -> None: + """Set the threshold level of a logger for the duration of a test. + + Logging messages which are less severe than this level will not be captured. + + .. versionchanged:: 3.4 + The levels of the loggers changed by this function will be + restored to their initial values at the end of the test. + + Will enable the requested logging level if it was disabled via :func:`logging.disable`. + + :param level: The level. + :param logger: The logger to update. If not given, the root logger. + """ + logger_obj = logging.getLogger(logger) + # Save the original log-level to restore it during teardown. 
+ self._initial_logger_levels.setdefault(logger, logger_obj.level) + logger_obj.setLevel(level) + if self._initial_handler_level is None: + self._initial_handler_level = self.handler.level + self.handler.setLevel(level) + initial_disabled_logging_level = self._force_enable_logging(level, logger_obj) + if self._initial_disabled_logging_level is None: + self._initial_disabled_logging_level = initial_disabled_logging_level + + @contextmanager + def at_level( + self, level: Union[int, str], logger: Optional[str] = None + ) -> Generator[None, None, None]: + """Context manager that sets the level for capturing of logs. After + the end of the 'with' statement the level is restored to its original + value. + + Will enable the requested logging level if it was disabled via :func:`logging.disable`. + + :param level: The level. + :param logger: The logger to update. If not given, the root logger. + """ + logger_obj = logging.getLogger(logger) + orig_level = logger_obj.level + logger_obj.setLevel(level) + handler_orig_level = self.handler.level + self.handler.setLevel(level) + original_disable_level = self._force_enable_logging(level, logger_obj) + try: + yield + finally: + logger_obj.setLevel(orig_level) + self.handler.setLevel(handler_orig_level) + logging.disable(original_disable_level) + + @contextmanager + def filtering(self, filter_: logging.Filter) -> Generator[None, None, None]: + """Context manager that temporarily adds the given filter to the caplog's + :meth:`handler` for the 'with' statement block, and removes that filter at the + end of the block. + + :param filter_: A custom :class:`logging.Filter` object. + + .. versionadded:: 7.5 + """ + self.handler.addFilter(filter_) + try: + yield + finally: + self.handler.removeFilter(filter_) + + +@fixture +def caplog(request: FixtureRequest) -> Generator[LogCaptureFixture, None, None]: + """Access and control log capturing. + + Captured logs are available through the following properties/methods:: + + * caplog.messages -> list of format-interpolated log messages + * caplog.text -> string containing formatted log output + * caplog.records -> list of logging.LogRecord instances + * caplog.record_tuples -> list of (logger_name, level, message) tuples + * caplog.clear() -> clear captured records and formatted log output string + """ + result = LogCaptureFixture(request.node, _ispytest=True) + yield result + result._finalize() + + +def get_log_level_for_setting(config: Config, *setting_names: str) -> Optional[int]: + for setting_name in setting_names: + log_level = config.getoption(setting_name) + if log_level is None: + log_level = config.getini(setting_name) + if log_level: + break + else: + return None + + if isinstance(log_level, str): + log_level = log_level.upper() + try: + return int(getattr(logging, log_level, log_level)) + except ValueError as e: + # Python logging does not recognise this as a logging level + raise UsageError( + f"'{log_level}' is not recognized as a logging level name for " + f"'{setting_name}'. Please consider passing the " + "logging level num instead." + ) from e + + +# run after terminalreporter/capturemanager are configured +@hookimpl(trylast=True) +def pytest_configure(config: Config) -> None: + config.pluginmanager.register(LoggingPlugin(config), "logging-plugin") + + +class LoggingPlugin: + """Attaches to the logging module and captures log messages for each test.""" + + def __init__(self, config: Config) -> None: + """Create a new plugin to capture log messages. 
+ + The formatter can be safely shared across all handlers so + create a single one for the entire test session here. + """ + self._config = config + + # Report logging. + self.formatter = self._create_formatter( + get_option_ini(config, "log_format"), + get_option_ini(config, "log_date_format"), + get_option_ini(config, "log_auto_indent"), + ) + self.log_level = get_log_level_for_setting(config, "log_level") + self.caplog_handler = LogCaptureHandler() + self.caplog_handler.setFormatter(self.formatter) + self.report_handler = LogCaptureHandler() + self.report_handler.setFormatter(self.formatter) + + # File logging. + self.log_file_level = get_log_level_for_setting( + config, "log_file_level", "log_level" + ) + log_file = get_option_ini(config, "log_file") or os.devnull + if log_file != os.devnull: + directory = os.path.dirname(os.path.abspath(log_file)) + if not os.path.isdir(directory): + os.makedirs(directory) + + self.log_file_mode = get_option_ini(config, "log_file_mode") or "w" + self.log_file_handler = _FileHandler( + log_file, mode=self.log_file_mode, encoding="UTF-8" + ) + log_file_format = get_option_ini(config, "log_file_format", "log_format") + log_file_date_format = get_option_ini( + config, "log_file_date_format", "log_date_format" + ) + + log_file_formatter = DatetimeFormatter( + log_file_format, datefmt=log_file_date_format + ) + self.log_file_handler.setFormatter(log_file_formatter) + + # CLI/live logging. + self.log_cli_level = get_log_level_for_setting( + config, "log_cli_level", "log_level" + ) + if self._log_cli_enabled(): + terminal_reporter = config.pluginmanager.get_plugin("terminalreporter") + # Guaranteed by `_log_cli_enabled()`. + assert terminal_reporter is not None + capture_manager = config.pluginmanager.get_plugin("capturemanager") + # if capturemanager plugin is disabled, live logging still works. + self.log_cli_handler: Union[ + _LiveLoggingStreamHandler, _LiveLoggingNullHandler + ] = _LiveLoggingStreamHandler(terminal_reporter, capture_manager) + else: + self.log_cli_handler = _LiveLoggingNullHandler() + log_cli_formatter = self._create_formatter( + get_option_ini(config, "log_cli_format", "log_format"), + get_option_ini(config, "log_cli_date_format", "log_date_format"), + get_option_ini(config, "log_auto_indent"), + ) + self.log_cli_handler.setFormatter(log_cli_formatter) + self._disable_loggers(loggers_to_disable=config.option.logger_disable) + + def _disable_loggers(self, loggers_to_disable: List[str]) -> None: + if not loggers_to_disable: + return + + for name in loggers_to_disable: + logger = logging.getLogger(name) + logger.disabled = True + + def _create_formatter(self, log_format, log_date_format, auto_indent): + # Color option doesn't exist if terminal plugin is disabled. + color = getattr(self._config.option, "color", "no") + if color != "no" and ColoredLevelFormatter.LEVELNAME_FMT_REGEX.search( + log_format + ): + formatter: logging.Formatter = ColoredLevelFormatter( + create_terminal_writer(self._config), log_format, log_date_format + ) + else: + formatter = DatetimeFormatter(log_format, log_date_format) + + formatter._style = PercentStyleMultiline( + formatter._style._fmt, auto_indent=auto_indent + ) + + return formatter + + def set_log_path(self, fname: str) -> None: + """Set the filename parameter for Logging.FileHandler(). + + Creates parent directory if it does not exist. + + .. warning:: + This is an experimental API. 
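    A sketch of how a ``conftest.py`` hook might call this to get one log file
    per test (the hook choice, the plugin lookup, and the file naming are
    assumptions for illustration; ``"logging-plugin"`` is the name this module
    registers itself under)::

        def pytest_runtest_setup(item):
            plugin = item.config.pluginmanager.get_plugin("logging-plugin")
            if plugin is not None:
                plugin.set_log_path(f"logs/{item.nodeid.replace('::', '-')}.log")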
+ """ + fpath = Path(fname) + + if not fpath.is_absolute(): + fpath = self._config.rootpath / fpath + + if not fpath.parent.exists(): + fpath.parent.mkdir(exist_ok=True, parents=True) + + # https://github.com/python/mypy/issues/11193 + stream: io.TextIOWrapper = fpath.open(mode=self.log_file_mode, encoding="UTF-8") # type: ignore[assignment] + old_stream = self.log_file_handler.setStream(stream) + if old_stream: + old_stream.close() + + def _log_cli_enabled(self) -> bool: + """Return whether live logging is enabled.""" + enabled = self._config.getoption( + "--log-cli-level" + ) is not None or self._config.getini("log_cli") + if not enabled: + return False + + terminal_reporter = self._config.pluginmanager.get_plugin("terminalreporter") + if terminal_reporter is None: + # terminal reporter is disabled e.g. by pytest-xdist. + return False + + return True + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_sessionstart(self) -> Generator[None, None, None]: + self.log_cli_handler.set_when("sessionstart") + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + return (yield) + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_collection(self) -> Generator[None, None, None]: + self.log_cli_handler.set_when("collection") + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + return (yield) + + @hookimpl(wrapper=True) + def pytest_runtestloop(self, session: Session) -> Generator[None, object, object]: + if session.config.option.collectonly: + return (yield) + + if self._log_cli_enabled() and self._config.getoption("verbose") < 1: + # The verbose flag is needed to avoid messy test progress output. + self._config.option.verbose = 1 + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + return (yield) # Run all the tests. 
+ + @hookimpl + def pytest_runtest_logstart(self) -> None: + self.log_cli_handler.reset() + self.log_cli_handler.set_when("start") + + @hookimpl + def pytest_runtest_logreport(self) -> None: + self.log_cli_handler.set_when("logreport") + + def _runtest_for(self, item: nodes.Item, when: str) -> Generator[None, None, None]: + """Implement the internals of the pytest_runtest_xxx() hooks.""" + with catching_logs( + self.caplog_handler, + level=self.log_level, + ) as caplog_handler, catching_logs( + self.report_handler, + level=self.log_level, + ) as report_handler: + caplog_handler.reset() + report_handler.reset() + item.stash[caplog_records_key][when] = caplog_handler.records + item.stash[caplog_handler_key] = caplog_handler + + try: + yield + finally: + log = report_handler.stream.getvalue().strip() + item.add_report_section(when, "log", log) + + @hookimpl(wrapper=True) + def pytest_runtest_setup(self, item: nodes.Item) -> Generator[None, None, None]: + self.log_cli_handler.set_when("setup") + + empty: Dict[str, List[logging.LogRecord]] = {} + item.stash[caplog_records_key] = empty + yield from self._runtest_for(item, "setup") + + @hookimpl(wrapper=True) + def pytest_runtest_call(self, item: nodes.Item) -> Generator[None, None, None]: + self.log_cli_handler.set_when("call") + + yield from self._runtest_for(item, "call") + + @hookimpl(wrapper=True) + def pytest_runtest_teardown(self, item: nodes.Item) -> Generator[None, None, None]: + self.log_cli_handler.set_when("teardown") + + try: + yield from self._runtest_for(item, "teardown") + finally: + del item.stash[caplog_records_key] + del item.stash[caplog_handler_key] + + @hookimpl + def pytest_runtest_logfinish(self) -> None: + self.log_cli_handler.set_when("finish") + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_sessionfinish(self) -> Generator[None, None, None]: + self.log_cli_handler.set_when("sessionfinish") + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + return (yield) + + @hookimpl + def pytest_unconfigure(self) -> None: + # Close the FileHandler explicitly. + # (logging.shutdown might have lost the weakref?!) + self.log_file_handler.close() + + +class _FileHandler(logging.FileHandler): + """A logging FileHandler with pytest tweaks.""" + + def handleError(self, record: logging.LogRecord) -> None: + # Handled by LogCaptureHandler. + pass + + +class _LiveLoggingStreamHandler(logging_StreamHandler): + """A logging StreamHandler used by the live logging feature: it will + write a newline before the first log message in each test. + + During live logging we must also explicitly disable stdout/stderr + capturing otherwise it will get captured and won't appear in the + terminal. + """ + + # Officially stream needs to be a IO[str], but TerminalReporter + # isn't. So force it. 
+ stream: TerminalReporter = None # type: ignore + + def __init__( + self, + terminal_reporter: TerminalReporter, + capture_manager: Optional[CaptureManager], + ) -> None: + super().__init__(stream=terminal_reporter) # type: ignore[arg-type] + self.capture_manager = capture_manager + self.reset() + self.set_when(None) + self._test_outcome_written = False + + def reset(self) -> None: + """Reset the handler; should be called before the start of each test.""" + self._first_record_emitted = False + + def set_when(self, when: Optional[str]) -> None: + """Prepare for the given test phase (setup/call/teardown).""" + self._when = when + self._section_name_shown = False + if when == "start": + self._test_outcome_written = False + + def emit(self, record: logging.LogRecord) -> None: + ctx_manager = ( + self.capture_manager.global_and_fixture_disabled() + if self.capture_manager + else nullcontext() + ) + with ctx_manager: + if not self._first_record_emitted: + self.stream.write("\n") + self._first_record_emitted = True + elif self._when in ("teardown", "finish"): + if not self._test_outcome_written: + self._test_outcome_written = True + self.stream.write("\n") + if not self._section_name_shown and self._when: + self.stream.section("live log " + self._when, sep="-", bold=True) + self._section_name_shown = True + super().emit(record) + + def handleError(self, record: logging.LogRecord) -> None: + # Handled by LogCaptureHandler. + pass + + +class _LiveLoggingNullHandler(logging.NullHandler): + """A logging handler used when live logging is disabled.""" + + def reset(self) -> None: + pass + + def set_when(self, when: str) -> None: + pass + + def handleError(self, record: logging.LogRecord) -> None: + # Handled by LogCaptureHandler. + pass diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/main.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/main.py new file mode 100644 index 000000000..716d5cf78 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/main.py @@ -0,0 +1,1076 @@ +"""Core implementation of the testing process: init, session, runtest loop.""" + +import argparse +import dataclasses +import fnmatch +import functools +import importlib +import importlib.util +import os +from pathlib import Path +import sys +from typing import AbstractSet +from typing import Callable +from typing import Dict +from typing import final +from typing import FrozenSet +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Literal +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union +import warnings + +import pluggy + +from _pytest import nodes +import _pytest._code +from _pytest.config import Config +from _pytest.config import directory_arg +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config import PytestPluginManager +from _pytest.config import UsageError +from _pytest.config.argparsing import Parser +from _pytest.config.compat import PathAwareHookProxy +from _pytest.fixtures import FixtureManager +from _pytest.outcomes import exit +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.pathlib import fnmatch_ex +from _pytest.pathlib import safe_exists +from _pytest.pathlib import scandir +from _pytest.reports import CollectReport +from _pytest.reports import TestReport +from _pytest.runner import 
collect_one_node +from _pytest.runner import SetupState +from _pytest.warning_types import PytestWarning + + +if TYPE_CHECKING: + from typing import Self + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "norecursedirs", + "Directory patterns to avoid for recursion", + type="args", + default=[ + "*.egg", + ".*", + "_darcs", + "build", + "CVS", + "dist", + "node_modules", + "venv", + "{arch}", + ], + ) + parser.addini( + "testpaths", + "Directories to search for tests when no files or directories are given on the " + "command line", + type="args", + default=[], + ) + group = parser.getgroup("general", "Running and selection options") + group._addoption( + "-x", + "--exitfirst", + action="store_const", + dest="maxfail", + const=1, + help="Exit instantly on first error or failed test", + ) + group = parser.getgroup("pytest-warnings") + group.addoption( + "-W", + "--pythonwarnings", + action="append", + help="Set which warnings to report, see -W option of Python itself", + ) + parser.addini( + "filterwarnings", + type="linelist", + help="Each line specifies a pattern for " + "warnings.filterwarnings. " + "Processed after -W/--pythonwarnings.", + ) + group._addoption( + "--maxfail", + metavar="num", + action="store", + type=int, + dest="maxfail", + default=0, + help="Exit after first num failures or errors", + ) + group._addoption( + "--strict-config", + action="store_true", + help="Any warnings encountered while parsing the `pytest` section of the " + "configuration file raise errors", + ) + group._addoption( + "--strict-markers", + action="store_true", + help="Markers not registered in the `markers` section of the configuration " + "file raise errors", + ) + group._addoption( + "--strict", + action="store_true", + help="(Deprecated) alias to --strict-markers", + ) + group._addoption( + "-c", + "--config-file", + metavar="FILE", + type=str, + dest="inifilename", + help="Load configuration from `FILE` instead of trying to locate one of the " + "implicit configuration files.", + ) + group._addoption( + "--continue-on-collection-errors", + action="store_true", + default=False, + dest="continue_on_collection_errors", + help="Force test execution even if collection errors occur", + ) + group._addoption( + "--rootdir", + action="store", + dest="rootdir", + help="Define root directory for tests. 
Can be relative path: 'root_dir', './root_dir', " + "'root_dir/another_dir/'; absolute path: '/home/user/root_dir'; path with variables: " + "'$HOME/root_dir'.", + ) + + group = parser.getgroup("collect", "collection") + group.addoption( + "--collectonly", + "--collect-only", + "--co", + action="store_true", + help="Only collect tests, don't execute them", + ) + group.addoption( + "--pyargs", + action="store_true", + help="Try to interpret all arguments as Python packages", + ) + group.addoption( + "--ignore", + action="append", + metavar="path", + help="Ignore path during collection (multi-allowed)", + ) + group.addoption( + "--ignore-glob", + action="append", + metavar="path", + help="Ignore path pattern during collection (multi-allowed)", + ) + group.addoption( + "--deselect", + action="append", + metavar="nodeid_prefix", + help="Deselect item (via node id prefix) during collection (multi-allowed)", + ) + group.addoption( + "--confcutdir", + dest="confcutdir", + default=None, + metavar="dir", + type=functools.partial(directory_arg, optname="--confcutdir"), + help="Only load conftest.py's relative to specified dir", + ) + group.addoption( + "--noconftest", + action="store_true", + dest="noconftest", + default=False, + help="Don't load any conftest.py files", + ) + group.addoption( + "--keepduplicates", + "--keep-duplicates", + action="store_true", + dest="keepduplicates", + default=False, + help="Keep duplicate tests", + ) + group.addoption( + "--collect-in-virtualenv", + action="store_true", + dest="collect_in_virtualenv", + default=False, + help="Don't ignore tests in a local virtualenv directory", + ) + group.addoption( + "--import-mode", + default="prepend", + choices=["prepend", "append", "importlib"], + dest="importmode", + help="Prepend/append to sys.path when importing test modules and conftest " + "files. Default: prepend.", + ) + parser.addini( + "consider_namespace_packages", + type="bool", + default=False, + help="Consider namespace packages when resolving module names during import", + ) + + group = parser.getgroup("debugconfig", "test session debugging and configuration") + group.addoption( + "--basetemp", + dest="basetemp", + default=None, + type=validate_basetemp, + metavar="dir", + help=( + "Base temporary directory for this test run. 
" + "(Warning: this directory is removed if it exists.)" + ), + ) + + +def validate_basetemp(path: str) -> str: + # GH 7119 + msg = "basetemp must not be empty, the current working directory or any parent directory of it" + + # empty path + if not path: + raise argparse.ArgumentTypeError(msg) + + def is_ancestor(base: Path, query: Path) -> bool: + """Return whether query is an ancestor of base.""" + if base == query: + return True + return query in base.parents + + # check if path is an ancestor of cwd + if is_ancestor(Path.cwd(), Path(path).absolute()): + raise argparse.ArgumentTypeError(msg) + + # check symlinks for ancestors + if is_ancestor(Path.cwd().resolve(), Path(path).resolve()): + raise argparse.ArgumentTypeError(msg) + + return path + + +def wrap_session( + config: Config, doit: Callable[[Config, "Session"], Optional[Union[int, ExitCode]]] +) -> Union[int, ExitCode]: + """Skeleton command line program.""" + session = Session.from_config(config) + session.exitstatus = ExitCode.OK + initstate = 0 + try: + try: + config._do_configure() + initstate = 1 + config.hook.pytest_sessionstart(session=session) + initstate = 2 + session.exitstatus = doit(config, session) or 0 + except UsageError: + session.exitstatus = ExitCode.USAGE_ERROR + raise + except Failed: + session.exitstatus = ExitCode.TESTS_FAILED + except (KeyboardInterrupt, exit.Exception): + excinfo = _pytest._code.ExceptionInfo.from_current() + exitstatus: Union[int, ExitCode] = ExitCode.INTERRUPTED + if isinstance(excinfo.value, exit.Exception): + if excinfo.value.returncode is not None: + exitstatus = excinfo.value.returncode + if initstate < 2: + sys.stderr.write(f"{excinfo.typename}: {excinfo.value.msg}\n") + config.hook.pytest_keyboard_interrupt(excinfo=excinfo) + session.exitstatus = exitstatus + except BaseException: + session.exitstatus = ExitCode.INTERNAL_ERROR + excinfo = _pytest._code.ExceptionInfo.from_current() + try: + config.notify_exception(excinfo, config.option) + except exit.Exception as exc: + if exc.returncode is not None: + session.exitstatus = exc.returncode + sys.stderr.write(f"{type(exc).__name__}: {exc}\n") + else: + if isinstance(excinfo.value, SystemExit): + sys.stderr.write("mainloop: caught unexpected SystemExit!\n") + + finally: + # Explicitly break reference cycle. 
+ excinfo = None # type: ignore + os.chdir(session.startpath) + if initstate >= 2: + try: + config.hook.pytest_sessionfinish( + session=session, exitstatus=session.exitstatus + ) + except exit.Exception as exc: + if exc.returncode is not None: + session.exitstatus = exc.returncode + sys.stderr.write(f"{type(exc).__name__}: {exc}\n") + config._ensure_unconfigure() + return session.exitstatus + + +def pytest_cmdline_main(config: Config) -> Union[int, ExitCode]: + return wrap_session(config, _main) + + +def _main(config: Config, session: "Session") -> Optional[Union[int, ExitCode]]: + """Default command line protocol for initialization, session, + running tests and reporting.""" + config.hook.pytest_collection(session=session) + config.hook.pytest_runtestloop(session=session) + + if session.testsfailed: + return ExitCode.TESTS_FAILED + elif session.testscollected == 0: + return ExitCode.NO_TESTS_COLLECTED + return None + + +def pytest_collection(session: "Session") -> None: + session.perform_collect() + + +def pytest_runtestloop(session: "Session") -> bool: + if session.testsfailed and not session.config.option.continue_on_collection_errors: + raise session.Interrupted( + "%d error%s during collection" + % (session.testsfailed, "s" if session.testsfailed != 1 else "") + ) + + if session.config.option.collectonly: + return True + + for i, item in enumerate(session.items): + nextitem = session.items[i + 1] if i + 1 < len(session.items) else None + item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) + if session.shouldfail: + raise session.Failed(session.shouldfail) + if session.shouldstop: + raise session.Interrupted(session.shouldstop) + return True + + +def _in_venv(path: Path) -> bool: + """Attempt to detect if ``path`` is the root of a Virtual Environment by + checking for the existence of the appropriate activate script.""" + bindir = path.joinpath("Scripts" if sys.platform.startswith("win") else "bin") + try: + if not bindir.is_dir(): + return False + except OSError: + return False + activates = ( + "activate", + "activate.csh", + "activate.fish", + "Activate", + "Activate.bat", + "Activate.ps1", + ) + return any(fname.name in activates for fname in bindir.iterdir()) + + +def pytest_ignore_collect(collection_path: Path, config: Config) -> Optional[bool]: + if collection_path.name == "__pycache__": + return True + + ignore_paths = config._getconftest_pathlist( + "collect_ignore", path=collection_path.parent + ) + ignore_paths = ignore_paths or [] + excludeopt = config.getoption("ignore") + if excludeopt: + ignore_paths.extend(absolutepath(x) for x in excludeopt) + + if collection_path in ignore_paths: + return True + + ignore_globs = config._getconftest_pathlist( + "collect_ignore_glob", path=collection_path.parent + ) + ignore_globs = ignore_globs or [] + excludeglobopt = config.getoption("ignore_glob") + if excludeglobopt: + ignore_globs.extend(absolutepath(x) for x in excludeglobopt) + + if any(fnmatch.fnmatch(str(collection_path), str(glob)) for glob in ignore_globs): + return True + + allow_in_venv = config.getoption("collect_in_virtualenv") + if not allow_in_venv and _in_venv(collection_path): + return True + + if collection_path.is_dir(): + norecursepatterns = config.getini("norecursedirs") + if any(fnmatch_ex(pat, collection_path) for pat in norecursepatterns): + return True + + return None + + +def pytest_collect_directory( + path: Path, parent: nodes.Collector +) -> Optional[nodes.Collector]: + return Dir.from_parent(parent, path=path) + + +def 
pytest_collection_modifyitems(items: List[nodes.Item], config: Config) -> None: + deselect_prefixes = tuple(config.getoption("deselect") or []) + if not deselect_prefixes: + return + + remaining = [] + deselected = [] + for colitem in items: + if colitem.nodeid.startswith(deselect_prefixes): + deselected.append(colitem) + else: + remaining.append(colitem) + + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +class FSHookProxy: + def __init__( + self, + pm: PytestPluginManager, + remove_mods: AbstractSet[object], + ) -> None: + self.pm = pm + self.remove_mods = remove_mods + + def __getattr__(self, name: str) -> pluggy.HookCaller: + x = self.pm.subset_hook_caller(name, remove_plugins=self.remove_mods) + self.__dict__[name] = x + return x + + +class Interrupted(KeyboardInterrupt): + """Signals that the test run was interrupted.""" + + __module__ = "builtins" # For py3. + + +class Failed(Exception): + """Signals a stop as failed test run.""" + + +@dataclasses.dataclass +class _bestrelpath_cache(Dict[Path, str]): + __slots__ = ("path",) + + path: Path + + def __missing__(self, path: Path) -> str: + r = bestrelpath(self.path, path) + self[path] = r + return r + + +@final +class Dir(nodes.Directory): + """Collector of files in a file system directory. + + .. versionadded:: 8.0 + + .. note:: + + Python directories with an `__init__.py` file are instead collected by + :class:`~pytest.Package` by default. Both are :class:`~pytest.Directory` + collectors. + """ + + @classmethod + def from_parent( # type: ignore[override] + cls, + parent: nodes.Collector, + *, + path: Path, + ) -> "Self": + """The public constructor. + + :param parent: The parent collector of this Dir. + :param path: The directory's path. + """ + return super().from_parent(parent=parent, path=path) + + def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]: + config = self.config + col: Optional[nodes.Collector] + cols: Sequence[nodes.Collector] + ihook = self.ihook + for direntry in scandir(self.path): + if direntry.is_dir(): + path = Path(direntry.path) + if not self.session.isinitpath(path, with_parents=True): + if ihook.pytest_ignore_collect(collection_path=path, config=config): + continue + col = ihook.pytest_collect_directory(path=path, parent=self) + if col is not None: + yield col + + elif direntry.is_file(): + path = Path(direntry.path) + if not self.session.isinitpath(path): + if ihook.pytest_ignore_collect(collection_path=path, config=config): + continue + cols = ihook.pytest_collect_file(file_path=path, parent=self) + yield from cols + + +@final +class Session(nodes.Collector): + """The root of the collection tree. + + ``Session`` collects the initial paths given as arguments to pytest. + """ + + Interrupted = Interrupted + Failed = Failed + # Set on the session by runner.pytest_sessionstart. + _setupstate: SetupState + # Set on the session by fixtures.pytest_sessionstart. 
+ _fixturemanager: FixtureManager + exitstatus: Union[int, ExitCode] + + def __init__(self, config: Config) -> None: + super().__init__( + name="", + path=config.rootpath, + fspath=None, + parent=None, + config=config, + session=self, + nodeid="", + ) + self.testsfailed = 0 + self.testscollected = 0 + self._shouldstop: Union[bool, str] = False + self._shouldfail: Union[bool, str] = False + self.trace = config.trace.root.get("collection") + self._initialpaths: FrozenSet[Path] = frozenset() + self._initialpaths_with_parents: FrozenSet[Path] = frozenset() + self._notfound: List[Tuple[str, Sequence[nodes.Collector]]] = [] + self._initial_parts: List[CollectionArgument] = [] + self._collection_cache: Dict[nodes.Collector, CollectReport] = {} + self.items: List[nodes.Item] = [] + + self._bestrelpathcache: Dict[Path, str] = _bestrelpath_cache(config.rootpath) + + self.config.pluginmanager.register(self, name="session") + + @classmethod + def from_config(cls, config: Config) -> "Session": + session: Session = cls._create(config=config) + return session + + def __repr__(self) -> str: + return "<%s %s exitstatus=%r testsfailed=%d testscollected=%d>" % ( + self.__class__.__name__, + self.name, + getattr(self, "exitstatus", ""), + self.testsfailed, + self.testscollected, + ) + + @property + def shouldstop(self) -> Union[bool, str]: + return self._shouldstop + + @shouldstop.setter + def shouldstop(self, value: Union[bool, str]) -> None: + # The runner checks shouldfail and assumes that if it is set we are + # definitely stopping, so prevent unsetting it. + if value is False and self._shouldstop: + warnings.warn( + PytestWarning( + "session.shouldstop cannot be unset after it has been set; ignoring." + ), + stacklevel=2, + ) + return + self._shouldstop = value + + @property + def shouldfail(self) -> Union[bool, str]: + return self._shouldfail + + @shouldfail.setter + def shouldfail(self, value: Union[bool, str]) -> None: + # The runner checks shouldfail and assumes that if it is set we are + # definitely stopping, so prevent unsetting it. + if value is False and self._shouldfail: + warnings.warn( + PytestWarning( + "session.shouldfail cannot be unset after it has been set; ignoring." + ), + stacklevel=2, + ) + return + self._shouldfail = value + + @property + def startpath(self) -> Path: + """The path from which pytest was invoked. + + .. versionadded:: 7.0.0 + """ + return self.config.invocation_params.dir + + def _node_location_to_relpath(self, node_path: Path) -> str: + # bestrelpath is a quite slow function. + return self._bestrelpathcache[node_path] + + @hookimpl(tryfirst=True) + def pytest_collectstart(self) -> None: + if self.shouldfail: + raise self.Failed(self.shouldfail) + if self.shouldstop: + raise self.Interrupted(self.shouldstop) + + @hookimpl(tryfirst=True) + def pytest_runtest_logreport( + self, report: Union[TestReport, CollectReport] + ) -> None: + if report.failed and not hasattr(report, "wasxfail"): + self.testsfailed += 1 + maxfail = self.config.getvalue("maxfail") + if maxfail and self.testsfailed >= maxfail: + self.shouldfail = "stopping after %d failures" % (self.testsfailed) + + pytest_collectreport = pytest_runtest_logreport + + def isinitpath( + self, + path: Union[str, "os.PathLike[str]"], + *, + with_parents: bool = False, + ) -> bool: + """Is path an initial path? + + An initial path is a path explicitly given to pytest on the command + line. + + :param with_parents: + If set, also return True if the path is a parent of an initial path. + + .. 
versionchanged:: 8.0 + Added the ``with_parents`` parameter. + """ + # Optimization: Path(Path(...)) is much slower than isinstance. + path_ = path if isinstance(path, Path) else Path(path) + if with_parents: + return path_ in self._initialpaths_with_parents + else: + return path_ in self._initialpaths + + def gethookproxy(self, fspath: "os.PathLike[str]") -> pluggy.HookRelay: + # Optimization: Path(Path(...)) is much slower than isinstance. + path = fspath if isinstance(fspath, Path) else Path(fspath) + pm = self.config.pluginmanager + # Check if we have the common case of running + # hooks with all conftest.py files. + my_conftestmodules = pm._getconftestmodules(path) + remove_mods = pm._conftest_plugins.difference(my_conftestmodules) + proxy: pluggy.HookRelay + if remove_mods: + # One or more conftests are not in use at this path. + proxy = PathAwareHookProxy(FSHookProxy(pm, remove_mods)) # type: ignore[arg-type,assignment] + else: + # All plugins are active for this fspath. + proxy = self.config.hook + return proxy + + def _collect_path( + self, + path: Path, + path_cache: Dict[Path, Sequence[nodes.Collector]], + ) -> Sequence[nodes.Collector]: + """Create a Collector for the given path. + + `path_cache` makes it so the same Collectors are returned for the same + path. + """ + if path in path_cache: + return path_cache[path] + + if path.is_dir(): + ihook = self.gethookproxy(path.parent) + col: Optional[nodes.Collector] = ihook.pytest_collect_directory( + path=path, parent=self + ) + cols: Sequence[nodes.Collector] = (col,) if col is not None else () + + elif path.is_file(): + ihook = self.gethookproxy(path) + cols = ihook.pytest_collect_file(file_path=path, parent=self) + + else: + # Broken symlink or invalid/missing file. + cols = () + + path_cache[path] = cols + return cols + + @overload + def perform_collect( + self, args: Optional[Sequence[str]] = ..., genitems: "Literal[True]" = ... + ) -> Sequence[nodes.Item]: ... + + @overload + def perform_collect( + self, args: Optional[Sequence[str]] = ..., genitems: bool = ... + ) -> Sequence[Union[nodes.Item, nodes.Collector]]: ... + + def perform_collect( + self, args: Optional[Sequence[str]] = None, genitems: bool = True + ) -> Sequence[Union[nodes.Item, nodes.Collector]]: + """Perform the collection phase for this session. + + This is called by the default :hook:`pytest_collection` hook + implementation; see the documentation of this hook for more details. + For testing purposes, it may also be called directly on a fresh + ``Session``. + + This function normally recursively expands any collectors collected + from the session to their items, and only items are returned. For + testing purposes, this may be suppressed by passing ``genitems=False``, + in which case the return value contains these collectors unexpanded, + and ``session.items`` is empty. 
+ """ + if args is None: + args = self.config.args + + self.trace("perform_collect", self, args) + self.trace.root.indent += 1 + + hook = self.config.hook + + self._notfound = [] + self._initial_parts = [] + self._collection_cache = {} + self.items = [] + items: Sequence[Union[nodes.Item, nodes.Collector]] = self.items + try: + initialpaths: List[Path] = [] + initialpaths_with_parents: List[Path] = [] + for arg in args: + collection_argument = resolve_collection_argument( + self.config.invocation_params.dir, + arg, + as_pypath=self.config.option.pyargs, + ) + self._initial_parts.append(collection_argument) + initialpaths.append(collection_argument.path) + initialpaths_with_parents.append(collection_argument.path) + initialpaths_with_parents.extend(collection_argument.path.parents) + self._initialpaths = frozenset(initialpaths) + self._initialpaths_with_parents = frozenset(initialpaths_with_parents) + + rep = collect_one_node(self) + self.ihook.pytest_collectreport(report=rep) + self.trace.root.indent -= 1 + if self._notfound: + errors = [] + for arg, collectors in self._notfound: + if collectors: + errors.append( + f"not found: {arg}\n(no match in any of {collectors!r})" + ) + else: + errors.append(f"found no collectors for {arg}") + + raise UsageError(*errors) + + if not genitems: + items = rep.result + else: + if rep.passed: + for node in rep.result: + self.items.extend(self.genitems(node)) + + self.config.pluginmanager.check_pending() + hook.pytest_collection_modifyitems( + session=self, config=self.config, items=items + ) + finally: + self._notfound = [] + self._initial_parts = [] + self._collection_cache = {} + hook.pytest_collection_finish(session=self) + + if genitems: + self.testscollected = len(items) + + return items + + def _collect_one_node( + self, + node: nodes.Collector, + handle_dupes: bool = True, + ) -> Tuple[CollectReport, bool]: + if node in self._collection_cache and handle_dupes: + rep = self._collection_cache[node] + return rep, True + else: + rep = collect_one_node(node) + self._collection_cache[node] = rep + return rep, False + + def collect(self) -> Iterator[Union[nodes.Item, nodes.Collector]]: + # This is a cache for the root directories of the initial paths. + # We can't use collection_cache for Session because of its special + # role as the bootstrapping collector. + path_cache: Dict[Path, Sequence[nodes.Collector]] = {} + + pm = self.config.pluginmanager + + for collection_argument in self._initial_parts: + self.trace("processing argument", collection_argument) + self.trace.root.indent += 1 + + argpath = collection_argument.path + names = collection_argument.parts + module_name = collection_argument.module_name + + # resolve_collection_argument() ensures this. + if argpath.is_dir(): + assert not names, f"invalid arg {(argpath, names)!r}" + + paths = [argpath] + # Add relevant parents of the path, from the root, e.g. + # /a/b/c.py -> [/, /a, /a/b, /a/b/c.py] + if module_name is None: + # Paths outside of the confcutdir should not be considered. + for path in argpath.parents: + if not pm._is_in_confcutdir(path): + break + paths.insert(0, path) + else: + # For --pyargs arguments, only consider paths matching the module + # name. Paths beyond the package hierarchy are not included. 
+ module_name_parts = module_name.split(".") + for i, path in enumerate(argpath.parents, 2): + if i > len(module_name_parts) or path.stem != module_name_parts[-i]: + break + paths.insert(0, path) + + # Start going over the parts from the root, collecting each level + # and discarding all nodes which don't match the level's part. + any_matched_in_initial_part = False + notfound_collectors = [] + work: List[ + Tuple[Union[nodes.Collector, nodes.Item], List[Union[Path, str]]] + ] = [(self, [*paths, *names])] + while work: + matchnode, matchparts = work.pop() + + # Pop'd all of the parts, this is a match. + if not matchparts: + yield matchnode + any_matched_in_initial_part = True + continue + + # Should have been matched by now, discard. + if not isinstance(matchnode, nodes.Collector): + continue + + # Collect this level of matching. + # Collecting Session (self) is done directly to avoid endless + # recursion to this function. + subnodes: Sequence[Union[nodes.Collector, nodes.Item]] + if isinstance(matchnode, Session): + assert isinstance(matchparts[0], Path) + subnodes = matchnode._collect_path(matchparts[0], path_cache) + else: + # For backward compat, files given directly multiple + # times on the command line should not be deduplicated. + handle_dupes = not ( + len(matchparts) == 1 + and isinstance(matchparts[0], Path) + and matchparts[0].is_file() + ) + rep, duplicate = self._collect_one_node(matchnode, handle_dupes) + if not duplicate and not rep.passed: + # Report collection failures here to avoid failing to + # run some test specified in the command line because + # the module could not be imported (#134). + matchnode.ihook.pytest_collectreport(report=rep) + if not rep.passed: + continue + subnodes = rep.result + + # Prune this level. + any_matched_in_collector = False + for node in reversed(subnodes): + # Path part e.g. `/a/b/` in `/a/b/test_file.py::TestIt::test_it`. + if isinstance(matchparts[0], Path): + is_match = node.path == matchparts[0] + if sys.platform == "win32" and not is_match: + # In case the file paths do not match, fallback to samefile() to + # account for short-paths on Windows (#11895). + same_file = os.path.samefile(node.path, matchparts[0]) + # We don't want to match links to the current node, + # otherwise we would match the same file more than once (#12039). + is_match = same_file and ( + os.path.islink(node.path) + == os.path.islink(matchparts[0]) + ) + + # Name part e.g. `TestIt` in `/a/b/test_file.py::TestIt::test_it`. + else: + # TODO: Remove parametrized workaround once collection structure contains + # parametrization. + is_match = ( + node.name == matchparts[0] + or node.name.split("[")[0] == matchparts[0] + ) + if is_match: + work.append((node, matchparts[1:])) + any_matched_in_collector = True + + if not any_matched_in_collector: + notfound_collectors.append(matchnode) + + if not any_matched_in_initial_part: + report_arg = "::".join((str(argpath), *names)) + self._notfound.append((report_arg, notfound_collectors)) + + self.trace.root.indent -= 1 + + def genitems( + self, node: Union[nodes.Item, nodes.Collector] + ) -> Iterator[nodes.Item]: + self.trace("genitems", node) + if isinstance(node, nodes.Item): + node.ihook.pytest_itemcollected(item=node) + yield node + else: + assert isinstance(node, nodes.Collector) + keepduplicates = self.config.getoption("keepduplicates") + # For backward compat, dedup only applies to files. 
+ handle_dupes = not (keepduplicates and isinstance(node, nodes.File)) + rep, duplicate = self._collect_one_node(node, handle_dupes) + if duplicate and not keepduplicates: + return + if rep.passed: + for subnode in rep.result: + yield from self.genitems(subnode) + if not duplicate: + node.ihook.pytest_collectreport(report=rep) + + +def search_pypath(module_name: str) -> Optional[str]: + """Search sys.path for the given a dotted module name, and return its file + system path if found.""" + try: + spec = importlib.util.find_spec(module_name) + # AttributeError: looks like package module, but actually filename + # ImportError: module does not exist + # ValueError: not a module name + except (AttributeError, ImportError, ValueError): + return None + if spec is None or spec.origin is None or spec.origin == "namespace": + return None + elif spec.submodule_search_locations: + return os.path.dirname(spec.origin) + else: + return spec.origin + + +@dataclasses.dataclass(frozen=True) +class CollectionArgument: + """A resolved collection argument.""" + + path: Path + parts: Sequence[str] + module_name: Optional[str] + + +def resolve_collection_argument( + invocation_path: Path, arg: str, *, as_pypath: bool = False +) -> CollectionArgument: + """Parse path arguments optionally containing selection parts and return (fspath, names). + + Command-line arguments can point to files and/or directories, and optionally contain + parts for specific tests selection, for example: + + "pkg/tests/test_foo.py::TestClass::test_foo" + + This function ensures the path exists, and returns a resolved `CollectionArgument`: + + CollectionArgument( + path=Path("/full/path/to/pkg/tests/test_foo.py"), + parts=["TestClass", "test_foo"], + module_name=None, + ) + + When as_pypath is True, expects that the command-line argument actually contains + module paths instead of file-system paths: + + "pkg.tests.test_foo::TestClass::test_foo" + + In which case we search sys.path for a matching module, and then return the *path* to the + found module, which may look like this: + + CollectionArgument( + path=Path("/home/u/myvenv/lib/site-packages/pkg/tests/test_foo.py"), + parts=["TestClass", "test_foo"], + module_name="pkg.tests.test_foo", + ) + + If the path doesn't exist, raise UsageError. + If the path is a directory and selection parts are present, raise UsageError. 
+ """ + base, squacket, rest = str(arg).partition("[") + strpath, *parts = base.split("::") + if parts: + parts[-1] = f"{parts[-1]}{squacket}{rest}" + module_name = None + if as_pypath: + pyarg_strpath = search_pypath(strpath) + if pyarg_strpath is not None: + module_name = strpath + strpath = pyarg_strpath + fspath = invocation_path / strpath + fspath = absolutepath(fspath) + if not safe_exists(fspath): + msg = ( + "module or package not found: {arg} (missing __init__.py?)" + if as_pypath + else "file or directory not found: {arg}" + ) + raise UsageError(msg.format(arg=arg)) + if parts and fspath.is_dir(): + msg = ( + "package argument cannot contain :: selection parts: {arg}" + if as_pypath + else "directory argument cannot contain :: selection parts: {arg}" + ) + raise UsageError(msg.format(arg=arg)) + return CollectionArgument( + path=fspath, + parts=parts, + module_name=module_name, + ) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/mark/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/mark/__init__.py new file mode 100644 index 000000000..77dabd95d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/mark/__init__.py @@ -0,0 +1,278 @@ +"""Generic mechanism for marking and selecting python functions.""" + +import dataclasses +from typing import AbstractSet +from typing import Collection +from typing import List +from typing import Optional +from typing import TYPE_CHECKING +from typing import Union + +from .expression import Expression +from .expression import ParseError +from .structures import EMPTY_PARAMETERSET_OPTION +from .structures import get_empty_parameterset_mark +from .structures import Mark +from .structures import MARK_GEN +from .structures import MarkDecorator +from .structures import MarkGenerator +from .structures import ParameterSet +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config import UsageError +from _pytest.config.argparsing import Parser +from _pytest.stash import StashKey + + +if TYPE_CHECKING: + from _pytest.nodes import Item + + +__all__ = [ + "MARK_GEN", + "Mark", + "MarkDecorator", + "MarkGenerator", + "ParameterSet", + "get_empty_parameterset_mark", +] + + +old_mark_config_key = StashKey[Optional[Config]]() + + +def param( + *values: object, + marks: Union[MarkDecorator, Collection[Union[MarkDecorator, Mark]]] = (), + id: Optional[str] = None, +) -> ParameterSet: + """Specify a parameter in `pytest.mark.parametrize`_ calls or + :ref:`parametrized fixtures `. + + .. code-block:: python + + @pytest.mark.parametrize( + "test_input,expected", + [ + ("3+5", 8), + pytest.param("6*9", 42, marks=pytest.mark.xfail), + ], + ) + def test_eval(test_input, expected): + assert eval(test_input) == expected + + :param values: Variable args of the values of the parameter set, in order. + :param marks: A single mark or a list of marks to be applied to this parameter set. + :param id: The id to attribute to this parameter set. + """ + return ParameterSet.param(*values, marks=marks, id=id) + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group._addoption( + "-k", + action="store", + dest="keyword", + default="", + metavar="EXPRESSION", + help="Only run tests which match the given substring expression. " + "An expression is a Python evaluatable expression " + "where all names are substring-matched against test names " + "and their parent classes. 
Example: -k 'test_method or test_" + "other' matches all test functions and classes whose name " + "contains 'test_method' or 'test_other', while -k 'not test_method' " + "matches those that don't contain 'test_method' in their names. " + "-k 'not test_method and not test_other' will eliminate the matches. " + "Additionally keywords are matched to classes and functions " + "containing extra names in their 'extra_keyword_matches' set, " + "as well as functions which have names assigned directly to them. " + "The matching is case-insensitive.", + ) + + group._addoption( + "-m", + action="store", + dest="markexpr", + default="", + metavar="MARKEXPR", + help="Only run tests matching given mark expression. " + "For example: -m 'mark1 and not mark2'.", + ) + + group.addoption( + "--markers", + action="store_true", + help="show markers (builtin, plugin and per-project ones).", + ) + + parser.addini("markers", "Register new markers for test functions", "linelist") + parser.addini(EMPTY_PARAMETERSET_OPTION, "Default marker for empty parametersets") + + +@hookimpl(tryfirst=True) +def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]: + import _pytest.config + + if config.option.markers: + config._do_configure() + tw = _pytest.config.create_terminal_writer(config) + for line in config.getini("markers"): + parts = line.split(":", 1) + name = parts[0] + rest = parts[1] if len(parts) == 2 else "" + tw.write("@pytest.mark.%s:" % name, bold=True) + tw.line(rest) + tw.line() + config._ensure_unconfigure() + return 0 + + return None + + +@dataclasses.dataclass +class KeywordMatcher: + """A matcher for keywords. + + Given a list of names, matches any substring of one of these names. The + string inclusion check is case-insensitive. + + Will match on the name of colitem, including the names of its parents. + Only matches names of items which are either a :class:`Class` or a + :class:`Function`. + + Additionally, matches on names in the 'extra_keyword_matches' set of + any item, as well as names directly assigned to test functions. + """ + + __slots__ = ("_names",) + + _names: AbstractSet[str] + + @classmethod + def from_item(cls, item: "Item") -> "KeywordMatcher": + mapped_names = set() + + # Add the names of the current item and any parent items, + # except the Session and root Directory's which are not + # interesting for matching. + import pytest + + for node in item.listchain(): + if isinstance(node, pytest.Session): + continue + if isinstance(node, pytest.Directory) and isinstance( + node.parent, pytest.Session + ): + continue + mapped_names.add(node.name) + + # Add the names added as extra keywords to current or parent items. + mapped_names.update(item.listextrakeywords()) + + # Add the names attached to the current function through direct assignment. + function_obj = getattr(item, "function", None) + if function_obj: + mapped_names.update(function_obj.__dict__) + + # Add the markers to the keywords as we no longer handle them correctly. 
+ mapped_names.update(mark.name for mark in item.iter_markers()) + + return cls(mapped_names) + + def __call__(self, subname: str) -> bool: + subname = subname.lower() + names = (name.lower() for name in self._names) + + for name in names: + if subname in name: + return True + return False + + +def deselect_by_keyword(items: "List[Item]", config: Config) -> None: + keywordexpr = config.option.keyword.lstrip() + if not keywordexpr: + return + + expr = _parse_expression(keywordexpr, "Wrong expression passed to '-k'") + + remaining = [] + deselected = [] + for colitem in items: + if not expr.evaluate(KeywordMatcher.from_item(colitem)): + deselected.append(colitem) + else: + remaining.append(colitem) + + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +@dataclasses.dataclass +class MarkMatcher: + """A matcher for markers which are present. + + Tries to match on any marker names, attached to the given colitem. + """ + + __slots__ = ("own_mark_names",) + + own_mark_names: AbstractSet[str] + + @classmethod + def from_item(cls, item: "Item") -> "MarkMatcher": + mark_names = {mark.name for mark in item.iter_markers()} + return cls(mark_names) + + def __call__(self, name: str) -> bool: + return name in self.own_mark_names + + +def deselect_by_mark(items: "List[Item]", config: Config) -> None: + matchexpr = config.option.markexpr + if not matchexpr: + return + + expr = _parse_expression(matchexpr, "Wrong expression passed to '-m'") + remaining: List[Item] = [] + deselected: List[Item] = [] + for item in items: + if expr.evaluate(MarkMatcher.from_item(item)): + remaining.append(item) + else: + deselected.append(item) + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +def _parse_expression(expr: str, exc_message: str) -> Expression: + try: + return Expression.compile(expr) + except ParseError as e: + raise UsageError(f"{exc_message}: {expr}: {e}") from None + + +def pytest_collection_modifyitems(items: "List[Item]", config: Config) -> None: + deselect_by_keyword(items, config) + deselect_by_mark(items, config) + + +def pytest_configure(config: Config) -> None: + config.stash[old_mark_config_key] = MARK_GEN._config + MARK_GEN._config = config + + empty_parameterset = config.getini(EMPTY_PARAMETERSET_OPTION) + + if empty_parameterset not in ("skip", "xfail", "fail_at_collect", None, ""): + raise UsageError( + f"{EMPTY_PARAMETERSET_OPTION!s} must be one of skip, xfail or fail_at_collect" + f" but it is {empty_parameterset!r}" + ) + + +def pytest_unconfigure(config: Config) -> None: + MARK_GEN._config = config.stash.get(old_mark_config_key, None) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/mark/expression.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/mark/expression.py new file mode 100644 index 000000000..78b7fda69 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/mark/expression.py @@ -0,0 +1,223 @@ +r"""Evaluate match expressions, as used by `-k` and `-m`. + +The grammar is: + +expression: expr? EOF +expr: and_expr ('or' and_expr)* +and_expr: not_expr ('and' not_expr)* +not_expr: 'not' not_expr | '(' expr ')' | ident +ident: (\w|:|\+|-|\.|\[|\]|\\|/)+ + +The semantics are: + +- Empty expression evaluates to False. +- ident evaluates to True of False according to a provided matcher function. +- or/and/not evaluate according to the usual boolean semantics. 
+""" + +import ast +import dataclasses +import enum +import re +import types +from typing import Callable +from typing import Iterator +from typing import Mapping +from typing import NoReturn +from typing import Optional +from typing import Sequence + + +__all__ = [ + "Expression", + "ParseError", +] + + +class TokenType(enum.Enum): + LPAREN = "left parenthesis" + RPAREN = "right parenthesis" + OR = "or" + AND = "and" + NOT = "not" + IDENT = "identifier" + EOF = "end of input" + + +@dataclasses.dataclass(frozen=True) +class Token: + __slots__ = ("type", "value", "pos") + type: TokenType + value: str + pos: int + + +class ParseError(Exception): + """The expression contains invalid syntax. + + :param column: The column in the line where the error occurred (1-based). + :param message: A description of the error. + """ + + def __init__(self, column: int, message: str) -> None: + self.column = column + self.message = message + + def __str__(self) -> str: + return f"at column {self.column}: {self.message}" + + +class Scanner: + __slots__ = ("tokens", "current") + + def __init__(self, input: str) -> None: + self.tokens = self.lex(input) + self.current = next(self.tokens) + + def lex(self, input: str) -> Iterator[Token]: + pos = 0 + while pos < len(input): + if input[pos] in (" ", "\t"): + pos += 1 + elif input[pos] == "(": + yield Token(TokenType.LPAREN, "(", pos) + pos += 1 + elif input[pos] == ")": + yield Token(TokenType.RPAREN, ")", pos) + pos += 1 + else: + match = re.match(r"(:?\w|:|\+|-|\.|\[|\]|\\|/)+", input[pos:]) + if match: + value = match.group(0) + if value == "or": + yield Token(TokenType.OR, value, pos) + elif value == "and": + yield Token(TokenType.AND, value, pos) + elif value == "not": + yield Token(TokenType.NOT, value, pos) + else: + yield Token(TokenType.IDENT, value, pos) + pos += len(value) + else: + raise ParseError( + pos + 1, + f'unexpected character "{input[pos]}"', + ) + yield Token(TokenType.EOF, "", pos) + + def accept(self, type: TokenType, *, reject: bool = False) -> Optional[Token]: + if self.current.type is type: + token = self.current + if token.type is not TokenType.EOF: + self.current = next(self.tokens) + return token + if reject: + self.reject((type,)) + return None + + def reject(self, expected: Sequence[TokenType]) -> NoReturn: + raise ParseError( + self.current.pos + 1, + "expected {}; got {}".format( + " OR ".join(type.value for type in expected), + self.current.type.value, + ), + ) + + +# True, False and None are legal match expression identifiers, +# but illegal as Python identifiers. To fix this, this prefix +# is added to identifiers in the conversion to Python AST. 
+IDENT_PREFIX = "$" + + +def expression(s: Scanner) -> ast.Expression: + if s.accept(TokenType.EOF): + ret: ast.expr = ast.Constant(False) + else: + ret = expr(s) + s.accept(TokenType.EOF, reject=True) + return ast.fix_missing_locations(ast.Expression(ret)) + + +def expr(s: Scanner) -> ast.expr: + ret = and_expr(s) + while s.accept(TokenType.OR): + rhs = and_expr(s) + ret = ast.BoolOp(ast.Or(), [ret, rhs]) + return ret + + +def and_expr(s: Scanner) -> ast.expr: + ret = not_expr(s) + while s.accept(TokenType.AND): + rhs = not_expr(s) + ret = ast.BoolOp(ast.And(), [ret, rhs]) + return ret + + +def not_expr(s: Scanner) -> ast.expr: + if s.accept(TokenType.NOT): + return ast.UnaryOp(ast.Not(), not_expr(s)) + if s.accept(TokenType.LPAREN): + ret = expr(s) + s.accept(TokenType.RPAREN, reject=True) + return ret + ident = s.accept(TokenType.IDENT) + if ident: + return ast.Name(IDENT_PREFIX + ident.value, ast.Load()) + s.reject((TokenType.NOT, TokenType.LPAREN, TokenType.IDENT)) + + +class MatcherAdapter(Mapping[str, bool]): + """Adapts a matcher function to a locals mapping as required by eval().""" + + def __init__(self, matcher: Callable[[str], bool]) -> None: + self.matcher = matcher + + def __getitem__(self, key: str) -> bool: + return self.matcher(key[len(IDENT_PREFIX) :]) + + def __iter__(self) -> Iterator[str]: + raise NotImplementedError() + + def __len__(self) -> int: + raise NotImplementedError() + + +class Expression: + """A compiled match expression as used by -k and -m. + + The expression can be evaluated against different matchers. + """ + + __slots__ = ("code",) + + def __init__(self, code: types.CodeType) -> None: + self.code = code + + @classmethod + def compile(self, input: str) -> "Expression": + """Compile a match expression. + + :param input: The input expression - one line. + """ + astexpr = expression(Scanner(input)) + code: types.CodeType = compile( + astexpr, + filename="", + mode="eval", + ) + return Expression(code) + + def evaluate(self, matcher: Callable[[str], bool]) -> bool: + """Evaluate the match expression. + + :param matcher: + Given an identifier, should return whether it matches or not. + Should be prepared to handle arbitrary strings as input. + + :returns: Whether the expression matches or not. 
+ """ + ret: bool = eval(self.code, {"__builtins__": {}}, MatcherAdapter(matcher)) + return ret diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/mark/structures.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/mark/structures.py new file mode 100644 index 000000000..a6503bf1d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/mark/structures.py @@ -0,0 +1,623 @@ +# mypy: allow-untyped-defs +import collections.abc +import dataclasses +import inspect +from typing import Any +from typing import Callable +from typing import Collection +from typing import final +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Mapping +from typing import MutableMapping +from typing import NamedTuple +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union +import warnings + +from .._code import getfslineno +from ..compat import ascii_escaped +from ..compat import NOTSET +from ..compat import NotSetType +from _pytest.config import Config +from _pytest.deprecated import check_ispytest +from _pytest.deprecated import MARKED_FIXTURE +from _pytest.outcomes import fail +from _pytest.warning_types import PytestUnknownMarkWarning + + +if TYPE_CHECKING: + from ..nodes import Node + + +EMPTY_PARAMETERSET_OPTION = "empty_parameter_set_mark" + + +def istestfunc(func) -> bool: + return callable(func) and getattr(func, "__name__", "") != "" + + +def get_empty_parameterset_mark( + config: Config, argnames: Sequence[str], func +) -> "MarkDecorator": + from ..nodes import Collector + + fs, lineno = getfslineno(func) + reason = "got empty parameter set %r, function %s at %s:%d" % ( + argnames, + func.__name__, + fs, + lineno, + ) + + requested_mark = config.getini(EMPTY_PARAMETERSET_OPTION) + if requested_mark in ("", None, "skip"): + mark = MARK_GEN.skip(reason=reason) + elif requested_mark == "xfail": + mark = MARK_GEN.xfail(reason=reason, run=False) + elif requested_mark == "fail_at_collect": + f_name = func.__name__ + _, lineno = getfslineno(func) + raise Collector.CollectError( + "Empty parameter set in '%s' at line %d" % (f_name, lineno + 1) + ) + else: + raise LookupError(requested_mark) + return mark + + +class ParameterSet(NamedTuple): + values: Sequence[Union[object, NotSetType]] + marks: Collection[Union["MarkDecorator", "Mark"]] + id: Optional[str] + + @classmethod + def param( + cls, + *values: object, + marks: Union["MarkDecorator", Collection[Union["MarkDecorator", "Mark"]]] = (), + id: Optional[str] = None, + ) -> "ParameterSet": + if isinstance(marks, MarkDecorator): + marks = (marks,) + else: + assert isinstance(marks, collections.abc.Collection) + + if id is not None: + if not isinstance(id, str): + raise TypeError(f"Expected id to be a string, got {type(id)}: {id!r}") + id = ascii_escaped(id) + return cls(values, marks, id) + + @classmethod + def extract_from( + cls, + parameterset: Union["ParameterSet", Sequence[object], object], + force_tuple: bool = False, + ) -> "ParameterSet": + """Extract from an object or objects. + + :param parameterset: + A legacy style parameterset that may or may not be a tuple, + and may or may not be wrapped into a mess of mark objects. + + :param force_tuple: + Enforce tuple wrapping so single argument tuple values + don't get decomposed and break tests. 
+ """ + if isinstance(parameterset, cls): + return parameterset + if force_tuple: + return cls.param(parameterset) + else: + # TODO: Refactor to fix this type-ignore. Currently the following + # passes type-checking but crashes: + # + # @pytest.mark.parametrize(('x', 'y'), [1, 2]) + # def test_foo(x, y): pass + return cls(parameterset, marks=[], id=None) # type: ignore[arg-type] + + @staticmethod + def _parse_parametrize_args( + argnames: Union[str, Sequence[str]], + argvalues: Iterable[Union["ParameterSet", Sequence[object], object]], + *args, + **kwargs, + ) -> Tuple[Sequence[str], bool]: + if isinstance(argnames, str): + argnames = [x.strip() for x in argnames.split(",") if x.strip()] + force_tuple = len(argnames) == 1 + else: + force_tuple = False + return argnames, force_tuple + + @staticmethod + def _parse_parametrize_parameters( + argvalues: Iterable[Union["ParameterSet", Sequence[object], object]], + force_tuple: bool, + ) -> List["ParameterSet"]: + return [ + ParameterSet.extract_from(x, force_tuple=force_tuple) for x in argvalues + ] + + @classmethod + def _for_parametrize( + cls, + argnames: Union[str, Sequence[str]], + argvalues: Iterable[Union["ParameterSet", Sequence[object], object]], + func, + config: Config, + nodeid: str, + ) -> Tuple[Sequence[str], List["ParameterSet"]]: + argnames, force_tuple = cls._parse_parametrize_args(argnames, argvalues) + parameters = cls._parse_parametrize_parameters(argvalues, force_tuple) + del argvalues + + if parameters: + # Check all parameter sets have the correct number of values. + for param in parameters: + if len(param.values) != len(argnames): + msg = ( + '{nodeid}: in "parametrize" the number of names ({names_len}):\n' + " {names}\n" + "must be equal to the number of values ({values_len}):\n" + " {values}" + ) + fail( + msg.format( + nodeid=nodeid, + values=param.values, + names=argnames, + names_len=len(argnames), + values_len=len(param.values), + ), + pytrace=False, + ) + else: + # Empty parameter set (likely computed at runtime): create a single + # parameter set with NOTSET values, with the "empty parameter set" mark applied to it. + mark = get_empty_parameterset_mark(config, argnames, func) + parameters.append( + ParameterSet(values=(NOTSET,) * len(argnames), marks=[mark], id=None) + ) + return argnames, parameters + + +@final +@dataclasses.dataclass(frozen=True) +class Mark: + """A pytest mark.""" + + #: Name of the mark. + name: str + #: Positional arguments of the mark decorator. + args: Tuple[Any, ...] + #: Keyword arguments of the mark decorator. + kwargs: Mapping[str, Any] + + #: Source Mark for ids with parametrize Marks. + _param_ids_from: Optional["Mark"] = dataclasses.field(default=None, repr=False) + #: Resolved/generated ids with parametrize Marks. + _param_ids_generated: Optional[Sequence[str]] = dataclasses.field( + default=None, repr=False + ) + + def __init__( + self, + name: str, + args: Tuple[Any, ...], + kwargs: Mapping[str, Any], + param_ids_from: Optional["Mark"] = None, + param_ids_generated: Optional[Sequence[str]] = None, + *, + _ispytest: bool = False, + ) -> None: + """:meta private:""" + check_ispytest(_ispytest) + # Weirdness to bypass frozen=True. 
+ object.__setattr__(self, "name", name) + object.__setattr__(self, "args", args) + object.__setattr__(self, "kwargs", kwargs) + object.__setattr__(self, "_param_ids_from", param_ids_from) + object.__setattr__(self, "_param_ids_generated", param_ids_generated) + + def _has_param_ids(self) -> bool: + return "ids" in self.kwargs or len(self.args) >= 4 + + def combined_with(self, other: "Mark") -> "Mark": + """Return a new Mark which is a combination of this + Mark and another Mark. + + Combines by appending args and merging kwargs. + + :param Mark other: The mark to combine with. + :rtype: Mark + """ + assert self.name == other.name + + # Remember source of ids with parametrize Marks. + param_ids_from: Optional[Mark] = None + if self.name == "parametrize": + if other._has_param_ids(): + param_ids_from = other + elif self._has_param_ids(): + param_ids_from = self + + return Mark( + self.name, + self.args + other.args, + dict(self.kwargs, **other.kwargs), + param_ids_from=param_ids_from, + _ispytest=True, + ) + + +# A generic parameter designating an object to which a Mark may +# be applied -- a test function (callable) or class. +# Note: a lambda is not allowed, but this can't be represented. +Markable = TypeVar("Markable", bound=Union[Callable[..., object], type]) + + +@dataclasses.dataclass +class MarkDecorator: + """A decorator for applying a mark on test functions and classes. + + ``MarkDecorators`` are created with ``pytest.mark``:: + + mark1 = pytest.mark.NAME # Simple MarkDecorator + mark2 = pytest.mark.NAME(name1=value) # Parametrized MarkDecorator + + and can then be applied as decorators to test functions:: + + @mark2 + def test_function(): + pass + + When a ``MarkDecorator`` is called, it does the following: + + 1. If called with a single class as its only positional argument and no + additional keyword arguments, it attaches the mark to the class so it + gets applied automatically to all test cases found in that class. + + 2. If called with a single function as its only positional argument and + no additional keyword arguments, it attaches the mark to the function, + containing all the arguments already stored internally in the + ``MarkDecorator``. + + 3. When called in any other case, it returns a new ``MarkDecorator`` + instance with the original ``MarkDecorator``'s content updated with + the arguments passed to this call. + + Note: The rules above prevent a ``MarkDecorator`` from storing only a + single function or class reference as its positional argument with no + additional keyword or positional arguments. You can work around this by + using `with_args()`. + """ + + mark: Mark + + def __init__(self, mark: Mark, *, _ispytest: bool = False) -> None: + """:meta private:""" + check_ispytest(_ispytest) + self.mark = mark + + @property + def name(self) -> str: + """Alias for mark.name.""" + return self.mark.name + + @property + def args(self) -> Tuple[Any, ...]: + """Alias for mark.args.""" + return self.mark.args + + @property + def kwargs(self) -> Mapping[str, Any]: + """Alias for mark.kwargs.""" + return self.mark.kwargs + + @property + def markname(self) -> str: + """:meta private:""" + return self.name # for backward-compat (2.4.1 had this attr) + + def with_args(self, *args: object, **kwargs: object) -> "MarkDecorator": + """Return a MarkDecorator with extra arguments added. + + Unlike calling the MarkDecorator, with_args() can be used even + if the sole argument is a callable/class. 
+ """ + mark = Mark(self.name, args, kwargs, _ispytest=True) + return MarkDecorator(self.mark.combined_with(mark), _ispytest=True) + + # Type ignored because the overloads overlap with an incompatible + # return type. Not much we can do about that. Thankfully mypy picks + # the first match so it works out even if we break the rules. + @overload + def __call__(self, arg: Markable) -> Markable: # type: ignore[overload-overlap] + pass + + @overload + def __call__(self, *args: object, **kwargs: object) -> "MarkDecorator": + pass + + def __call__(self, *args: object, **kwargs: object): + """Call the MarkDecorator.""" + if args and not kwargs: + func = args[0] + is_class = inspect.isclass(func) + if len(args) == 1 and (istestfunc(func) or is_class): + store_mark(func, self.mark, stacklevel=3) + return func + return self.with_args(*args, **kwargs) + + +def get_unpacked_marks( + obj: Union[object, type], + *, + consider_mro: bool = True, +) -> List[Mark]: + """Obtain the unpacked marks that are stored on an object. + + If obj is a class and consider_mro is true, return marks applied to + this class and all of its super-classes in MRO order. If consider_mro + is false, only return marks applied directly to this class. + """ + if isinstance(obj, type): + if not consider_mro: + mark_lists = [obj.__dict__.get("pytestmark", [])] + else: + mark_lists = [ + x.__dict__.get("pytestmark", []) for x in reversed(obj.__mro__) + ] + mark_list = [] + for item in mark_lists: + if isinstance(item, list): + mark_list.extend(item) + else: + mark_list.append(item) + else: + mark_attribute = getattr(obj, "pytestmark", []) + if isinstance(mark_attribute, list): + mark_list = mark_attribute + else: + mark_list = [mark_attribute] + return list(normalize_mark_list(mark_list)) + + +def normalize_mark_list( + mark_list: Iterable[Union[Mark, MarkDecorator]], +) -> Iterable[Mark]: + """ + Normalize an iterable of Mark or MarkDecorator objects into a list of marks + by retrieving the `mark` attribute on MarkDecorator instances. + + :param mark_list: marks to normalize + :returns: A new list of the extracted Mark objects + """ + for mark in mark_list: + mark_obj = getattr(mark, "mark", mark) + if not isinstance(mark_obj, Mark): + raise TypeError(f"got {mark_obj!r} instead of Mark") + yield mark_obj + + +def store_mark(obj, mark: Mark, *, stacklevel: int = 2) -> None: + """Store a Mark on an object. + + This is used to implement the Mark declarations/decorators correctly. + """ + assert isinstance(mark, Mark), mark + + from ..fixtures import getfixturemarker + + if getfixturemarker(obj) is not None: + warnings.warn(MARKED_FIXTURE, stacklevel=stacklevel) + + # Always reassign name to avoid updating pytestmark in a reference that + # was only borrowed. + obj.pytestmark = [*get_unpacked_marks(obj, consider_mro=False), mark] + + +# Typing for builtin pytest marks. This is cheating; it gives builtin marks +# special privilege, and breaks modularity. But practicality beats purity... +if TYPE_CHECKING: + from _pytest.scope import _ScopeName + + class _SkipMarkDecorator(MarkDecorator): + @overload # type: ignore[override,no-overload-impl] + def __call__(self, arg: Markable) -> Markable: ... + + @overload + def __call__(self, reason: str = ...) -> "MarkDecorator": ... + + class _SkipifMarkDecorator(MarkDecorator): + def __call__( # type: ignore[override] + self, + condition: Union[str, bool] = ..., + *conditions: Union[str, bool], + reason: str = ..., + ) -> MarkDecorator: ... 
+ + class _XfailMarkDecorator(MarkDecorator): + @overload # type: ignore[override,no-overload-impl] + def __call__(self, arg: Markable) -> Markable: ... + + @overload + def __call__( + self, + condition: Union[str, bool] = False, + *conditions: Union[str, bool], + reason: str = ..., + run: bool = ..., + raises: Union[ + None, Type[BaseException], Tuple[Type[BaseException], ...] + ] = ..., + strict: bool = ..., + ) -> MarkDecorator: ... + + class _ParametrizeMarkDecorator(MarkDecorator): + def __call__( # type: ignore[override] + self, + argnames: Union[str, Sequence[str]], + argvalues: Iterable[Union[ParameterSet, Sequence[object], object]], + *, + indirect: Union[bool, Sequence[str]] = ..., + ids: Optional[ + Union[ + Iterable[Union[None, str, float, int, bool]], + Callable[[Any], Optional[object]], + ] + ] = ..., + scope: Optional[_ScopeName] = ..., + ) -> MarkDecorator: ... + + class _UsefixturesMarkDecorator(MarkDecorator): + def __call__(self, *fixtures: str) -> MarkDecorator: # type: ignore[override] + ... + + class _FilterwarningsMarkDecorator(MarkDecorator): + def __call__(self, *filters: str) -> MarkDecorator: # type: ignore[override] + ... + + +@final +class MarkGenerator: + """Factory for :class:`MarkDecorator` objects - exposed as + a ``pytest.mark`` singleton instance. + + Example:: + + import pytest + + + @pytest.mark.slowtest + def test_function(): + pass + + applies a 'slowtest' :class:`Mark` on ``test_function``. + """ + + # See TYPE_CHECKING above. + if TYPE_CHECKING: + skip: _SkipMarkDecorator + skipif: _SkipifMarkDecorator + xfail: _XfailMarkDecorator + parametrize: _ParametrizeMarkDecorator + usefixtures: _UsefixturesMarkDecorator + filterwarnings: _FilterwarningsMarkDecorator + + def __init__(self, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + self._config: Optional[Config] = None + self._markers: Set[str] = set() + + def __getattr__(self, name: str) -> MarkDecorator: + """Generate a new :class:`MarkDecorator` with the given name.""" + if name[0] == "_": + raise AttributeError("Marker name must NOT start with underscore") + + if self._config is not None: + # We store a set of markers as a performance optimisation - if a mark + # name is in the set we definitely know it, but a mark may be known and + # not in the set. We therefore start by updating the set! + if name not in self._markers: + for line in self._config.getini("markers"): + # example lines: "skipif(condition): skip the given test if..." + # or "hypothesis: tests which use Hypothesis", so to get the + # marker name we split on both `:` and `(`. + marker = line.split(":")[0].split("(")[0].strip() + self._markers.add(marker) + + # If the name is not in the set of known marks after updating, + # then it really is time to issue a warning or an error. + if name not in self._markers: + if self._config.option.strict_markers or self._config.option.strict: + fail( + f"{name!r} not found in `markers` configuration option", + pytrace=False, + ) + + # Raise a specific error for common misspellings of "parametrize". + if name in ["parameterize", "parametrise", "parameterise"]: + __tracebackhide__ = True + fail(f"Unknown '{name}' mark, did you mean 'parametrize'?") + + warnings.warn( + "Unknown pytest.mark.%s - is this a typo? 
You can register " + "custom marks to avoid this warning - for details, see " + "https://docs.pytest.org/en/stable/how-to/mark.html" % name, + PytestUnknownMarkWarning, + 2, + ) + + return MarkDecorator(Mark(name, (), {}, _ispytest=True), _ispytest=True) + + +MARK_GEN = MarkGenerator(_ispytest=True) + + +@final +class NodeKeywords(MutableMapping[str, Any]): + __slots__ = ("node", "parent", "_markers") + + def __init__(self, node: "Node") -> None: + self.node = node + self.parent = node.parent + self._markers = {node.name: True} + + def __getitem__(self, key: str) -> Any: + try: + return self._markers[key] + except KeyError: + if self.parent is None: + raise + return self.parent.keywords[key] + + def __setitem__(self, key: str, value: Any) -> None: + self._markers[key] = value + + # Note: we could've avoided explicitly implementing some of the methods + # below and use the collections.abc fallback, but that would be slow. + + def __contains__(self, key: object) -> bool: + return ( + key in self._markers + or self.parent is not None + and key in self.parent.keywords + ) + + def update( # type: ignore[override] + self, + other: Union[Mapping[str, Any], Iterable[Tuple[str, Any]]] = (), + **kwds: Any, + ) -> None: + self._markers.update(other) + self._markers.update(kwds) + + def __delitem__(self, key: str) -> None: + raise ValueError("cannot delete key in keywords dict") + + def __iter__(self) -> Iterator[str]: + # Doesn't need to be fast. + yield from self._markers + if self.parent is not None: + for keyword in self.parent.keywords: + # self._marks and self.parent.keywords can have duplicates. + if keyword not in self._markers: + yield keyword + + def __len__(self) -> int: + # Doesn't need to be fast. + return sum(1 for keyword in self) + + def __repr__(self) -> str: + return f"" diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/monkeypatch.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/monkeypatch.py new file mode 100644 index 000000000..3f398df76 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/monkeypatch.py @@ -0,0 +1,416 @@ +# mypy: allow-untyped-defs +"""Monkeypatching and mocking functionality.""" + +from contextlib import contextmanager +import os +import re +import sys +from typing import Any +from typing import final +from typing import Generator +from typing import List +from typing import Mapping +from typing import MutableMapping +from typing import Optional +from typing import overload +from typing import Tuple +from typing import TypeVar +from typing import Union +import warnings + +from _pytest.fixtures import fixture +from _pytest.warning_types import PytestWarning + + +RE_IMPORT_ERROR_NAME = re.compile(r"^No module named (.*)$") + + +K = TypeVar("K") +V = TypeVar("V") + + +@fixture +def monkeypatch() -> Generator["MonkeyPatch", None, None]: + """A convenient fixture for monkey-patching. 
+ + The fixture provides these methods to modify objects, dictionaries, or + :data:`os.environ`: + + * :meth:`monkeypatch.setattr(obj, name, value, raising=True) ` + * :meth:`monkeypatch.delattr(obj, name, raising=True) ` + * :meth:`monkeypatch.setitem(mapping, name, value) ` + * :meth:`monkeypatch.delitem(obj, name, raising=True) ` + * :meth:`monkeypatch.setenv(name, value, prepend=None) ` + * :meth:`monkeypatch.delenv(name, raising=True) ` + * :meth:`monkeypatch.syspath_prepend(path) ` + * :meth:`monkeypatch.chdir(path) ` + * :meth:`monkeypatch.context() ` + + All modifications will be undone after the requesting test function or + fixture has finished. The ``raising`` parameter determines if a :class:`KeyError` + or :class:`AttributeError` will be raised if the set/deletion operation does not have the + specified target. + + To undo modifications done by the fixture in a contained scope, + use :meth:`context() `. + """ + mpatch = MonkeyPatch() + yield mpatch + mpatch.undo() + + +def resolve(name: str) -> object: + # Simplified from zope.dottedname. + parts = name.split(".") + + used = parts.pop(0) + found: object = __import__(used) + for part in parts: + used += "." + part + try: + found = getattr(found, part) + except AttributeError: + pass + else: + continue + # We use explicit un-nesting of the handling block in order + # to avoid nested exceptions. + try: + __import__(used) + except ImportError as ex: + expected = str(ex).split()[-1] + if expected == used: + raise + else: + raise ImportError(f"import error in {used}: {ex}") from ex + found = annotated_getattr(found, part, used) + return found + + +def annotated_getattr(obj: object, name: str, ann: str) -> object: + try: + obj = getattr(obj, name) + except AttributeError as e: + raise AttributeError( + f"{type(obj).__name__!r} object at {ann} has no attribute {name!r}" + ) from e + return obj + + +def derive_importpath(import_path: str, raising: bool) -> Tuple[str, object]: + if not isinstance(import_path, str) or "." not in import_path: + raise TypeError(f"must be absolute import path string, not {import_path!r}") + module, attr = import_path.rsplit(".", 1) + target = resolve(module) + if raising: + annotated_getattr(target, attr, ann=module) + return attr, target + + +class Notset: + def __repr__(self) -> str: + return "" + + +notset = Notset() + + +@final +class MonkeyPatch: + """Helper to conveniently monkeypatch attributes/items/environment + variables/syspath. + + Returned by the :fixture:`monkeypatch` fixture. + + .. versionchanged:: 6.2 + Can now also be used directly as `pytest.MonkeyPatch()`, for when + the fixture is not available. In this case, use + :meth:`with MonkeyPatch.context() as mp: ` or remember to call + :meth:`undo` explicitly. + """ + + def __init__(self) -> None: + self._setattr: List[Tuple[object, str, object]] = [] + self._setitem: List[Tuple[Mapping[Any, Any], object, object]] = [] + self._cwd: Optional[str] = None + self._savesyspath: Optional[List[str]] = None + + @classmethod + @contextmanager + def context(cls) -> Generator["MonkeyPatch", None, None]: + """Context manager that returns a new :class:`MonkeyPatch` object + which undoes any patching done inside the ``with`` block upon exit. + + Example: + .. 
code-block:: python + + import functools + + + def test_partial(monkeypatch): + with monkeypatch.context() as m: + m.setattr(functools, "partial", 3) + + Useful in situations where it is desired to undo some patches before the test ends, + such as mocking ``stdlib`` functions that might break pytest itself if mocked (for examples + of this see :issue:`3290`). + """ + m = cls() + try: + yield m + finally: + m.undo() + + @overload + def setattr( + self, + target: str, + name: object, + value: Notset = ..., + raising: bool = ..., + ) -> None: ... + + @overload + def setattr( + self, + target: object, + name: str, + value: object, + raising: bool = ..., + ) -> None: ... + + def setattr( + self, + target: Union[str, object], + name: Union[object, str], + value: object = notset, + raising: bool = True, + ) -> None: + """ + Set attribute value on target, memorizing the old value. + + For example: + + .. code-block:: python + + import os + + monkeypatch.setattr(os, "getcwd", lambda: "/") + + The code above replaces the :func:`os.getcwd` function by a ``lambda`` which + always returns ``"/"``. + + For convenience, you can specify a string as ``target`` which + will be interpreted as a dotted import path, with the last part + being the attribute name: + + .. code-block:: python + + monkeypatch.setattr("os.getcwd", lambda: "/") + + Raises :class:`AttributeError` if the attribute does not exist, unless + ``raising`` is set to False. + + **Where to patch** + + ``monkeypatch.setattr`` works by (temporarily) changing the object that a name points to with another one. + There can be many names pointing to any individual object, so for patching to work you must ensure + that you patch the name used by the system under test. + + See the section :ref:`Where to patch ` in the :mod:`unittest.mock` + docs for a complete explanation, which is meant for :func:`unittest.mock.patch` but + applies to ``monkeypatch.setattr`` as well. + """ + __tracebackhide__ = True + import inspect + + if isinstance(value, Notset): + if not isinstance(target, str): + raise TypeError( + "use setattr(target, name, value) or " + "setattr(target, value) with target being a dotted " + "import string" + ) + value = name + name, target = derive_importpath(target, raising) + else: + if not isinstance(name, str): + raise TypeError( + "use setattr(target, name, value) with name being a string or " + "setattr(target, value) with target being a dotted " + "import string" + ) + + oldval = getattr(target, name, notset) + if raising and oldval is notset: + raise AttributeError(f"{target!r} has no attribute {name!r}") + + # avoid class descriptors like staticmethod/classmethod + if inspect.isclass(target): + oldval = target.__dict__.get(name, notset) + self._setattr.append((target, name, oldval)) + setattr(target, name, value) + + def delattr( + self, + target: Union[object, str], + name: Union[str, Notset] = notset, + raising: bool = True, + ) -> None: + """Delete attribute ``name`` from ``target``. + + If no ``name`` is specified and ``target`` is a string + it will be interpreted as a dotted import path with the + last part being the attribute name. + + Raises AttributeError it the attribute does not exist, unless + ``raising`` is set to False. 
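A hedged illustration of ``setattr``/``delattr`` and the "where to patch" note above; ``app.service`` is an invented module, not part of pytest::

    def test_uses_patched_cwd(monkeypatch):
        import app.service  # hypothetical module doing `from os import getcwd`

        # Patch the name the code under test actually looks up:
        monkeypatch.setattr(app.service, "getcwd", lambda: "/tmp")
        assert app.service.work() == "/tmp"

        # The dotted-string form resolves the same attribute:
        monkeypatch.setattr("app.service.getcwd", lambda: "/data")

        # delattr() mirrors setattr() for removing attributes:
        monkeypatch.delattr("app.service.OPTIONAL_HOOK", raising=False)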
+ """ + __tracebackhide__ = True + import inspect + + if isinstance(name, Notset): + if not isinstance(target, str): + raise TypeError( + "use delattr(target, name) or " + "delattr(target) with target being a dotted " + "import string" + ) + name, target = derive_importpath(target, raising) + + if not hasattr(target, name): + if raising: + raise AttributeError(name) + else: + oldval = getattr(target, name, notset) + # Avoid class descriptors like staticmethod/classmethod. + if inspect.isclass(target): + oldval = target.__dict__.get(name, notset) + self._setattr.append((target, name, oldval)) + delattr(target, name) + + def setitem(self, dic: Mapping[K, V], name: K, value: V) -> None: + """Set dictionary entry ``name`` to value.""" + self._setitem.append((dic, name, dic.get(name, notset))) + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + dic[name] = value # type: ignore[index] + + def delitem(self, dic: Mapping[K, V], name: K, raising: bool = True) -> None: + """Delete ``name`` from dict. + + Raises ``KeyError`` if it doesn't exist, unless ``raising`` is set to + False. + """ + if name not in dic: + if raising: + raise KeyError(name) + else: + self._setitem.append((dic, name, dic.get(name, notset))) + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + del dic[name] # type: ignore[attr-defined] + + def setenv(self, name: str, value: str, prepend: Optional[str] = None) -> None: + """Set environment variable ``name`` to ``value``. + + If ``prepend`` is a character, read the current environment variable + value and prepend the ``value`` adjoined with the ``prepend`` + character. + """ + if not isinstance(value, str): + warnings.warn( # type: ignore[unreachable] + PytestWarning( + f"Value of environment variable {name} type should be str, but got " + f"{value!r} (type: {type(value).__name__}); converted to str implicitly" + ), + stacklevel=2, + ) + value = str(value) + if prepend and name in os.environ: + value = value + prepend + os.environ[name] + self.setitem(os.environ, name, value) + + def delenv(self, name: str, raising: bool = True) -> None: + """Delete ``name`` from the environment. + + Raises ``KeyError`` if it does not exist, unless ``raising`` is set to + False. + """ + environ: MutableMapping[str, str] = os.environ + self.delitem(environ, name, raising=raising) + + def syspath_prepend(self, path) -> None: + """Prepend ``path`` to ``sys.path`` list of import locations.""" + if self._savesyspath is None: + self._savesyspath = sys.path[:] + sys.path.insert(0, str(path)) + + # https://github.com/pypa/setuptools/blob/d8b901bc/docs/pkg_resources.txt#L162-L171 + # this is only needed when pkg_resources was already loaded by the namespace package + if "pkg_resources" in sys.modules: + from pkg_resources import fixup_namespace_packages + + fixup_namespace_packages(str(path)) + + # A call to syspathinsert() usually means that the caller wants to + # import some dynamically created files, thus with python3 we + # invalidate its import caches. + # This is especially important when any namespace package is in use, + # since then the mtime based FileFinder cache (that gets created in + # this case already) gets not invalidated when writing the new files + # quickly afterwards. + from importlib import invalidate_caches + + invalidate_caches() + + def chdir(self, path: Union[str, "os.PathLike[str]"]) -> None: + """Change the current working directory to the specified path. + + :param path: + The path to change into. 
+ """ + if self._cwd is None: + self._cwd = os.getcwd() + os.chdir(path) + + def undo(self) -> None: + """Undo previous changes. + + This call consumes the undo stack. Calling it a second time has no + effect unless you do more monkeypatching after the undo call. + + There is generally no need to call `undo()`, since it is + called automatically during tear-down. + + .. note:: + The same `monkeypatch` fixture is used across a + single test function invocation. If `monkeypatch` is used both by + the test function itself and one of the test fixtures, + calling `undo()` will undo all of the changes made in + both functions. + + Prefer to use :meth:`context() ` instead. + """ + for obj, name, value in reversed(self._setattr): + if value is not notset: + setattr(obj, name, value) + else: + delattr(obj, name) + self._setattr[:] = [] + for dictionary, key, value in reversed(self._setitem): + if value is notset: + try: + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + del dictionary[key] # type: ignore[attr-defined] + except KeyError: + pass # Was already deleted, so we have the desired state. + else: + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + dictionary[key] = value # type: ignore[index] + self._setitem[:] = [] + if self._savesyspath is not None: + sys.path[:] = self._savesyspath + self._savesyspath = None + + if self._cwd is not None: + os.chdir(self._cwd) + self._cwd = None diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/nodes.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/nodes.py new file mode 100644 index 000000000..974d756a2 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/nodes.py @@ -0,0 +1,771 @@ +# mypy: allow-untyped-defs +import abc +from functools import cached_property +from inspect import signature +import os +import pathlib +from pathlib import Path +from typing import Any +from typing import Callable +from typing import cast +from typing import Iterable +from typing import Iterator +from typing import List +from typing import MutableMapping +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union +import warnings + +import pluggy + +import _pytest._code +from _pytest._code import getfslineno +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import TerminalRepr +from _pytest._code.code import Traceback +from _pytest.compat import LEGACY_PATH +from _pytest.config import Config +from _pytest.config import ConftestImportFailure +from _pytest.config.compat import _check_path +from _pytest.deprecated import NODE_CTOR_FSPATH_ARG +from _pytest.mark.structures import Mark +from _pytest.mark.structures import MarkDecorator +from _pytest.mark.structures import NodeKeywords +from _pytest.outcomes import fail +from _pytest.pathlib import absolutepath +from _pytest.pathlib import commonpath +from _pytest.stash import Stash +from _pytest.warning_types import PytestWarning + + +if TYPE_CHECKING: + from typing import Self + + # Imported here due to circular import. 
+ from _pytest._code.code import _TracebackStyle + from _pytest.main import Session + + +SEP = "/" + +tracebackcutdir = Path(_pytest.__file__).parent + + +_T = TypeVar("_T") + + +def _imply_path( + node_type: Type["Node"], + path: Optional[Path], + fspath: Optional[LEGACY_PATH], +) -> Path: + if fspath is not None: + warnings.warn( + NODE_CTOR_FSPATH_ARG.format( + node_type_name=node_type.__name__, + ), + stacklevel=6, + ) + if path is not None: + if fspath is not None: + _check_path(path, fspath) + return path + else: + assert fspath is not None + return Path(fspath) + + +_NodeType = TypeVar("_NodeType", bound="Node") + + +class NodeMeta(abc.ABCMeta): + """Metaclass used by :class:`Node` to enforce that direct construction raises + :class:`Failed`. + + This behaviour supports the indirection introduced with :meth:`Node.from_parent`, + the named constructor to be used instead of direct construction. The design + decision to enforce indirection with :class:`NodeMeta` was made as a + temporary aid for refactoring the collection tree, which was diagnosed to + have :class:`Node` objects whose creational patterns were overly entangled. + Once the refactoring is complete, this metaclass can be removed. + + See https://github.com/pytest-dev/pytest/projects/3 for an overview of the + progress on detangling the :class:`Node` classes. + """ + + def __call__(cls, *k, **kw) -> NoReturn: + msg = ( + "Direct construction of {name} has been deprecated, please use {name}.from_parent.\n" + "See " + "https://docs.pytest.org/en/stable/deprecations.html#node-construction-changed-to-node-from-parent" + " for more details." + ).format(name=f"{cls.__module__}.{cls.__name__}") + fail(msg, pytrace=False) + + def _create(cls: Type[_T], *k, **kw) -> _T: + try: + return super().__call__(*k, **kw) # type: ignore[no-any-return,misc] + except TypeError: + sig = signature(getattr(cls, "__init__")) + known_kw = {k: v for k, v in kw.items() if k in sig.parameters} + from .warning_types import PytestDeprecationWarning + + warnings.warn( + PytestDeprecationWarning( + f"{cls} is not using a cooperative constructor and only takes {set(known_kw)}.\n" + "See https://docs.pytest.org/en/stable/deprecations.html" + "#constructors-of-custom-pytest-node-subclasses-should-take-kwargs " + "for more details." + ) + ) + + return super().__call__(*k, **known_kw) # type: ignore[no-any-return,misc] + + +class Node(abc.ABC, metaclass=NodeMeta): + r"""Base class of :class:`Collector` and :class:`Item`, the components of + the test collection tree. + + ``Collector``\'s are the internal nodes of the tree, and ``Item``\'s are the + leaf nodes. + """ + + # Implemented in the legacypath plugin. + #: A ``LEGACY_PATH`` copy of the :attr:`path` attribute. Intended for usage + #: for methods not migrated to ``pathlib.Path`` yet, such as + #: :meth:`Item.reportinfo `. Will be deprecated in + #: a future release, prefer using :attr:`path` instead. + fspath: LEGACY_PATH + + # Use __slots__ to make attribute access faster. + # Note that __dict__ is still available. + __slots__ = ( + "name", + "parent", + "config", + "session", + "path", + "_nodeid", + "_store", + "__dict__", + ) + + def __init__( + self, + name: str, + parent: "Optional[Node]" = None, + config: Optional[Config] = None, + session: "Optional[Session]" = None, + fspath: Optional[LEGACY_PATH] = None, + path: Optional[Path] = None, + nodeid: Optional[str] = None, + ) -> None: + #: A unique name within the scope of the parent node. + self.name: str = name + + #: The parent collector node. 
+ self.parent = parent + + if config: + #: The pytest config object. + self.config: Config = config + else: + if not parent: + raise TypeError("config or parent must be provided") + self.config = parent.config + + if session: + #: The pytest session this node is part of. + self.session: Session = session + else: + if not parent: + raise TypeError("session or parent must be provided") + self.session = parent.session + + if path is None and fspath is None: + path = getattr(parent, "path", None) + #: Filesystem path where this node was collected from (can be None). + self.path: pathlib.Path = _imply_path(type(self), path, fspath=fspath) + + # The explicit annotation is to avoid publicly exposing NodeKeywords. + #: Keywords/markers collected from all scopes. + self.keywords: MutableMapping[str, Any] = NodeKeywords(self) + + #: The marker objects belonging to this node. + self.own_markers: List[Mark] = [] + + #: Allow adding of extra keywords to use for matching. + self.extra_keyword_matches: Set[str] = set() + + if nodeid is not None: + assert "::()" not in nodeid + self._nodeid = nodeid + else: + if not self.parent: + raise TypeError("nodeid or parent must be provided") + self._nodeid = self.parent.nodeid + "::" + self.name + + #: A place where plugins can store information on the node for their + #: own use. + self.stash: Stash = Stash() + # Deprecated alias. Was never public. Can be removed in a few releases. + self._store = self.stash + + @classmethod + def from_parent(cls, parent: "Node", **kw) -> "Self": + """Public constructor for Nodes. + + This indirection got introduced in order to enable removing + the fragile logic from the node constructors. + + Subclasses can use ``super().from_parent(...)`` when overriding the + construction. + + :param parent: The parent node of this Node. + """ + if "config" in kw: + raise TypeError("config is not a valid argument for from_parent") + if "session" in kw: + raise TypeError("session is not a valid argument for from_parent") + return cls._create(parent=parent, **kw) + + @property + def ihook(self) -> pluggy.HookRelay: + """fspath-sensitive hook proxy used to call pytest hooks.""" + return self.session.gethookproxy(self.path) + + def __repr__(self) -> str: + return "<{} {}>".format(self.__class__.__name__, getattr(self, "name", None)) + + def warn(self, warning: Warning) -> None: + """Issue a warning for this Node. + + Warnings will be displayed after the test session, unless explicitly suppressed. + + :param Warning warning: + The warning instance to issue. + + :raises ValueError: If ``warning`` instance is not a subclass of Warning. + + Example usage: + + .. code-block:: python + + node.warn(PytestWarning("some message")) + node.warn(UserWarning("some message")) + + .. versionchanged:: 6.2 + Any subclass of :class:`Warning` is now accepted, rather than only + :class:`PytestWarning ` subclasses. + """ + # enforce type checks here to avoid getting a generic type error later otherwise. + if not isinstance(warning, Warning): + raise ValueError( + f"warning must be an instance of Warning or subclass, got {warning!r}" + ) + path, lineno = get_fslocation_from_item(self) + assert lineno is not None + warnings.warn_explicit( + warning, + category=None, + filename=str(path), + lineno=lineno + 1, + ) + + # Methods for ordering nodes. 
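A two-line reminder of the construction rule enforced by ``NodeMeta`` above (``MyItem`` stands in for any ``Node`` subclass)::

    item = MyItem(name="x", parent=parent)        # fails: direct construction is blocked
    item = MyItem.from_parent(parent, name="x")   # supported named constructor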
+ + @property + def nodeid(self) -> str: + """A ::-separated string denoting its collection tree address.""" + return self._nodeid + + def __hash__(self) -> int: + return hash(self._nodeid) + + def setup(self) -> None: + pass + + def teardown(self) -> None: + pass + + def iter_parents(self) -> Iterator["Node"]: + """Iterate over all parent collectors starting from and including self + up to the root of the collection tree. + + .. versionadded:: 8.1 + """ + parent: Optional[Node] = self + while parent is not None: + yield parent + parent = parent.parent + + def listchain(self) -> List["Node"]: + """Return a list of all parent collectors starting from the root of the + collection tree down to and including self.""" + chain = [] + item: Optional[Node] = self + while item is not None: + chain.append(item) + item = item.parent + chain.reverse() + return chain + + def add_marker( + self, marker: Union[str, MarkDecorator], append: bool = True + ) -> None: + """Dynamically add a marker object to the node. + + :param marker: + The marker. + :param append: + Whether to append the marker, or prepend it. + """ + from _pytest.mark import MARK_GEN + + if isinstance(marker, MarkDecorator): + marker_ = marker + elif isinstance(marker, str): + marker_ = getattr(MARK_GEN, marker) + else: + raise ValueError("is not a string or pytest.mark.* Marker") + self.keywords[marker_.name] = marker_ + if append: + self.own_markers.append(marker_.mark) + else: + self.own_markers.insert(0, marker_.mark) + + def iter_markers(self, name: Optional[str] = None) -> Iterator[Mark]: + """Iterate over all markers of the node. + + :param name: If given, filter the results by the name attribute. + :returns: An iterator of the markers of the node. + """ + return (x[1] for x in self.iter_markers_with_node(name=name)) + + def iter_markers_with_node( + self, name: Optional[str] = None + ) -> Iterator[Tuple["Node", Mark]]: + """Iterate over all markers of the node. + + :param name: If given, filter the results by the name attribute. + :returns: An iterator of (node, mark) tuples. + """ + for node in self.iter_parents(): + for mark in node.own_markers: + if name is None or getattr(mark, "name", None) == name: + yield node, mark + + @overload + def get_closest_marker(self, name: str) -> Optional[Mark]: ... + + @overload + def get_closest_marker(self, name: str, default: Mark) -> Mark: ... + + def get_closest_marker( + self, name: str, default: Optional[Mark] = None + ) -> Optional[Mark]: + """Return the first marker matching the name, from closest (for + example function) to farther level (for example module level). + + :param default: Fallback return value if no marker was found. + :param name: Name to filter by. + """ + return next(self.iter_markers(name=name), default) + + def listextrakeywords(self) -> Set[str]: + """Return a set of all extra keywords in self and any parents.""" + extra_keywords: Set[str] = set() + for item in self.listchain(): + extra_keywords.update(item.extra_keyword_matches) + return extra_keywords + + def listnames(self) -> List[str]: + return [x.name for x in self.listchain()] + + def addfinalizer(self, fin: Callable[[], object]) -> None: + """Register a function to be called without arguments when this node is + finalized. + + This method can only be called when this node is active + in a setup chain, for example during self.setup(). 
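A brief sketch of the marker helpers above as they might be used from a ``pytest_collection_modifyitems`` hook (the ``slow``, ``fast`` and ``timeout`` marker names are made up)::

    def pytest_collection_modifyitems(items):
        for item in items:
            if item.get_closest_marker("slow") is None:
                item.add_marker("fast")  # accepts a name or a pytest.mark.* object
            for node, mark in item.iter_markers_with_node(name="timeout"):
                print(f"{item.nodeid}: timeout from {node.name}: {mark.args}")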
+ """ + self.session._setupstate.addfinalizer(fin, self) + + def getparent(self, cls: Type[_NodeType]) -> Optional[_NodeType]: + """Get the closest parent node (including self) which is an instance of + the given class. + + :param cls: The node class to search for. + :returns: The node, if found. + """ + for node in self.iter_parents(): + if isinstance(node, cls): + return node + return None + + def _traceback_filter(self, excinfo: ExceptionInfo[BaseException]) -> Traceback: + return excinfo.traceback + + def _repr_failure_py( + self, + excinfo: ExceptionInfo[BaseException], + style: "Optional[_TracebackStyle]" = None, + ) -> TerminalRepr: + from _pytest.fixtures import FixtureLookupError + + if isinstance(excinfo.value, ConftestImportFailure): + excinfo = ExceptionInfo.from_exception(excinfo.value.cause) + if isinstance(excinfo.value, fail.Exception): + if not excinfo.value.pytrace: + style = "value" + if isinstance(excinfo.value, FixtureLookupError): + return excinfo.value.formatrepr() + + tbfilter: Union[bool, Callable[[ExceptionInfo[BaseException]], Traceback]] + if self.config.getoption("fulltrace", False): + style = "long" + tbfilter = False + else: + tbfilter = self._traceback_filter + if style == "auto": + style = "long" + # XXX should excinfo.getrepr record all data and toterminal() process it? + if style is None: + if self.config.getoption("tbstyle", "auto") == "short": + style = "short" + else: + style = "long" + + if self.config.getoption("verbose", 0) > 1: + truncate_locals = False + else: + truncate_locals = True + + # excinfo.getrepr() formats paths relative to the CWD if `abspath` is False. + # It is possible for a fixture/test to change the CWD while this code runs, which + # would then result in the user seeing confusing paths in the failure message. + # To fix this, if the CWD changed, always display the full absolute path. + # It will be better to just always display paths relative to invocation_dir, but + # this requires a lot of plumbing (#6428). + try: + abspath = Path(os.getcwd()) != self.config.invocation_params.dir + except OSError: + abspath = True + + return excinfo.getrepr( + funcargs=True, + abspath=abspath, + showlocals=self.config.getoption("showlocals", False), + style=style, + tbfilter=tbfilter, + truncate_locals=truncate_locals, + ) + + def repr_failure( + self, + excinfo: ExceptionInfo[BaseException], + style: "Optional[_TracebackStyle]" = None, + ) -> Union[str, TerminalRepr]: + """Return a representation of a collection or test failure. + + .. seealso:: :ref:`non-python tests` + + :param excinfo: Exception information for the failure. + """ + return self._repr_failure_py(excinfo, style) + + +def get_fslocation_from_item(node: "Node") -> Tuple[Union[str, Path], Optional[int]]: + """Try to extract the actual location from a node, depending on available attributes: + + * "location": a pair (path, lineno) + * "obj": a Python object that the node wraps. + * "path": just a path + + :rtype: A tuple of (str|Path, int) with filename and 0-based line number. + """ + # See Item.location. + location: Optional[Tuple[str, Optional[int], str]] = getattr(node, "location", None) + if location is not None: + return location[:2] + obj = getattr(node, "obj", None) + if obj is not None: + return getfslineno(obj) + return getattr(node, "path", "unknown location"), -1 + + +class Collector(Node, abc.ABC): + """Base class of all collectors. + + Collector create children through `collect()` and thus iteratively build + the collection tree. 
+ """ + + class CollectError(Exception): + """An error during collection, contains a custom message.""" + + @abc.abstractmethod + def collect(self) -> Iterable[Union["Item", "Collector"]]: + """Collect children (items and collectors) for this collector.""" + raise NotImplementedError("abstract") + + # TODO: This omits the style= parameter which breaks Liskov Substitution. + def repr_failure( # type: ignore[override] + self, excinfo: ExceptionInfo[BaseException] + ) -> Union[str, TerminalRepr]: + """Return a representation of a collection failure. + + :param excinfo: Exception information for the failure. + """ + if isinstance(excinfo.value, self.CollectError) and not self.config.getoption( + "fulltrace", False + ): + exc = excinfo.value + return str(exc.args[0]) + + # Respect explicit tbstyle option, but default to "short" + # (_repr_failure_py uses "long" with "fulltrace" option always). + tbstyle = self.config.getoption("tbstyle", "auto") + if tbstyle == "auto": + tbstyle = "short" + + return self._repr_failure_py(excinfo, style=tbstyle) + + def _traceback_filter(self, excinfo: ExceptionInfo[BaseException]) -> Traceback: + if hasattr(self, "path"): + traceback = excinfo.traceback + ntraceback = traceback.cut(path=self.path) + if ntraceback == traceback: + ntraceback = ntraceback.cut(excludepath=tracebackcutdir) + return ntraceback.filter(excinfo) + return excinfo.traceback + + +def _check_initialpaths_for_relpath(session: "Session", path: Path) -> Optional[str]: + for initial_path in session._initialpaths: + if commonpath(path, initial_path) == initial_path: + rel = str(path.relative_to(initial_path)) + return "" if rel == "." else rel + return None + + +class FSCollector(Collector, abc.ABC): + """Base class for filesystem collectors.""" + + def __init__( + self, + fspath: Optional[LEGACY_PATH] = None, + path_or_parent: Optional[Union[Path, Node]] = None, + path: Optional[Path] = None, + name: Optional[str] = None, + parent: Optional[Node] = None, + config: Optional[Config] = None, + session: Optional["Session"] = None, + nodeid: Optional[str] = None, + ) -> None: + if path_or_parent: + if isinstance(path_or_parent, Node): + assert parent is None + parent = cast(FSCollector, path_or_parent) + elif isinstance(path_or_parent, Path): + assert path is None + path = path_or_parent + + path = _imply_path(type(self), path, fspath=fspath) + if name is None: + name = path.name + if parent is not None and parent.path != path: + try: + rel = path.relative_to(parent.path) + except ValueError: + pass + else: + name = str(rel) + name = name.replace(os.sep, SEP) + self.path = path + + if session is None: + assert parent is not None + session = parent.session + + if nodeid is None: + try: + nodeid = str(self.path.relative_to(session.config.rootpath)) + except ValueError: + nodeid = _check_initialpaths_for_relpath(session, path) + + if nodeid and os.sep != SEP: + nodeid = nodeid.replace(os.sep, SEP) + + super().__init__( + name=name, + parent=parent, + config=config, + session=session, + nodeid=nodeid, + path=path, + ) + + @classmethod + def from_parent( + cls, + parent, + *, + fspath: Optional[LEGACY_PATH] = None, + path: Optional[Path] = None, + **kw, + ) -> "Self": + """The public constructor.""" + return super().from_parent(parent=parent, fspath=fspath, path=path, **kw) + + +class File(FSCollector, abc.ABC): + """Base class for collecting tests from a file. + + :ref:`non-python tests`. + """ + + +class Directory(FSCollector, abc.ABC): + """Base class for collecting files from a directory. 
+ + A basic directory collector does the following: goes over the files and + sub-directories in the directory and creates collectors for them by calling + the hooks :hook:`pytest_collect_directory` and :hook:`pytest_collect_file`, + after checking that they are not ignored using + :hook:`pytest_ignore_collect`. + + The default directory collectors are :class:`~pytest.Dir` and + :class:`~pytest.Package`. + + .. versionadded:: 8.0 + + :ref:`custom directory collectors`. + """ + + +class Item(Node, abc.ABC): + """Base class of all test invocation items. + + Note that for a single function there might be multiple test invocation items. + """ + + nextitem = None + + def __init__( + self, + name, + parent=None, + config: Optional[Config] = None, + session: Optional["Session"] = None, + nodeid: Optional[str] = None, + **kw, + ) -> None: + # The first two arguments are intentionally passed positionally, + # to keep plugins who define a node type which inherits from + # (pytest.Item, pytest.File) working (see issue #8435). + # They can be made kwargs when the deprecation above is done. + super().__init__( + name, + parent, + config=config, + session=session, + nodeid=nodeid, + **kw, + ) + self._report_sections: List[Tuple[str, str, str]] = [] + + #: A list of tuples (name, value) that holds user defined properties + #: for this test. + self.user_properties: List[Tuple[str, object]] = [] + + self._check_item_and_collector_diamond_inheritance() + + def _check_item_and_collector_diamond_inheritance(self) -> None: + """ + Check if the current type inherits from both File and Collector + at the same time, emitting a warning accordingly (#8447). + """ + cls = type(self) + + # We inject an attribute in the type to avoid issuing this warning + # for the same class more than once, which is not helpful. + # It is a hack, but was deemed acceptable in order to avoid + # flooding the user in the common case. + attr_name = "_pytest_diamond_inheritance_warning_shown" + if getattr(cls, attr_name, False): + return + setattr(cls, attr_name, True) + + problems = ", ".join( + base.__name__ for base in cls.__bases__ if issubclass(base, Collector) + ) + if problems: + warnings.warn( + f"{cls.__name__} is an Item subclass and should not be a collector, " + f"however its bases {problems} are collectors.\n" + "Please split the Collectors and the Item into separate node types.\n" + "Pytest Doc example: https://docs.pytest.org/en/latest/example/nonpython.html\n" + "example pull request on a plugin: https://github.com/asmeurer/pytest-flakes/pull/40/", + PytestWarning, + ) + + @abc.abstractmethod + def runtest(self) -> None: + """Run the test case for this item. + + Must be implemented by subclasses. + + .. seealso:: :ref:`non-python tests` + """ + raise NotImplementedError("runtest must be implemented by Item subclass") + + def add_report_section(self, when: str, key: str, content: str) -> None: + """Add a new report section, similar to what's done internally to add + stdout and stderr captured output:: + + item.add_report_section("call", "stdout", "report section contents") + + :param str when: + One of the possible capture states, ``"setup"``, ``"call"``, ``"teardown"``. + :param str key: + Name of the section, can be customized at will. Pytest uses ``"stdout"`` and + ``"stderr"`` internally. + :param str content: + The full contents as a string. 
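A compact sketch of a custom ``Item`` built on the API described above (``DataItem`` and its payload check are invented for illustration)::

    import pytest

    class DataItem(pytest.Item):
        def __init__(self, *, payload, **kwargs):
            super().__init__(**kwargs)
            self.payload = payload

        def runtest(self):
            # Record a user property and a report section, then do the "test".
            self.user_properties.append(("payload_size", len(self.payload)))
            self.add_report_section("call", "payload", repr(self.payload))
            if not self.payload:
                raise ValueError("empty payload")

        def reportinfo(self):
            return self.path, None, f"data item: {self.name}"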
+ """ + if content: + self._report_sections.append((when, key, content)) + + def reportinfo(self) -> Tuple[Union["os.PathLike[str]", str], Optional[int], str]: + """Get location information for this item for test reports. + + Returns a tuple with three elements: + + - The path of the test (default ``self.path``) + - The 0-based line number of the test (default ``None``) + - A name of the test to be shown (default ``""``) + + .. seealso:: :ref:`non-python tests` + """ + return self.path, None, "" + + @cached_property + def location(self) -> Tuple[str, Optional[int], str]: + """ + Returns a tuple of ``(relfspath, lineno, testname)`` for this item + where ``relfspath`` is file path relative to ``config.rootpath`` + and lineno is a 0-based line number. + """ + location = self.reportinfo() + path = absolutepath(location[0]) + relfspath = self.session._node_location_to_relpath(path) + assert type(location[2]) is str + return (relfspath, location[1], location[2]) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/outcomes.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/outcomes.py new file mode 100644 index 000000000..76d94accd --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/outcomes.py @@ -0,0 +1,302 @@ +"""Exception classes and constants handling test outcomes as well as +functions creating them.""" + +import sys +from typing import Any +from typing import Callable +from typing import cast +from typing import NoReturn +from typing import Optional +from typing import Protocol +from typing import Type +from typing import TypeVar + +from .warning_types import PytestDeprecationWarning + + +class OutcomeException(BaseException): + """OutcomeException and its subclass instances indicate and contain info + about test and collection outcomes.""" + + def __init__(self, msg: Optional[str] = None, pytrace: bool = True) -> None: + if msg is not None and not isinstance(msg, str): + error_msg = ( # type: ignore[unreachable] + "{} expected string as 'msg' parameter, got '{}' instead.\n" + "Perhaps you meant to use a mark?" + ) + raise TypeError(error_msg.format(type(self).__name__, type(msg).__name__)) + super().__init__(msg) + self.msg = msg + self.pytrace = pytrace + + def __repr__(self) -> str: + if self.msg is not None: + return self.msg + return f"<{self.__class__.__name__} instance>" + + __str__ = __repr__ + + +TEST_OUTCOME = (OutcomeException, Exception) + + +class Skipped(OutcomeException): + # XXX hackish: on 3k we fake to live in the builtins + # in order to have Skipped exception printing shorter/nicer + __module__ = "builtins" + + def __init__( + self, + msg: Optional[str] = None, + pytrace: bool = True, + allow_module_level: bool = False, + *, + _use_item_location: bool = False, + ) -> None: + super().__init__(msg=msg, pytrace=pytrace) + self.allow_module_level = allow_module_level + # If true, the skip location is reported as the item's location, + # instead of the place that raises the exception/calls skip(). + self._use_item_location = _use_item_location + + +class Failed(OutcomeException): + """Raised from an explicit call to pytest.fail().""" + + __module__ = "builtins" + + +class Exit(Exception): + """Raised for immediate program exits (no tracebacks/summaries).""" + + def __init__( + self, msg: str = "unknown reason", returncode: Optional[int] = None + ) -> None: + self.msg = msg + self.returncode = returncode + super().__init__(msg) + + +# Elaborate hack to work around https://github.com/python/mypy/issues/2087. 
+# Ideally would just be `exit.Exception = Exit` etc. + +_F = TypeVar("_F", bound=Callable[..., object]) +_ET = TypeVar("_ET", bound=Type[BaseException]) + + +class _WithException(Protocol[_F, _ET]): + Exception: _ET + __call__: _F + + +def _with_exception(exception_type: _ET) -> Callable[[_F], _WithException[_F, _ET]]: + def decorate(func: _F) -> _WithException[_F, _ET]: + func_with_exception = cast(_WithException[_F, _ET], func) + func_with_exception.Exception = exception_type + return func_with_exception + + return decorate + + +# Exposed helper methods. + + +@_with_exception(Exit) +def exit( + reason: str = "", + returncode: Optional[int] = None, +) -> NoReturn: + """Exit testing process. + + :param reason: + The message to show as the reason for exiting pytest. reason has a default value + only because `msg` is deprecated. + + :param returncode: + Return code to be used when exiting pytest. None means the same as ``0`` (no error), same as :func:`sys.exit`. + """ + __tracebackhide__ = True + raise Exit(reason, returncode) + + +@_with_exception(Skipped) +def skip( + reason: str = "", + *, + allow_module_level: bool = False, +) -> NoReturn: + """Skip an executing test with the given message. + + This function should be called only during testing (setup, call or teardown) or + during collection by using the ``allow_module_level`` flag. This function can + be called in doctests as well. + + :param reason: + The message to show the user as reason for the skip. + + :param allow_module_level: + Allows this function to be called at module level. + Raising the skip exception at module level will stop + the execution of the module and prevent the collection of all tests in the module, + even those defined before the `skip` call. + + Defaults to False. + + .. note:: + It is better to use the :ref:`pytest.mark.skipif ref` marker when + possible to declare a test to be skipped under certain conditions + like mismatching platforms or dependencies. + Similarly, use the ``# doctest: +SKIP`` directive (see :py:data:`doctest.SKIP`) + to skip a doctest statically. + """ + __tracebackhide__ = True + raise Skipped(msg=reason, allow_module_level=allow_module_level) + + +@_with_exception(Failed) +def fail(reason: str = "", pytrace: bool = True) -> NoReturn: + """Explicitly fail an executing test with the given message. + + :param reason: + The message to show the user as reason for the failure. + + :param pytrace: + If False, msg represents the full failure information and no + python traceback will be reported. + """ + __tracebackhide__ = True + raise Failed(msg=reason, pytrace=pytrace) + + +class XFailed(Failed): + """Raised from an explicit call to pytest.xfail().""" + + +@_with_exception(XFailed) +def xfail(reason: str = "") -> NoReturn: + """Imperatively xfail an executing test or setup function with the given reason. + + This function should be called only during testing (setup, call or teardown). + + No other code is executed after using ``xfail()`` (it is implemented + internally by raising an exception). + + :param reason: + The message to show the user as reason for the xfail. + + .. note:: + It is better to use the :ref:`pytest.mark.xfail ref` marker when + possible to declare a test to be xfailed under certain conditions + like known bugs or missing features. 
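A few lines showing how these outcome helpers are used imperatively, and how the attached ``.Exception`` classes can be caught (sketch only; the conditions are placeholders)::

    import sys
    import pytest

    def test_outcomes(tmp_path):
        if not (tmp_path / "fixture.json").exists():
            pytest.skip("no fixture data available")
        if sys.platform == "win32":
            pytest.xfail("known issue on Windows in this sketch")
        pytest.fail("explicit failure, no traceback", pytrace=False)

    # Each helper carries its exception class via _with_exception above:
    try:
        pytest.fail("boom", pytrace=False)
    except pytest.fail.Exception:  # i.e. Failed
        pass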
+ """ + __tracebackhide__ = True + raise XFailed(reason) + + +def importorskip( + modname: str, + minversion: Optional[str] = None, + reason: Optional[str] = None, + *, + exc_type: Optional[Type[ImportError]] = None, +) -> Any: + """Import and return the requested module ``modname``, or skip the + current test if the module cannot be imported. + + :param modname: + The name of the module to import. + :param minversion: + If given, the imported module's ``__version__`` attribute must be at + least this minimal version, otherwise the test is still skipped. + :param reason: + If given, this reason is shown as the message when the module cannot + be imported. + :param exc_type: + The exception that should be captured in order to skip modules. + Must be :py:class:`ImportError` or a subclass. + + If the module can be imported but raises :class:`ImportError`, pytest will + issue a warning to the user, as often users expect the module not to be + found (which would raise :class:`ModuleNotFoundError` instead). + + This warning can be suppressed by passing ``exc_type=ImportError`` explicitly. + + See :ref:`import-or-skip-import-error` for details. + + + :returns: + The imported module. This should be assigned to its canonical name. + + Example:: + + docutils = pytest.importorskip("docutils") + + .. versionadded:: 8.2 + + The ``exc_type`` parameter. + """ + import warnings + + __tracebackhide__ = True + compile(modname, "", "eval") # to catch syntaxerrors + + # Until pytest 9.1, we will warn the user if we catch ImportError (instead of ModuleNotFoundError), + # as this might be hiding an installation/environment problem, which is not usually what is intended + # when using importorskip() (#11523). + # In 9.1, to keep the function signature compatible, we just change the code below to: + # 1. Use `exc_type = ModuleNotFoundError` if `exc_type` is not given. + # 2. Remove `warn_on_import` and the warning handling. + if exc_type is None: + exc_type = ImportError + warn_on_import_error = True + else: + warn_on_import_error = False + + skipped: Optional[Skipped] = None + warning: Optional[Warning] = None + + with warnings.catch_warnings(): + # Make sure to ignore ImportWarnings that might happen because + # of existing directories with the same name we're trying to + # import but without a __init__.py file. + warnings.simplefilter("ignore") + + try: + __import__(modname) + except exc_type as exc: + # Do not raise or issue warnings inside the catch_warnings() block. + if reason is None: + reason = f"could not import {modname!r}: {exc}" + skipped = Skipped(reason, allow_module_level=True) + + if warn_on_import_error and not isinstance(exc, ModuleNotFoundError): + lines = [ + "", + f"Module '{modname}' was found, but when imported by pytest it raised:", + f" {exc!r}", + "In pytest 9.1 this warning will become an error by default.", + "You can fix the underlying problem, or alternatively overwrite this behavior and silence this " + "warning by passing exc_type=ImportError explicitly.", + "See https://docs.pytest.org/en/stable/deprecations.html#pytest-importorskip-default-behavior-regarding-importerror", + ] + warning = PytestDeprecationWarning("\n".join(lines)) + + if warning: + warnings.warn(warning, stacklevel=2) + if skipped: + raise skipped + + mod = sys.modules[modname] + if minversion is None: + return mod + verattr = getattr(mod, "__version__", None) + if minversion is not None: + # Imported lazily to improve start-up time. 
+ from packaging.version import Version + + if verattr is None or Version(verattr) < Version(minversion): + raise Skipped( + f"module {modname!r} has __version__ {verattr!r}, required is: {minversion!r}", + allow_module_level=True, + ) + return mod diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/pastebin.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/pastebin.py new file mode 100644 index 000000000..533d78c9a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/pastebin.py @@ -0,0 +1,112 @@ +# mypy: allow-untyped-defs +"""Submit failure or test session information to a pastebin service.""" + +from io import StringIO +import tempfile +from typing import IO +from typing import Union + +from _pytest.config import Config +from _pytest.config import create_terminal_writer +from _pytest.config.argparsing import Parser +from _pytest.stash import StashKey +from _pytest.terminal import TerminalReporter +import pytest + + +pastebinfile_key = StashKey[IO[bytes]]() + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting") + group._addoption( + "--pastebin", + metavar="mode", + action="store", + dest="pastebin", + default=None, + choices=["failed", "all"], + help="Send failed|all info to bpaste.net pastebin service", + ) + + +@pytest.hookimpl(trylast=True) +def pytest_configure(config: Config) -> None: + if config.option.pastebin == "all": + tr = config.pluginmanager.getplugin("terminalreporter") + # If no terminal reporter plugin is present, nothing we can do here; + # this can happen when this function executes in a worker node + # when using pytest-xdist, for example. + if tr is not None: + # pastebin file will be UTF-8 encoded binary file. + config.stash[pastebinfile_key] = tempfile.TemporaryFile("w+b") + oldwrite = tr._tw.write + + def tee_write(s, **kwargs): + oldwrite(s, **kwargs) + if isinstance(s, str): + s = s.encode("utf-8") + config.stash[pastebinfile_key].write(s) + + tr._tw.write = tee_write + + +def pytest_unconfigure(config: Config) -> None: + if pastebinfile_key in config.stash: + pastebinfile = config.stash[pastebinfile_key] + # Get terminal contents and delete file. + pastebinfile.seek(0) + sessionlog = pastebinfile.read() + pastebinfile.close() + del config.stash[pastebinfile_key] + # Undo our patching in the terminal reporter. + tr = config.pluginmanager.getplugin("terminalreporter") + del tr._tw.__dict__["write"] + # Write summary. + tr.write_sep("=", "Sending information to Paste Service") + pastebinurl = create_new_paste(sessionlog) + tr.write_line("pastebin session-log: %s\n" % pastebinurl) + + +def create_new_paste(contents: Union[str, bytes]) -> str: + """Create a new paste using the bpaste.net service. + + :contents: Paste contents string. + :returns: URL to the pasted contents, or an error message. 
+ """ + import re + from urllib.parse import urlencode + from urllib.request import urlopen + + params = {"code": contents, "lexer": "text", "expiry": "1week"} + url = "https://bpa.st" + try: + response: str = ( + urlopen(url, data=urlencode(params).encode("ascii")).read().decode("utf-8") + ) + except OSError as exc_info: # urllib errors + return "bad response: %s" % exc_info + m = re.search(r'href="/raw/(\w+)"', response) + if m: + return f"{url}/show/{m.group(1)}" + else: + return "bad response: invalid format ('" + response + "')" + + +def pytest_terminal_summary(terminalreporter: TerminalReporter) -> None: + if terminalreporter.config.option.pastebin != "failed": + return + if "failed" in terminalreporter.stats: + terminalreporter.write_sep("=", "Sending information to Paste Service") + for rep in terminalreporter.stats["failed"]: + try: + msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc + except AttributeError: + msg = terminalreporter._getfailureheadline(rep) + file = StringIO() + tw = create_terminal_writer(terminalreporter.config, file) + rep.toterminal(tw) + s = file.getvalue() + assert len(s) + pastebinurl = create_new_paste(s) + terminalreporter.write_line(f"{msg} --> {pastebinurl}") diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/pathlib.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/pathlib.py new file mode 100644 index 000000000..e14c2acd3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/pathlib.py @@ -0,0 +1,984 @@ +import atexit +import contextlib +from enum import Enum +from errno import EBADF +from errno import ELOOP +from errno import ENOENT +from errno import ENOTDIR +import fnmatch +from functools import partial +from importlib.machinery import ModuleSpec +import importlib.util +import itertools +import os +from os.path import expanduser +from os.path import expandvars +from os.path import isabs +from os.path import sep +from pathlib import Path +from pathlib import PurePath +from posixpath import sep as posix_sep +import shutil +import sys +import types +from types import ModuleType +from typing import Any +from typing import Callable +from typing import Dict +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Optional +from typing import Set +from typing import Tuple +from typing import Type +from typing import TypeVar +from typing import Union +import uuid +import warnings + +from _pytest.compat import assert_never +from _pytest.outcomes import skip +from _pytest.warning_types import PytestWarning + + +LOCK_TIMEOUT = 60 * 60 * 24 * 3 + + +_AnyPurePath = TypeVar("_AnyPurePath", bound=PurePath) + +# The following function, variables and comments were +# copied from cpython 3.9 Lib/pathlib.py file. 
+ +# EBADF - guard against macOS `stat` throwing EBADF +_IGNORED_ERRORS = (ENOENT, ENOTDIR, EBADF, ELOOP) + +_IGNORED_WINERRORS = ( + 21, # ERROR_NOT_READY - drive exists but is not accessible + 1921, # ERROR_CANT_RESOLVE_FILENAME - fix for broken symlink pointing to itself +) + + +def _ignore_error(exception: Exception) -> bool: + return ( + getattr(exception, "errno", None) in _IGNORED_ERRORS + or getattr(exception, "winerror", None) in _IGNORED_WINERRORS + ) + + +def get_lock_path(path: _AnyPurePath) -> _AnyPurePath: + return path.joinpath(".lock") + + +def on_rm_rf_error( + func: Optional[Callable[..., Any]], + path: str, + excinfo: Union[ + BaseException, + Tuple[Type[BaseException], BaseException, Optional[types.TracebackType]], + ], + *, + start_path: Path, +) -> bool: + """Handle known read-only errors during rmtree. + + The returned value is used only by our own tests. + """ + if isinstance(excinfo, BaseException): + exc = excinfo + else: + exc = excinfo[1] + + # Another process removed the file in the middle of the "rm_rf" (xdist for example). + # More context: https://github.com/pytest-dev/pytest/issues/5974#issuecomment-543799018 + if isinstance(exc, FileNotFoundError): + return False + + if not isinstance(exc, PermissionError): + warnings.warn( + PytestWarning(f"(rm_rf) error removing {path}\n{type(exc)}: {exc}") + ) + return False + + if func not in (os.rmdir, os.remove, os.unlink): + if func not in (os.open,): + warnings.warn( + PytestWarning( + f"(rm_rf) unknown function {func} when removing {path}:\n{type(exc)}: {exc}" + ) + ) + return False + + # Chmod + retry. + import stat + + def chmod_rw(p: str) -> None: + mode = os.stat(p).st_mode + os.chmod(p, mode | stat.S_IRUSR | stat.S_IWUSR) + + # For files, we need to recursively go upwards in the directories to + # ensure they all are also writable. + p = Path(path) + if p.is_file(): + for parent in p.parents: + chmod_rw(str(parent)) + # Stop when we reach the original path passed to rm_rf. + if parent == start_path: + break + chmod_rw(str(path)) + + func(path) + return True + + +def ensure_extended_length_path(path: Path) -> Path: + """Get the extended-length version of a path (Windows). + + On Windows, by default, the maximum length of a path (MAX_PATH) is 260 + characters, and operations on paths longer than that fail. But it is possible + to overcome this by converting the path to "extended-length" form before + performing the operation: + https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file#maximum-path-length-limitation + + On Windows, this function returns the extended-length absolute version of path. + On other platforms it returns path unchanged. 
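# Illustrative sketch of the prefixing rule described above and implemented by
# get_extended_length_path_str() just below (example paths are made up):
def _sketch_extended_length(path: str) -> str:
    if path.startswith("\\\\?\\"):      # already extended-length (incl. UNC form)
        return path
    if path.startswith("\\\\"):         # UNC share -> \\?\UNC\server\share\...
        return "\\\\?\\UNC\\" + path[2:]
    return "\\\\?\\" + path             # plain drive path -> \\?\C:\...

assert _sketch_extended_length("C:\\some\\long\\path") == "\\\\?\\C:\\some\\long\\path"
assert _sketch_extended_length("\\\\server\\share\\f") == "\\\\?\\UNC\\server\\share\\f"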
+ """ + if sys.platform.startswith("win32"): + path = path.resolve() + path = Path(get_extended_length_path_str(str(path))) + return path + + +def get_extended_length_path_str(path: str) -> str: + """Convert a path to a Windows extended length path.""" + long_path_prefix = "\\\\?\\" + unc_long_path_prefix = "\\\\?\\UNC\\" + if path.startswith((long_path_prefix, unc_long_path_prefix)): + return path + # UNC + if path.startswith("\\\\"): + return unc_long_path_prefix + path[2:] + return long_path_prefix + path + + +def rm_rf(path: Path) -> None: + """Remove the path contents recursively, even if some elements + are read-only.""" + path = ensure_extended_length_path(path) + onerror = partial(on_rm_rf_error, start_path=path) + if sys.version_info >= (3, 12): + shutil.rmtree(str(path), onexc=onerror) + else: + shutil.rmtree(str(path), onerror=onerror) + + +def find_prefixed(root: Path, prefix: str) -> Iterator["os.DirEntry[str]"]: + """Find all elements in root that begin with the prefix, case insensitive.""" + l_prefix = prefix.lower() + for x in os.scandir(root): + if x.name.lower().startswith(l_prefix): + yield x + + +def extract_suffixes(iter: Iterable["os.DirEntry[str]"], prefix: str) -> Iterator[str]: + """Return the parts of the paths following the prefix. + + :param iter: Iterator over path names. + :param prefix: Expected prefix of the path names. + """ + p_len = len(prefix) + for entry in iter: + yield entry.name[p_len:] + + +def find_suffixes(root: Path, prefix: str) -> Iterator[str]: + """Combine find_prefixes and extract_suffixes.""" + return extract_suffixes(find_prefixed(root, prefix), prefix) + + +def parse_num(maybe_num: str) -> int: + """Parse number path suffixes, returns -1 on error.""" + try: + return int(maybe_num) + except ValueError: + return -1 + + +def _force_symlink( + root: Path, target: Union[str, PurePath], link_to: Union[str, Path] +) -> None: + """Helper to create the current symlink. + + It's full of race conditions that are reasonably OK to ignore + for the context of best effort linking to the latest test run. + + The presumption being that in case of much parallelism + the inaccuracy is going to be acceptable. 
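# Illustrative sketch of the numbering scheme make_numbered_dir() (below) uses
# on top of find_suffixes()/parse_num() (above): unparsable suffixes count as
# -1 and the next directory gets max + 1 (directory names are made up).
def _sketch_parse_num(s: str) -> int:
    try:
        return int(s)
    except ValueError:
        return -1

existing = ["pytest-0", "pytest-1", "pytest-current"]
prefix = "pytest-"
max_existing = max((_sketch_parse_num(n[len(prefix):]) for n in existing), default=-1)
assert max_existing + 1 == 2  # the next run would create "pytest-2"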
+ """ + current_symlink = root.joinpath(target) + try: + current_symlink.unlink() + except OSError: + pass + try: + current_symlink.symlink_to(link_to) + except Exception: + pass + + +def make_numbered_dir(root: Path, prefix: str, mode: int = 0o700) -> Path: + """Create a directory with an increased number as suffix for the given prefix.""" + for i in range(10): + # try up to 10 times to create the folder + max_existing = max(map(parse_num, find_suffixes(root, prefix)), default=-1) + new_number = max_existing + 1 + new_path = root.joinpath(f"{prefix}{new_number}") + try: + new_path.mkdir(mode=mode) + except Exception: + pass + else: + _force_symlink(root, prefix + "current", new_path) + return new_path + else: + raise OSError( + "could not create numbered dir with prefix " + f"{prefix} in {root} after 10 tries" + ) + + +def create_cleanup_lock(p: Path) -> Path: + """Create a lock to prevent premature folder cleanup.""" + lock_path = get_lock_path(p) + try: + fd = os.open(str(lock_path), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644) + except FileExistsError as e: + raise OSError(f"cannot create lockfile in {p}") from e + else: + pid = os.getpid() + spid = str(pid).encode() + os.write(fd, spid) + os.close(fd) + if not lock_path.is_file(): + raise OSError("lock path got renamed after successful creation") + return lock_path + + +def register_cleanup_lock_removal( + lock_path: Path, register: Any = atexit.register +) -> Any: + """Register a cleanup function for removing a lock, by default on atexit.""" + pid = os.getpid() + + def cleanup_on_exit(lock_path: Path = lock_path, original_pid: int = pid) -> None: + current_pid = os.getpid() + if current_pid != original_pid: + # fork + return + try: + lock_path.unlink() + except OSError: + pass + + return register(cleanup_on_exit) + + +def maybe_delete_a_numbered_dir(path: Path) -> None: + """Remove a numbered directory if its lock can be obtained and it does + not seem to be in use.""" + path = ensure_extended_length_path(path) + lock_path = None + try: + lock_path = create_cleanup_lock(path) + parent = path.parent + + garbage = parent.joinpath(f"garbage-{uuid.uuid4()}") + path.rename(garbage) + rm_rf(garbage) + except OSError: + # known races: + # * other process did a cleanup at the same time + # * deletable folder was found + # * process cwd (Windows) + return + finally: + # If we created the lock, ensure we remove it even if we failed + # to properly remove the numbered dir. + if lock_path is not None: + try: + lock_path.unlink() + except OSError: + pass + + +def ensure_deletable(path: Path, consider_lock_dead_if_created_before: float) -> bool: + """Check if `path` is deletable based on whether the lock file is expired.""" + if path.is_symlink(): + return False + lock = get_lock_path(path) + try: + if not lock.is_file(): + return True + except OSError: + # we might not have access to the lock file at all, in this case assume + # we don't have access to the entire directory (#7491). + return False + try: + lock_time = lock.stat().st_mtime + except Exception: + return False + else: + if lock_time < consider_lock_dead_if_created_before: + # We want to ignore any errors while trying to remove the lock such as: + # - PermissionDenied, like the file permissions have changed since the lock creation; + # - FileNotFoundError, in case another pytest process got here first; + # and any other cause of failure. 
+ with contextlib.suppress(OSError): + lock.unlink() + return True + return False + + +def try_cleanup(path: Path, consider_lock_dead_if_created_before: float) -> None: + """Try to cleanup a folder if we can ensure it's deletable.""" + if ensure_deletable(path, consider_lock_dead_if_created_before): + maybe_delete_a_numbered_dir(path) + + +def cleanup_candidates(root: Path, prefix: str, keep: int) -> Iterator[Path]: + """List candidates for numbered directories to be removed - follows py.path.""" + max_existing = max(map(parse_num, find_suffixes(root, prefix)), default=-1) + max_delete = max_existing - keep + entries = find_prefixed(root, prefix) + entries, entries2 = itertools.tee(entries) + numbers = map(parse_num, extract_suffixes(entries2, prefix)) + for entry, number in zip(entries, numbers): + if number <= max_delete: + yield Path(entry) + + +def cleanup_dead_symlinks(root: Path) -> None: + for left_dir in root.iterdir(): + if left_dir.is_symlink(): + if not left_dir.resolve().exists(): + left_dir.unlink() + + +def cleanup_numbered_dir( + root: Path, prefix: str, keep: int, consider_lock_dead_if_created_before: float +) -> None: + """Cleanup for lock driven numbered directories.""" + if not root.exists(): + return + for path in cleanup_candidates(root, prefix, keep): + try_cleanup(path, consider_lock_dead_if_created_before) + for path in root.glob("garbage-*"): + try_cleanup(path, consider_lock_dead_if_created_before) + + cleanup_dead_symlinks(root) + + +def make_numbered_dir_with_cleanup( + root: Path, + prefix: str, + keep: int, + lock_timeout: float, + mode: int, +) -> Path: + """Create a numbered dir with a cleanup lock and remove old ones.""" + e = None + for i in range(10): + try: + p = make_numbered_dir(root, prefix, mode) + # Only lock the current dir when keep is not 0 + if keep != 0: + lock_path = create_cleanup_lock(p) + register_cleanup_lock_removal(lock_path) + except Exception as exc: + e = exc + else: + consider_lock_dead_if_created_before = p.stat().st_mtime - lock_timeout + # Register a cleanup for program exit + atexit.register( + cleanup_numbered_dir, + root, + prefix, + keep, + consider_lock_dead_if_created_before, + ) + return p + assert e is not None + raise e + + +def resolve_from_str(input: str, rootpath: Path) -> Path: + input = expanduser(input) + input = expandvars(input) + if isabs(input): + return Path(input) + else: + return rootpath.joinpath(input) + + +def fnmatch_ex(pattern: str, path: Union[str, "os.PathLike[str]"]) -> bool: + """A port of FNMatcher from py.path.common which works with PurePath() instances. + + The difference between this algorithm and PurePath.match() is that the + latter matches "**" glob expressions for each part of the path, while + this algorithm uses the whole path instead. + + For example: + "tests/foo/bar/doc/test_foo.py" matches pattern "tests/**/doc/test*.py" + with this algorithm, but not with PurePath.match(). + + This algorithm was ported to keep backward-compatibility with existing + settings which assume paths match according this logic. + + References: + * https://bugs.python.org/issue29249 + * https://bugs.python.org/issue34731 + """ + path = PurePath(path) + iswin32 = sys.platform.startswith("win") + + if iswin32 and sep not in pattern and posix_sep in pattern: + # Running on Windows, the pattern has no Windows path separators, + # and the pattern has one or more Posix path separators. Replace + # the Posix path separators with the Windows path separator. 
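# Illustrative sketch (POSIX-style separators assumed) of the whole-path
# matching the docstring above describes, using fnmatch.fnmatch the same way
# the return statement below does:
import fnmatch
from pathlib import PurePosixPath

p = "tests/foo/bar/doc/test_foo.py"
pat = "tests/**/doc/test*.py"
assert fnmatch.fnmatch(p, pat)          # whole-path match succeeds here
assert not PurePosixPath(p).match(pat)  # PurePath.match() matches part by part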
+ pattern = pattern.replace(posix_sep, sep) + + if sep not in pattern: + name = path.name + else: + name = str(path) + if path.is_absolute() and not os.path.isabs(pattern): + pattern = f"*{os.sep}{pattern}" + return fnmatch.fnmatch(name, pattern) + + +def parts(s: str) -> Set[str]: + parts = s.split(sep) + return {sep.join(parts[: i + 1]) or sep for i in range(len(parts))} + + +def symlink_or_skip( + src: Union["os.PathLike[str]", str], + dst: Union["os.PathLike[str]", str], + **kwargs: Any, +) -> None: + """Make a symlink, or skip the test in case symlinks are not supported.""" + try: + os.symlink(src, dst, **kwargs) + except OSError as e: + skip(f"symlinks not supported: {e}") + + +class ImportMode(Enum): + """Possible values for `mode` parameter of `import_path`.""" + + prepend = "prepend" + append = "append" + importlib = "importlib" + + +class ImportPathMismatchError(ImportError): + """Raised on import_path() if there is a mismatch of __file__'s. + + This can happen when `import_path` is called multiple times with different filenames that has + the same basename but reside in packages + (for example "/tests1/test_foo.py" and "/tests2/test_foo.py"). + """ + + +def import_path( + path: Union[str, "os.PathLike[str]"], + *, + mode: Union[str, ImportMode] = ImportMode.prepend, + root: Path, + consider_namespace_packages: bool, +) -> ModuleType: + """ + Import and return a module from the given path, which can be a file (a module) or + a directory (a package). + + :param path: + Path to the file to import. + + :param mode: + Controls the underlying import mechanism that will be used: + + * ImportMode.prepend: the directory containing the module (or package, taking + `__init__.py` files into account) will be put at the *start* of `sys.path` before + being imported with `importlib.import_module`. + + * ImportMode.append: same as `prepend`, but the directory will be appended + to the end of `sys.path`, if not already in `sys.path`. + + * ImportMode.importlib: uses more fine control mechanisms provided by `importlib` + to import the module, which avoids having to muck with `sys.path` at all. It effectively + allows having same-named test modules in different places. + + :param root: + Used as an anchor when mode == ImportMode.importlib to obtain + a unique name for the module being imported so it can safely be stored + into ``sys.modules``. + + :param consider_namespace_packages: + If True, consider namespace packages when resolving module names. + + :raises ImportPathMismatchError: + If after importing the given `path` and the module `__file__` + are different. Only raised in `prepend` and `append` modes. + """ + path = Path(path) + mode = ImportMode(mode) + + if not path.exists(): + raise ImportError(path) + + if mode is ImportMode.importlib: + # Try to import this module using the standard import mechanisms, but + # without touching sys.path. + try: + pkg_root, module_name = resolve_pkg_root_and_module_name( + path, consider_namespace_packages=consider_namespace_packages + ) + except CouldNotResolvePathError: + pass + else: + # If the given module name is already in sys.modules, do not import it again. + with contextlib.suppress(KeyError): + return sys.modules[module_name] + + mod = _import_module_using_spec( + module_name, path, pkg_root, insert_modules=False + ) + if mod is not None: + return mod + + # Could not import the module with the current sys.path, so we fall back + # to importing the file as a single module, not being a part of a package. 
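# Illustrative sketch of the dotted name module_name_from_path() (defined
# further down in this file) computes for the fallback below (paths made up):
from pathlib import PurePosixPath

def _sketch_module_name(py_file: str, root: str) -> str:
    parts = PurePosixPath(py_file).with_suffix("").relative_to(root).parts
    if len(parts) >= 2 and parts[-1] == "__init__":
        parts = parts[:-1]
    return ".".join(part.replace(".", "_") for part in parts)

assert _sketch_module_name("/projects/src/tests/test_foo.py", "/projects") == "src.tests.test_foo"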
+ module_name = module_name_from_path(path, root) + with contextlib.suppress(KeyError): + return sys.modules[module_name] + + mod = _import_module_using_spec( + module_name, path, path.parent, insert_modules=True + ) + if mod is None: + raise ImportError(f"Can't find module {module_name} at location {path}") + return mod + + try: + pkg_root, module_name = resolve_pkg_root_and_module_name( + path, consider_namespace_packages=consider_namespace_packages + ) + except CouldNotResolvePathError: + pkg_root, module_name = path.parent, path.stem + + # Change sys.path permanently: restoring it at the end of this function would cause surprising + # problems because of delayed imports: for example, a conftest.py file imported by this function + # might have local imports, which would fail at runtime if we restored sys.path. + if mode is ImportMode.append: + if str(pkg_root) not in sys.path: + sys.path.append(str(pkg_root)) + elif mode is ImportMode.prepend: + if str(pkg_root) != sys.path[0]: + sys.path.insert(0, str(pkg_root)) + else: + assert_never(mode) + + importlib.import_module(module_name) + + mod = sys.modules[module_name] + if path.name == "__init__.py": + return mod + + ignore = os.environ.get("PY_IGNORE_IMPORTMISMATCH", "") + if ignore != "1": + module_file = mod.__file__ + if module_file is None: + raise ImportPathMismatchError(module_name, module_file, path) + + if module_file.endswith((".pyc", ".pyo")): + module_file = module_file[:-1] + if module_file.endswith(os.sep + "__init__.py"): + module_file = module_file[: -(len(os.sep + "__init__.py"))] + + try: + is_same = _is_same(str(path), module_file) + except FileNotFoundError: + is_same = False + + if not is_same: + raise ImportPathMismatchError(module_name, module_file, path) + + return mod + + +def _import_module_using_spec( + module_name: str, module_path: Path, module_location: Path, *, insert_modules: bool +) -> Optional[ModuleType]: + """ + Tries to import a module by its canonical name, path to the .py file, and its + parent location. + + :param insert_modules: + If True, will call insert_missing_modules to create empty intermediate modules + for made-up module names (when importing test files not reachable from sys.path). + """ + # Checking with sys.meta_path first in case one of its hooks can import this module, + # such as our own assertion-rewrite hook. + for meta_importer in sys.meta_path: + spec = meta_importer.find_spec(module_name, [str(module_location)]) + if spec_matches_module_path(spec, module_path): + break + else: + spec = importlib.util.spec_from_file_location(module_name, str(module_path)) + + if spec_matches_module_path(spec, module_path): + assert spec is not None + # Attempt to import the parent module, seems is our responsibility: + # https://github.com/python/cpython/blob/73906d5c908c1e0b73c5436faeff7d93698fc074/Lib/importlib/_bootstrap.py#L1308-L1311 + parent_module_name, _, name = module_name.rpartition(".") + parent_module: Optional[ModuleType] = None + if parent_module_name: + parent_module = sys.modules.get(parent_module_name) + if parent_module is None: + # Find the directory of this module's parent. + parent_dir = ( + module_path.parent.parent + if module_path.name == "__init__.py" + else module_path.parent + ) + # Consider the parent module path as its __init__.py file, if it has one. 
+ parent_module_path = ( + parent_dir / "__init__.py" + if (parent_dir / "__init__.py").is_file() + else parent_dir + ) + parent_module = _import_module_using_spec( + parent_module_name, + parent_module_path, + parent_dir, + insert_modules=insert_modules, + ) + + # Find spec and import this module. + mod = importlib.util.module_from_spec(spec) + sys.modules[module_name] = mod + spec.loader.exec_module(mod) # type: ignore[union-attr] + + # Set this module as an attribute of the parent module (#12194). + if parent_module is not None: + setattr(parent_module, name, mod) + + if insert_modules: + insert_missing_modules(sys.modules, module_name) + return mod + + return None + + +def spec_matches_module_path( + module_spec: Optional[ModuleSpec], module_path: Path +) -> bool: + """Return true if the given ModuleSpec can be used to import the given module path.""" + if module_spec is None or module_spec.origin is None: + return False + + return Path(module_spec.origin) == module_path + + +# Implement a special _is_same function on Windows which returns True if the two filenames +# compare equal, to circumvent os.path.samefile returning False for mounts in UNC (#7678). +if sys.platform.startswith("win"): + + def _is_same(f1: str, f2: str) -> bool: + return Path(f1) == Path(f2) or os.path.samefile(f1, f2) + +else: + + def _is_same(f1: str, f2: str) -> bool: + return os.path.samefile(f1, f2) + + +def module_name_from_path(path: Path, root: Path) -> str: + """ + Return a dotted module name based on the given path, anchored on root. + + For example: path="projects/src/tests/test_foo.py" and root="/projects", the + resulting module name will be "src.tests.test_foo". + """ + path = path.with_suffix("") + try: + relative_path = path.relative_to(root) + except ValueError: + # If we can't get a relative path to root, use the full path, except + # for the first part ("d:\\" or "/" depending on the platform, for example). + path_parts = path.parts[1:] + else: + # Use the parts for the relative path to the root path. + path_parts = relative_path.parts + + # Module name for packages do not contain the __init__ file, unless + # the `__init__.py` file is at the root. + if len(path_parts) >= 2 and path_parts[-1] == "__init__": + path_parts = path_parts[:-1] + + # Module names cannot contain ".", normalize them to "_". This prevents + # a directory having a "." in the name (".env.310" for example) causing extra intermediate modules. + # Also, important to replace "." at the start of paths, as those are considered relative imports. + path_parts = tuple(x.replace(".", "_") for x in path_parts) + + return ".".join(path_parts) + + +def insert_missing_modules(modules: Dict[str, ModuleType], module_name: str) -> None: + """ + Used by ``import_path`` to create intermediate modules when using mode=importlib. + + When we want to import a module as "src.tests.test_foo" for example, we need + to create empty modules "src" and "src.tests" after inserting "src.tests.test_foo", + otherwise "src.tests.test_foo" is not importable by ``__import__``. + """ + module_parts = module_name.split(".") + while module_name: + parent_module_name, _, child_name = module_name.rpartition(".") + if parent_module_name: + parent_module = modules.get(parent_module_name) + if parent_module is None: + try: + # If sys.meta_path is empty, calling import_module will issue + # a warning and raise ModuleNotFoundError. To avoid the + # warning, we check sys.meta_path explicitly and raise the error + # ourselves to fall back to creating a dummy module. 
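# Illustrative, simplified sketch of the bookkeeping this function performs:
# for a made-up name "src.tests.test_foo", empty parent modules are created so
# that __import__ can walk the dotted path (no real importing is attempted).
from types import ModuleType

fake_modules = {"src.tests.test_foo": ModuleType("src.tests.test_foo")}
name = "src.tests.test_foo"
while "." in name:
    parent, _, child = name.rpartition(".")
    fake_modules.setdefault(parent, ModuleType(parent))
    setattr(fake_modules[parent], child, fake_modules[name])
    name = parent

assert hasattr(fake_modules["src"], "tests")
assert hasattr(fake_modules["src.tests"], "test_foo")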
+ if not sys.meta_path: + raise ModuleNotFoundError + parent_module = importlib.import_module(parent_module_name) + except ModuleNotFoundError: + parent_module = ModuleType( + module_name, + doc="Empty module created by pytest's importmode=importlib.", + ) + modules[parent_module_name] = parent_module + + # Add child attribute to the parent that can reference the child + # modules. + if not hasattr(parent_module, child_name): + setattr(parent_module, child_name, modules[module_name]) + + module_parts.pop(-1) + module_name = ".".join(module_parts) + + +def resolve_package_path(path: Path) -> Optional[Path]: + """Return the Python package path by looking for the last + directory upwards which still contains an __init__.py. + + Returns None if it can not be determined. + """ + result = None + for parent in itertools.chain((path,), path.parents): + if parent.is_dir(): + if not (parent / "__init__.py").is_file(): + break + if not parent.name.isidentifier(): + break + result = parent + return result + + +def resolve_pkg_root_and_module_name( + path: Path, *, consider_namespace_packages: bool = False +) -> Tuple[Path, str]: + """ + Return the path to the directory of the root package that contains the + given Python file, and its module name: + + src/ + app/ + __init__.py + core/ + __init__.py + models.py + + Passing the full path to `models.py` will yield Path("src") and "app.core.models". + + If consider_namespace_packages is True, then we additionally check upwards in the hierarchy + for namespace packages: + + https://packaging.python.org/en/latest/guides/packaging-namespace-packages + + Raises CouldNotResolvePathError if the given path does not belong to a package (missing any __init__.py files). + """ + pkg_root: Optional[Path] = None + pkg_path = resolve_package_path(path) + if pkg_path is not None: + pkg_root = pkg_path.parent + if consider_namespace_packages: + start = pkg_root if pkg_root is not None else path.parent + for candidate in (start, *start.parents): + module_name = compute_module_name(candidate, path) + if module_name and is_importable(module_name, path): + # Point the pkg_root to the root of the namespace package. + pkg_root = candidate + break + + if pkg_root is not None: + module_name = compute_module_name(pkg_root, path) + if module_name: + return pkg_root, module_name + + raise CouldNotResolvePathError(f"Could not resolve for {path}") + + +def is_importable(module_name: str, module_path: Path) -> bool: + """ + Return if the given module path could be imported normally by Python, akin to the user + entering the REPL and importing the corresponding module name directly, and corresponds + to the module_path specified. + + :param module_name: + Full module name that we want to check if is importable. + For example, "app.models". + + :param module_path: + Full path to the python module/package we want to check if is importable. + For example, "/projects/src/app/models.py". + """ + try: + # Note this is different from what we do in ``_import_module_using_spec``, where we explicitly search through + # sys.meta_path to be able to pass the path of the module that we want to import (``meta_importer.find_spec``). + # Using importlib.util.find_spec() is different, it gives the same results as trying to import + # the module normally in the REPL. 
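# Illustrative sketch of the check performed below: find_spec() resolves a
# name exactly like a plain "import" would, so its origin can be compared
# against the file we expected to import (standard-library example).
import importlib.util
import json
from pathlib import Path

spec = importlib.util.find_spec("json")
assert spec is not None and spec.origin is not None
assert Path(spec.origin) == Path(json.__file__)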
+ spec = importlib.util.find_spec(module_name) + except (ImportError, ValueError, ImportWarning): + return False + else: + return spec_matches_module_path(spec, module_path) + + +def compute_module_name(root: Path, module_path: Path) -> Optional[str]: + """Compute a module name based on a path and a root anchor.""" + try: + path_without_suffix = module_path.with_suffix("") + except ValueError: + # Empty paths (such as Path.cwd()) might break meta_path hooks (like our own assertion rewriter). + return None + + try: + relative = path_without_suffix.relative_to(root) + except ValueError: # pragma: no cover + return None + names = list(relative.parts) + if not names: + return None + if names[-1] == "__init__": + names.pop() + return ".".join(names) + + +class CouldNotResolvePathError(Exception): + """Custom exception raised by resolve_pkg_root_and_module_name.""" + + +def scandir( + path: Union[str, "os.PathLike[str]"], + sort_key: Callable[["os.DirEntry[str]"], object] = lambda entry: entry.name, +) -> List["os.DirEntry[str]"]: + """Scan a directory recursively, in breadth-first order. + + The returned entries are sorted according to the given key. + The default is to sort by name. + """ + entries = [] + with os.scandir(path) as s: + # Skip entries with symlink loops and other brokenness, so the caller + # doesn't have to deal with it. + for entry in s: + try: + entry.is_file() + except OSError as err: + if _ignore_error(err): + continue + raise + entries.append(entry) + entries.sort(key=sort_key) # type: ignore[arg-type] + return entries + + +def visit( + path: Union[str, "os.PathLike[str]"], recurse: Callable[["os.DirEntry[str]"], bool] +) -> Iterator["os.DirEntry[str]"]: + """Walk a directory recursively, in breadth-first order. + + The `recurse` predicate determines whether a directory is recursed. + + Entries at each directory level are sorted. + """ + entries = scandir(path) + yield from entries + for entry in entries: + if entry.is_dir() and recurse(entry): + yield from visit(entry.path, recurse) + + +def absolutepath(path: "Union[str, os.PathLike[str]]") -> Path: + """Convert a path to an absolute path using os.path.abspath. + + Prefer this over Path.resolve() (see #6523). + Prefer this over Path.absolute() (not public, doesn't normalize). + """ + return Path(os.path.abspath(path)) + + +def commonpath(path1: Path, path2: Path) -> Optional[Path]: + """Return the common part shared with the other path, or None if there is + no common part. + + If one path is relative and one is absolute, returns None. + """ + try: + return Path(os.path.commonpath((str(path1), str(path2)))) + except ValueError: + return None + + +def bestrelpath(directory: Path, dest: Path) -> str: + """Return a string which is a relative path from directory to dest such + that directory/bestrelpath == dest. + + The paths must be either both absolute or both relative. + + If no such path can be determined, returns dest. + """ + assert isinstance(directory, Path) + assert isinstance(dest, Path) + if dest == directory: + return os.curdir + # Find the longest common directory. + base = commonpath(directory, dest) + # Can be the case on Windows for two absolute paths on different drives. + # Can be the case for two relative paths without common prefix. + # Can be the case for a relative path and an absolute path. + if not base: + return str(dest) + reldirectory = directory.relative_to(base) + reldest = dest.relative_to(base) + return os.path.join( + # Back from directory to base. 
+ *([os.pardir] * len(reldirectory.parts)), + # Forward from base to dest. + *reldest.parts, + ) + + +def safe_exists(p: Path) -> bool: + """Like Path.exists(), but account for input arguments that might be too long (#11394).""" + try: + return p.exists() + except (ValueError, OSError): + # ValueError: stat: path too long for Windows + # OSError: [WinError 123] The filename, directory name, or volume label syntax is incorrect + return False diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/py.typed b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/pytester.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/pytester.py new file mode 100644 index 000000000..23f44da69 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/pytester.py @@ -0,0 +1,1769 @@ +# mypy: allow-untyped-defs +"""(Disabled by default) support for testing pytest and pytest plugins. + +PYTEST_DONT_REWRITE +""" + +import collections.abc +import contextlib +from fnmatch import fnmatch +import gc +import importlib +from io import StringIO +import locale +import os +from pathlib import Path +import platform +import re +import shutil +import subprocess +import sys +import traceback +from typing import Any +from typing import Callable +from typing import Dict +from typing import Final +from typing import final +from typing import Generator +from typing import IO +from typing import Iterable +from typing import List +from typing import Literal +from typing import Optional +from typing import overload +from typing import Sequence +from typing import TextIO +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import Union +from weakref import WeakKeyDictionary + +from iniconfig import IniConfig +from iniconfig import SectionWrapper + +from _pytest import timing +from _pytest._code import Source +from _pytest.capture import _get_multicapture +from _pytest.compat import NOTSET +from _pytest.compat import NotSetType +from _pytest.config import _PluggyPlugin +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config import main +from _pytest.config import PytestPluginManager +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.monkeypatch import MonkeyPatch +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import fail +from _pytest.outcomes import importorskip +from _pytest.outcomes import skip +from _pytest.pathlib import bestrelpath +from _pytest.pathlib import make_numbered_dir +from _pytest.reports import CollectReport +from _pytest.reports import TestReport +from _pytest.tmpdir import TempPathFactory +from _pytest.warning_types import PytestWarning + + +if TYPE_CHECKING: + import pexpect + + +pytest_plugins = ["pytester_assertions"] + + +IGNORE_PAM = [ # filenames added when obtaining details about the current user + "/var/lib/sss/mc/passwd" +] + + +def pytest_addoption(parser: Parser) -> None: + parser.addoption( + "--lsof", + action="store_true", + dest="lsof", + default=False, + help="Run FD checks if lsof is available", + ) + + parser.addoption( + "--runpytest", + default="inprocess", + dest="runpytest", + 
choices=("inprocess", "subprocess"), + help=( + "Run pytest sub runs in tests using an 'inprocess' " + "or 'subprocess' (python -m main) method" + ), + ) + + parser.addini( + "pytester_example_dir", help="Directory to take the pytester example files from" + ) + + +def pytest_configure(config: Config) -> None: + if config.getvalue("lsof"): + checker = LsofFdLeakChecker() + if checker.matching_platform(): + config.pluginmanager.register(checker) + + config.addinivalue_line( + "markers", + "pytester_example_path(*path_segments): join the given path " + "segments to `pytester_example_dir` for this test.", + ) + + +class LsofFdLeakChecker: + def get_open_files(self) -> List[Tuple[str, str]]: + if sys.version_info >= (3, 11): + # New in Python 3.11, ignores utf-8 mode + encoding = locale.getencoding() + else: + encoding = locale.getpreferredencoding(False) + out = subprocess.run( + ("lsof", "-Ffn0", "-p", str(os.getpid())), + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + check=True, + text=True, + encoding=encoding, + ).stdout + + def isopen(line: str) -> bool: + return line.startswith("f") and ( + "deleted" not in line + and "mem" not in line + and "txt" not in line + and "cwd" not in line + ) + + open_files = [] + + for line in out.split("\n"): + if isopen(line): + fields = line.split("\0") + fd = fields[0][1:] + filename = fields[1][1:] + if filename in IGNORE_PAM: + continue + if filename.startswith("/"): + open_files.append((fd, filename)) + + return open_files + + def matching_platform(self) -> bool: + try: + subprocess.run(("lsof", "-v"), check=True) + except (OSError, subprocess.CalledProcessError): + return False + else: + return True + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_runtest_protocol(self, item: Item) -> Generator[None, object, object]: + lines1 = self.get_open_files() + try: + return (yield) + finally: + if hasattr(sys, "pypy_version_info"): + gc.collect() + lines2 = self.get_open_files() + + new_fds = {t[0] for t in lines2} - {t[0] for t in lines1} + leaked_files = [t for t in lines2 if t[0] in new_fds] + if leaked_files: + error = [ + "***** %s FD leakage detected" % len(leaked_files), + *(str(f) for f in leaked_files), + "*** Before:", + *(str(f) for f in lines1), + "*** After:", + *(str(f) for f in lines2), + "***** %s FD leakage detected" % len(leaked_files), + "*** function {}:{}: {} ".format(*item.location), + "See issue #2366", + ] + item.warn(PytestWarning("\n".join(error))) + + +# used at least by pytest-xdist plugin + + +@fixture +def _pytest(request: FixtureRequest) -> "PytestArg": + """Return a helper which offers a gethookrecorder(hook) method which + returns a HookRecorder instance which helps to make assertions about called + hooks.""" + return PytestArg(request) + + +class PytestArg: + def __init__(self, request: FixtureRequest) -> None: + self._request = request + + def gethookrecorder(self, hook) -> "HookRecorder": + hookrecorder = HookRecorder(hook._pm) + self._request.addfinalizer(hookrecorder.finish_recording) + return hookrecorder + + +def get_public_names(values: Iterable[str]) -> List[str]: + """Only return names from iterator values without a leading underscore.""" + return [x for x in values if x[0] != "_"] + + +@final +class RecordedHookCall: + """A recorded call to a hook. + + The arguments to the hook call are set as attributes. + For example: + + .. code-block:: python + + calls = hook_recorder.getcalls("pytest_runtest_setup") + # Suppose pytest_runtest_setup was called once with `item=an_item`. 
+ assert calls[0].item is an_item + """ + + def __init__(self, name: str, kwargs) -> None: + self.__dict__.update(kwargs) + self._name = name + + def __repr__(self) -> str: + d = self.__dict__.copy() + del d["_name"] + return f"" + + if TYPE_CHECKING: + # The class has undetermined attributes, this tells mypy about it. + def __getattr__(self, key: str): ... + + +@final +class HookRecorder: + """Record all hooks called in a plugin manager. + + Hook recorders are created by :class:`Pytester`. + + This wraps all the hook calls in the plugin manager, recording each call + before propagating the normal calls. + """ + + def __init__( + self, pluginmanager: PytestPluginManager, *, _ispytest: bool = False + ) -> None: + check_ispytest(_ispytest) + + self._pluginmanager = pluginmanager + self.calls: List[RecordedHookCall] = [] + self.ret: Optional[Union[int, ExitCode]] = None + + def before(hook_name: str, hook_impls, kwargs) -> None: + self.calls.append(RecordedHookCall(hook_name, kwargs)) + + def after(outcome, hook_name: str, hook_impls, kwargs) -> None: + pass + + self._undo_wrapping = pluginmanager.add_hookcall_monitoring(before, after) + + def finish_recording(self) -> None: + self._undo_wrapping() + + def getcalls(self, names: Union[str, Iterable[str]]) -> List[RecordedHookCall]: + """Get all recorded calls to hooks with the given names (or name).""" + if isinstance(names, str): + names = names.split() + return [call for call in self.calls if call._name in names] + + def assert_contains(self, entries: Sequence[Tuple[str, str]]) -> None: + __tracebackhide__ = True + i = 0 + entries = list(entries) + backlocals = sys._getframe(1).f_locals + while entries: + name, check = entries.pop(0) + for ind, call in enumerate(self.calls[i:]): + if call._name == name: + print("NAMEMATCH", name, call) + if eval(check, backlocals, call.__dict__): + print("CHECKERMATCH", repr(check), "->", call) + else: + print("NOCHECKERMATCH", repr(check), "-", call) + continue + i += ind + 1 + break + print("NONAMEMATCH", name, "with", call) + else: + fail(f"could not find {name!r} check {check!r}") + + def popcall(self, name: str) -> RecordedHookCall: + __tracebackhide__ = True + for i, call in enumerate(self.calls): + if call._name == name: + del self.calls[i] + return call + lines = [f"could not find call {name!r}, in:"] + lines.extend([" %s" % x for x in self.calls]) + fail("\n".join(lines)) + + def getcall(self, name: str) -> RecordedHookCall: + values = self.getcalls(name) + assert len(values) == 1, (name, values) + return values[0] + + # functionality for test reports + + @overload + def getreports( + self, + names: "Literal['pytest_collectreport']", + ) -> Sequence[CollectReport]: ... + + @overload + def getreports( + self, + names: "Literal['pytest_runtest_logreport']", + ) -> Sequence[TestReport]: ... + + @overload + def getreports( + self, + names: Union[str, Iterable[str]] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[Union[CollectReport, TestReport]]: ... 
+ + def getreports( + self, + names: Union[str, Iterable[str]] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[Union[CollectReport, TestReport]]: + return [x.report for x in self.getcalls(names)] + + def matchreport( + self, + inamepart: str = "", + names: Union[str, Iterable[str]] = ( + "pytest_runtest_logreport", + "pytest_collectreport", + ), + when: Optional[str] = None, + ) -> Union[CollectReport, TestReport]: + """Return a testreport whose dotted import path matches.""" + values = [] + for rep in self.getreports(names=names): + if not when and rep.when != "call" and rep.passed: + # setup/teardown passing reports - let's ignore those + continue + if when and rep.when != when: + continue + if not inamepart or inamepart in rep.nodeid.split("::"): + values.append(rep) + if not values: + raise ValueError( + f"could not find test report matching {inamepart!r}: " + "no test reports at all!" + ) + if len(values) > 1: + raise ValueError( + f"found 2 or more testreports matching {inamepart!r}: {values}" + ) + return values[0] + + @overload + def getfailures( + self, + names: "Literal['pytest_collectreport']", + ) -> Sequence[CollectReport]: ... + + @overload + def getfailures( + self, + names: "Literal['pytest_runtest_logreport']", + ) -> Sequence[TestReport]: ... + + @overload + def getfailures( + self, + names: Union[str, Iterable[str]] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[Union[CollectReport, TestReport]]: ... + + def getfailures( + self, + names: Union[str, Iterable[str]] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[Union[CollectReport, TestReport]]: + return [rep for rep in self.getreports(names) if rep.failed] + + def getfailedcollections(self) -> Sequence[CollectReport]: + return self.getfailures("pytest_collectreport") + + def listoutcomes( + self, + ) -> Tuple[ + Sequence[TestReport], + Sequence[Union[CollectReport, TestReport]], + Sequence[Union[CollectReport, TestReport]], + ]: + passed = [] + skipped = [] + failed = [] + for rep in self.getreports( + ("pytest_collectreport", "pytest_runtest_logreport") + ): + if rep.passed: + if rep.when == "call": + assert isinstance(rep, TestReport) + passed.append(rep) + elif rep.skipped: + skipped.append(rep) + else: + assert rep.failed, f"Unexpected outcome: {rep!r}" + failed.append(rep) + return passed, skipped, failed + + def countoutcomes(self) -> List[int]: + return [len(x) for x in self.listoutcomes()] + + def assertoutcome(self, passed: int = 0, skipped: int = 0, failed: int = 0) -> None: + __tracebackhide__ = True + from _pytest.pytester_assertions import assertoutcome + + outcomes = self.listoutcomes() + assertoutcome( + outcomes, + passed=passed, + skipped=skipped, + failed=failed, + ) + + def clear(self) -> None: + self.calls[:] = [] + + +@fixture +def linecomp() -> "LineComp": + """A :class: `LineComp` instance for checking that an input linearly + contains a sequence of strings.""" + return LineComp() + + +@fixture(name="LineMatcher") +def LineMatcher_fixture(request: FixtureRequest) -> Type["LineMatcher"]: + """A reference to the :class: `LineMatcher`. + + This is instantiable with a list of lines (without their trailing newlines). + This is useful for testing large texts, such as the output of commands. 
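# Illustrative usage sketch of LineMatcher as described above (the class is
# defined later in this module; the input lines are made up):
from pytest import LineMatcher

matcher = LineMatcher(["collected 1 item", "test_foo.py .", "1 passed in 0.01s"])
matcher.fnmatch_lines(["*1 passed*"])  # raises Failed if no line matches
text = str(matcher)                    # reconstructs the lines joined by newlines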
+ """ + return LineMatcher + + +@fixture +def pytester( + request: FixtureRequest, tmp_path_factory: TempPathFactory, monkeypatch: MonkeyPatch +) -> "Pytester": + """ + Facilities to write tests/configuration files, execute pytest in isolation, and match + against expected output, perfect for black-box testing of pytest plugins. + + It attempts to isolate the test run from external factors as much as possible, modifying + the current working directory to ``path`` and environment variables during initialization. + + It is particularly useful for testing plugins. It is similar to the :fixture:`tmp_path` + fixture but provides methods which aid in testing pytest itself. + """ + return Pytester(request, tmp_path_factory, monkeypatch, _ispytest=True) + + +@fixture +def _sys_snapshot() -> Generator[None, None, None]: + snappaths = SysPathsSnapshot() + snapmods = SysModulesSnapshot() + yield + snapmods.restore() + snappaths.restore() + + +@fixture +def _config_for_test() -> Generator[Config, None, None]: + from _pytest.config import get_config + + config = get_config() + yield config + config._ensure_unconfigure() # cleanup, e.g. capman closing tmpfiles. + + +# Regex to match the session duration string in the summary: "74.34s". +rex_session_duration = re.compile(r"\d+\.\d\ds") +# Regex to match all the counts and phrases in the summary line: "34 passed, 111 skipped". +rex_outcome = re.compile(r"(\d+) (\w+)") + + +@final +class RunResult: + """The result of running a command from :class:`~pytest.Pytester`.""" + + def __init__( + self, + ret: Union[int, ExitCode], + outlines: List[str], + errlines: List[str], + duration: float, + ) -> None: + try: + self.ret: Union[int, ExitCode] = ExitCode(ret) + """The return value.""" + except ValueError: + self.ret = ret + self.outlines = outlines + """List of lines captured from stdout.""" + self.errlines = errlines + """List of lines captured from stderr.""" + self.stdout = LineMatcher(outlines) + """:class:`~pytest.LineMatcher` of stdout. + + Use e.g. :func:`str(stdout) ` to reconstruct stdout, or the commonly used + :func:`stdout.fnmatch_lines() ` method. + """ + self.stderr = LineMatcher(errlines) + """:class:`~pytest.LineMatcher` of stderr.""" + self.duration = duration + """Duration in seconds.""" + + def __repr__(self) -> str: + return ( + "" + % (self.ret, len(self.stdout.lines), len(self.stderr.lines), self.duration) + ) + + def parseoutcomes(self) -> Dict[str, int]: + """Return a dictionary of outcome noun -> count from parsing the terminal + output that the test process produced. + + The returned nouns will always be in plural form:: + + ======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ==== + + Will return ``{"failed": 1, "passed": 1, "warnings": 1, "errors": 1}``. + """ + return self.parse_summary_nouns(self.outlines) + + @classmethod + def parse_summary_nouns(cls, lines) -> Dict[str, int]: + """Extract the nouns from a pytest terminal summary line. + + It always returns the plural noun for consistency:: + + ======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ==== + + Will return ``{"failed": 1, "passed": 1, "warnings": 1, "errors": 1}``. 
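# Illustrative sketch of the summary-line parsing described above, using the
# same regular expressions defined earlier in this module (re-declared here so
# the snippet is self-contained; the summary line comes from the docstring):
import re

rex_session_duration = re.compile(r"\d+\.\d\ds")
rex_outcome = re.compile(r"(\d+) (\w+)")

line = "======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ===="
assert rex_session_duration.search(line)
counts = {noun: int(n) for n, noun in rex_outcome.findall(line)}
to_plural = {"warning": "warnings", "error": "errors"}
assert {to_plural.get(k, k): v for k, v in counts.items()} == {
    "failed": 1, "passed": 1, "warnings": 1, "errors": 1
}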
+ """ + for line in reversed(lines): + if rex_session_duration.search(line): + outcomes = rex_outcome.findall(line) + ret = {noun: int(count) for (count, noun) in outcomes} + break + else: + raise ValueError("Pytest terminal summary report not found") + + to_plural = { + "warning": "warnings", + "error": "errors", + } + return {to_plural.get(k, k): v for k, v in ret.items()} + + def assert_outcomes( + self, + passed: int = 0, + skipped: int = 0, + failed: int = 0, + errors: int = 0, + xpassed: int = 0, + xfailed: int = 0, + warnings: Optional[int] = None, + deselected: Optional[int] = None, + ) -> None: + """ + Assert that the specified outcomes appear with the respective + numbers (0 means it didn't occur) in the text output from a test run. + + ``warnings`` and ``deselected`` are only checked if not None. + """ + __tracebackhide__ = True + from _pytest.pytester_assertions import assert_outcomes + + outcomes = self.parseoutcomes() + assert_outcomes( + outcomes, + passed=passed, + skipped=skipped, + failed=failed, + errors=errors, + xpassed=xpassed, + xfailed=xfailed, + warnings=warnings, + deselected=deselected, + ) + + +class SysModulesSnapshot: + def __init__(self, preserve: Optional[Callable[[str], bool]] = None) -> None: + self.__preserve = preserve + self.__saved = dict(sys.modules) + + def restore(self) -> None: + if self.__preserve: + self.__saved.update( + (k, m) for k, m in sys.modules.items() if self.__preserve(k) + ) + sys.modules.clear() + sys.modules.update(self.__saved) + + +class SysPathsSnapshot: + def __init__(self) -> None: + self.__saved = list(sys.path), list(sys.meta_path) + + def restore(self) -> None: + sys.path[:], sys.meta_path[:] = self.__saved + + +@final +class Pytester: + """ + Facilities to write tests/configuration files, execute pytest in isolation, and match + against expected output, perfect for black-box testing of pytest plugins. + + It attempts to isolate the test run from external factors as much as possible, modifying + the current working directory to :attr:`path` and environment variables during initialization. + """ + + __test__ = False + + CLOSE_STDIN: "Final" = NOTSET + + class TimeoutExpired(Exception): + pass + + def __init__( + self, + request: FixtureRequest, + tmp_path_factory: TempPathFactory, + monkeypatch: MonkeyPatch, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._request = request + self._mod_collections: WeakKeyDictionary[ + Collector, List[Union[Item, Collector]] + ] = WeakKeyDictionary() + if request.function: + name: str = request.function.__name__ + else: + name = request.node.name + self._name = name + self._path: Path = tmp_path_factory.mktemp(name, numbered=True) + #: A list of plugins to use with :py:meth:`parseconfig` and + #: :py:meth:`runpytest`. Initially this is an empty list but plugins can + #: be added to the list. The type of items to add to the list depends on + #: the method using them so refer to them for details. + self.plugins: List[Union[str, _PluggyPlugin]] = [] + self._sys_path_snapshot = SysPathsSnapshot() + self._sys_modules_snapshot = self.__take_sys_modules_snapshot() + self._request.addfinalizer(self._finalize) + self._method = self._request.config.getoption("--runpytest") + self._test_tmproot = tmp_path_factory.mktemp(f"tmp-{name}", numbered=True) + + self._monkeypatch = mp = monkeypatch + self.chdir() + mp.setenv("PYTEST_DEBUG_TEMPROOT", str(self._test_tmproot)) + # Ensure no unexpected caching via tox. 
+ mp.delenv("TOX_ENV_DIR", raising=False) + # Discard outer pytest options. + mp.delenv("PYTEST_ADDOPTS", raising=False) + # Ensure no user config is used. + tmphome = str(self.path) + mp.setenv("HOME", tmphome) + mp.setenv("USERPROFILE", tmphome) + # Do not use colors for inner runs by default. + mp.setenv("PY_COLORS", "0") + + @property + def path(self) -> Path: + """Temporary directory path used to create files/run tests from, etc.""" + return self._path + + def __repr__(self) -> str: + return f"" + + def _finalize(self) -> None: + """ + Clean up global state artifacts. + + Some methods modify the global interpreter state and this tries to + clean this up. It does not remove the temporary directory however so + it can be looked at after the test run has finished. + """ + self._sys_modules_snapshot.restore() + self._sys_path_snapshot.restore() + + def __take_sys_modules_snapshot(self) -> SysModulesSnapshot: + # Some zope modules used by twisted-related tests keep internal state + # and can't be deleted; we had some trouble in the past with + # `zope.interface` for example. + # + # Preserve readline due to https://bugs.python.org/issue41033. + # pexpect issues a SIGWINCH. + def preserve_module(name): + return name.startswith(("zope", "readline")) + + return SysModulesSnapshot(preserve=preserve_module) + + def make_hook_recorder(self, pluginmanager: PytestPluginManager) -> HookRecorder: + """Create a new :class:`HookRecorder` for a :class:`PytestPluginManager`.""" + pluginmanager.reprec = reprec = HookRecorder(pluginmanager, _ispytest=True) # type: ignore[attr-defined] + self._request.addfinalizer(reprec.finish_recording) + return reprec + + def chdir(self) -> None: + """Cd into the temporary directory. + + This is done automatically upon instantiation. + """ + self._monkeypatch.chdir(self.path) + + def _makefile( + self, + ext: str, + lines: Sequence[Union[Any, bytes]], + files: Dict[str, str], + encoding: str = "utf-8", + ) -> Path: + items = list(files.items()) + + if ext and not ext.startswith("."): + raise ValueError( + f"pytester.makefile expects a file extension, try .{ext} instead of {ext}" + ) + + def to_text(s: Union[Any, bytes]) -> str: + return s.decode(encoding) if isinstance(s, bytes) else str(s) + + if lines: + source = "\n".join(to_text(x) for x in lines) + basename = self._name + items.insert(0, (basename, source)) + + ret = None + for basename, value in items: + p = self.path.joinpath(basename).with_suffix(ext) + p.parent.mkdir(parents=True, exist_ok=True) + source_ = Source(value) + source = "\n".join(to_text(line) for line in source_.lines) + p.write_text(source.strip(), encoding=encoding) + if ret is None: + ret = p + assert ret is not None + return ret + + def makefile(self, ext: str, *args: str, **kwargs: str) -> Path: + r"""Create new text file(s) in the test directory. + + :param ext: + The extension the file(s) should use, including the dot, e.g. `.py`. + :param args: + All args are treated as strings and joined using newlines. + The result is written as contents to the file. The name of the + file is based on the test function requesting this fixture. + :param kwargs: + Each keyword is the name of a file, while the value of it will + be written as contents of the file. + :returns: + The first created file. + + Examples: + .. code-block:: python + + pytester.makefile(".txt", "line1", "line2") + + pytester.makefile(".ini", pytest="[pytest]\naddopts=-rs\n") + + To create binary files, use :meth:`pathlib.Path.write_bytes` directly: + + .. 
code-block:: python + + filename = pytester.path.joinpath("foo.bin") + filename.write_bytes(b"...") + """ + return self._makefile(ext, args, kwargs) + + def makeconftest(self, source: str) -> Path: + """Write a conftest.py file. + + :param source: The contents. + :returns: The conftest.py file. + """ + return self.makepyfile(conftest=source) + + def makeini(self, source: str) -> Path: + """Write a tox.ini file. + + :param source: The contents. + :returns: The tox.ini file. + """ + return self.makefile(".ini", tox=source) + + def getinicfg(self, source: str) -> SectionWrapper: + """Return the pytest section from the tox.ini config file.""" + p = self.makeini(source) + return IniConfig(str(p))["pytest"] + + def makepyprojecttoml(self, source: str) -> Path: + """Write a pyproject.toml file. + + :param source: The contents. + :returns: The pyproject.ini file. + + .. versionadded:: 6.0 + """ + return self.makefile(".toml", pyproject=source) + + def makepyfile(self, *args, **kwargs) -> Path: + r"""Shortcut for .makefile() with a .py extension. + + Defaults to the test name with a '.py' extension, e.g test_foobar.py, overwriting + existing files. + + Examples: + .. code-block:: python + + def test_something(pytester): + # Initial file is created test_something.py. + pytester.makepyfile("foobar") + # To create multiple files, pass kwargs accordingly. + pytester.makepyfile(custom="foobar") + # At this point, both 'test_something.py' & 'custom.py' exist in the test directory. + + """ + return self._makefile(".py", args, kwargs) + + def maketxtfile(self, *args, **kwargs) -> Path: + r"""Shortcut for .makefile() with a .txt extension. + + Defaults to the test name with a '.txt' extension, e.g test_foobar.txt, overwriting + existing files. + + Examples: + .. code-block:: python + + def test_something(pytester): + # Initial file is created test_something.txt. + pytester.maketxtfile("foobar") + # To create multiple files, pass kwargs accordingly. + pytester.maketxtfile(custom="foobar") + # At this point, both 'test_something.txt' & 'custom.txt' exist in the test directory. + + """ + return self._makefile(".txt", args, kwargs) + + def syspathinsert( + self, path: Optional[Union[str, "os.PathLike[str]"]] = None + ) -> None: + """Prepend a directory to sys.path, defaults to :attr:`path`. + + This is undone automatically when this object dies at the end of each + test. + + :param path: + The path. + """ + if path is None: + path = self.path + + self._monkeypatch.syspath_prepend(str(path)) + + def mkdir(self, name: Union[str, "os.PathLike[str]"]) -> Path: + """Create a new (sub)directory. + + :param name: + The name of the directory, relative to the pytester path. + :returns: + The created directory. + """ + p = self.path / name + p.mkdir() + return p + + def mkpydir(self, name: Union[str, "os.PathLike[str]"]) -> Path: + """Create a new python package. + + This creates a (sub)directory with an empty ``__init__.py`` file so it + gets recognised as a Python package. + """ + p = self.path / name + p.mkdir() + p.joinpath("__init__.py").touch() + return p + + def copy_example(self, name: Optional[str] = None) -> Path: + """Copy file from project's directory into the testdir. + + :param name: + The name of the file to copy. + :return: + Path to the copied directory (inside ``self.path``). 
+ """ + example_dir_ = self._request.config.getini("pytester_example_dir") + if example_dir_ is None: + raise ValueError("pytester_example_dir is unset, can't copy examples") + example_dir: Path = self._request.config.rootpath / example_dir_ + + for extra_element in self._request.node.iter_markers("pytester_example_path"): + assert extra_element.args + example_dir = example_dir.joinpath(*extra_element.args) + + if name is None: + func_name = self._name + maybe_dir = example_dir / func_name + maybe_file = example_dir / (func_name + ".py") + + if maybe_dir.is_dir(): + example_path = maybe_dir + elif maybe_file.is_file(): + example_path = maybe_file + else: + raise LookupError( + f"{func_name} can't be found as module or package in {example_dir}" + ) + else: + example_path = example_dir.joinpath(name) + + if example_path.is_dir() and not example_path.joinpath("__init__.py").is_file(): + shutil.copytree(example_path, self.path, symlinks=True, dirs_exist_ok=True) + return self.path + elif example_path.is_file(): + result = self.path.joinpath(example_path.name) + shutil.copy(example_path, result) + return result + else: + raise LookupError( + f'example "{example_path}" is not found as a file or directory' + ) + + def getnode( + self, config: Config, arg: Union[str, "os.PathLike[str]"] + ) -> Union[Collector, Item]: + """Get the collection node of a file. + + :param config: + A pytest config. + See :py:meth:`parseconfig` and :py:meth:`parseconfigure` for creating it. + :param arg: + Path to the file. + :returns: + The node. + """ + session = Session.from_config(config) + assert "::" not in str(arg) + p = Path(os.path.abspath(arg)) + config.hook.pytest_sessionstart(session=session) + res = session.perform_collect([str(p)], genitems=False)[0] + config.hook.pytest_sessionfinish(session=session, exitstatus=ExitCode.OK) + return res + + def getpathnode( + self, path: Union[str, "os.PathLike[str]"] + ) -> Union[Collector, Item]: + """Return the collection node of a file. + + This is like :py:meth:`getnode` but uses :py:meth:`parseconfigure` to + create the (configured) pytest Config instance. + + :param path: + Path to the file. + :returns: + The node. + """ + path = Path(path) + config = self.parseconfigure(path) + session = Session.from_config(config) + x = bestrelpath(session.path, path) + config.hook.pytest_sessionstart(session=session) + res = session.perform_collect([x], genitems=False)[0] + config.hook.pytest_sessionfinish(session=session, exitstatus=ExitCode.OK) + return res + + def genitems(self, colitems: Sequence[Union[Item, Collector]]) -> List[Item]: + """Generate all test items from a collection node. + + This recurses into the collection node and returns a list of all the + test items contained within. + + :param colitems: + The collection nodes. + :returns: + The collected items. + """ + session = colitems[0].session + result: List[Item] = [] + for colitem in colitems: + result.extend(session.genitems(colitem)) + return result + + def runitem(self, source: str) -> Any: + """Run the "test_func" Item. + + The calling test instance (class containing the test method) must + provide a ``.getrunner()`` method which should return a runner which + can run the test protocol for a single item, e.g. + ``_pytest.runner.runtestprotocol``. 
+ """ + # used from runner functional tests + item = self.getitem(source) + # the test class where we are called from wants to provide the runner + testclassinstance = self._request.instance + runner = testclassinstance.getrunner() + return runner(item) + + def inline_runsource(self, source: str, *cmdlineargs) -> HookRecorder: + """Run a test module in process using ``pytest.main()``. + + This run writes "source" into a temporary file and runs + ``pytest.main()`` on it, returning a :py:class:`HookRecorder` instance + for the result. + + :param source: The source code of the test module. + :param cmdlineargs: Any extra command line arguments to use. + """ + p = self.makepyfile(source) + values = [*list(cmdlineargs), p] + return self.inline_run(*values) + + def inline_genitems(self, *args) -> Tuple[List[Item], HookRecorder]: + """Run ``pytest.main(['--collect-only'])`` in-process. + + Runs the :py:func:`pytest.main` function to run all of pytest inside + the test process itself like :py:meth:`inline_run`, but returns a + tuple of the collected items and a :py:class:`HookRecorder` instance. + """ + rec = self.inline_run("--collect-only", *args) + items = [x.item for x in rec.getcalls("pytest_itemcollected")] + return items, rec + + def inline_run( + self, + *args: Union[str, "os.PathLike[str]"], + plugins=(), + no_reraise_ctrlc: bool = False, + ) -> HookRecorder: + """Run ``pytest.main()`` in-process, returning a HookRecorder. + + Runs the :py:func:`pytest.main` function to run all of pytest inside + the test process itself. This means it can return a + :py:class:`HookRecorder` instance which gives more detailed results + from that run than can be done by matching stdout/stderr from + :py:meth:`runpytest`. + + :param args: + Command line arguments to pass to :py:func:`pytest.main`. + :param plugins: + Extra plugin instances the ``pytest.main()`` instance should use. + :param no_reraise_ctrlc: + Typically we reraise keyboard interrupts from the child run. If + True, the KeyboardInterrupt exception is captured. + """ + # (maybe a cpython bug?) the importlib cache sometimes isn't updated + # properly between file creation and inline_run (especially if imports + # are interspersed with file creation) + importlib.invalidate_caches() + + plugins = list(plugins) + finalizers = [] + try: + # Any sys.module or sys.path changes done while running pytest + # inline should be reverted after the test run completes to avoid + # clashing with later inline tests run within the same pytest test, + # e.g. just because they use matching test module names. + finalizers.append(self.__take_sys_modules_snapshot().restore) + finalizers.append(SysPathsSnapshot().restore) + + # Important note: + # - our tests should not leave any other references/registrations + # laying around other than possibly loaded test modules + # referenced from sys.modules, as nothing will clean those up + # automatically + + rec = [] + + class Collect: + def pytest_configure(x, config: Config) -> None: + rec.append(self.make_hook_recorder(config.pluginmanager)) + + plugins.append(Collect()) + ret = main([str(x) for x in args], plugins=plugins) + if len(rec) == 1: + reprec = rec.pop() + else: + + class reprec: # type: ignore + pass + + reprec.ret = ret + + # Typically we reraise keyboard interrupts from the child run + # because it's our user requesting interruption of the testing. 
+ if ret == ExitCode.INTERRUPTED and not no_reraise_ctrlc: + calls = reprec.getcalls("pytest_keyboard_interrupt") + if calls and calls[-1].excinfo.type == KeyboardInterrupt: + raise KeyboardInterrupt() + return reprec + finally: + for finalizer in finalizers: + finalizer() + + def runpytest_inprocess( + self, *args: Union[str, "os.PathLike[str]"], **kwargs: Any + ) -> RunResult: + """Return result of running pytest in-process, providing a similar + interface to what self.runpytest() provides.""" + syspathinsert = kwargs.pop("syspathinsert", False) + + if syspathinsert: + self.syspathinsert() + now = timing.time() + capture = _get_multicapture("sys") + capture.start_capturing() + try: + try: + reprec = self.inline_run(*args, **kwargs) + except SystemExit as e: + ret = e.args[0] + try: + ret = ExitCode(e.args[0]) + except ValueError: + pass + + class reprec: # type: ignore + ret = ret + + except Exception: + traceback.print_exc() + + class reprec: # type: ignore + ret = ExitCode(3) + + finally: + out, err = capture.readouterr() + capture.stop_capturing() + sys.stdout.write(out) + sys.stderr.write(err) + + assert reprec.ret is not None + res = RunResult( + reprec.ret, out.splitlines(), err.splitlines(), timing.time() - now + ) + res.reprec = reprec # type: ignore + return res + + def runpytest( + self, *args: Union[str, "os.PathLike[str]"], **kwargs: Any + ) -> RunResult: + """Run pytest inline or in a subprocess, depending on the command line + option "--runpytest" and return a :py:class:`~pytest.RunResult`.""" + new_args = self._ensure_basetemp(args) + if self._method == "inprocess": + return self.runpytest_inprocess(*new_args, **kwargs) + elif self._method == "subprocess": + return self.runpytest_subprocess(*new_args, **kwargs) + raise RuntimeError(f"Unrecognized runpytest option: {self._method}") + + def _ensure_basetemp( + self, args: Sequence[Union[str, "os.PathLike[str]"]] + ) -> List[Union[str, "os.PathLike[str]"]]: + new_args = list(args) + for x in new_args: + if str(x).startswith("--basetemp"): + break + else: + new_args.append("--basetemp=%s" % self.path.parent.joinpath("basetemp")) + return new_args + + def parseconfig(self, *args: Union[str, "os.PathLike[str]"]) -> Config: + """Return a new pytest :class:`pytest.Config` instance from given + commandline args. + + This invokes the pytest bootstrapping code in _pytest.config to create a + new :py:class:`pytest.PytestPluginManager` and call the + :hook:`pytest_cmdline_parse` hook to create a new :class:`pytest.Config` + instance. + + If :attr:`plugins` has been populated they should be plugin modules + to be registered with the plugin manager. + """ + import _pytest.config + + new_args = self._ensure_basetemp(args) + new_args = [str(x) for x in new_args] + + config = _pytest.config._prepareconfig(new_args, self.plugins) # type: ignore[arg-type] + # we don't know what the test will do with this half-setup config + # object and thus we make sure it gets unconfigured properly in any + # case (otherwise capturing could still be active, for example) + self._request.addfinalizer(config._ensure_unconfigure) + return config + + def parseconfigure(self, *args: Union[str, "os.PathLike[str]"]) -> Config: + """Return a new pytest configured Config instance. + + Returns a new :py:class:`pytest.Config` instance like + :py:meth:`parseconfig`, but also calls the :hook:`pytest_configure` + hook. 
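A sketch of the in-process entry point most plugin tests use, ``runpytest``, which returns a ``RunResult`` (illustrative only; the inner test names are made up):

.. code-block:: python

    def test_runpytest(pytester):
        pytester.makepyfile(
            """
            def test_pass():
                assert True

            def test_fail():
                assert False
            """
        )
        result = pytester.runpytest("-v")
        result.assert_outcomes(passed=1, failed=1)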
+ """ + config = self.parseconfig(*args) + config._do_configure() + return config + + def getitem( + self, source: Union[str, "os.PathLike[str]"], funcname: str = "test_func" + ) -> Item: + """Return the test item for a test function. + + Writes the source to a python file and runs pytest's collection on + the resulting module, returning the test item for the requested + function name. + + :param source: + The module source. + :param funcname: + The name of the test function for which to return a test item. + :returns: + The test item. + """ + items = self.getitems(source) + for item in items: + if item.name == funcname: + return item + assert 0, f"{funcname!r} item not found in module:\n{source}\nitems: {items}" + + def getitems(self, source: Union[str, "os.PathLike[str]"]) -> List[Item]: + """Return all test items collected from the module. + + Writes the source to a Python file and runs pytest's collection on + the resulting module, returning all test items contained within. + """ + modcol = self.getmodulecol(source) + return self.genitems([modcol]) + + def getmodulecol( + self, + source: Union[str, "os.PathLike[str]"], + configargs=(), + *, + withinit: bool = False, + ): + """Return the module collection node for ``source``. + + Writes ``source`` to a file using :py:meth:`makepyfile` and then + runs the pytest collection on it, returning the collection node for the + test module. + + :param source: + The source code of the module to collect. + + :param configargs: + Any extra arguments to pass to :py:meth:`parseconfigure`. + + :param withinit: + Whether to also write an ``__init__.py`` file to the same + directory to ensure it is a package. + """ + if isinstance(source, os.PathLike): + path = self.path.joinpath(source) + assert not withinit, "not supported for paths" + else: + kw = {self._name: str(source)} + path = self.makepyfile(**kw) + if withinit: + self.makepyfile(__init__="#") + self.config = config = self.parseconfigure(path, *configargs) + return self.getnode(config, path) + + def collect_by_name( + self, modcol: Collector, name: str + ) -> Optional[Union[Item, Collector]]: + """Return the collection node for name from the module collection. + + Searches a module collection node for a collection node matching the + given name. + + :param modcol: A module collection node; see :py:meth:`getmodulecol`. + :param name: The name of the node to return. + """ + if modcol not in self._mod_collections: + self._mod_collections[modcol] = list(modcol.collect()) + for colitem in self._mod_collections[modcol]: + if colitem.name == name: + return colitem + return None + + def popen( + self, + cmdargs: Sequence[Union[str, "os.PathLike[str]"]], + stdout: Union[int, TextIO] = subprocess.PIPE, + stderr: Union[int, TextIO] = subprocess.PIPE, + stdin: Union[NotSetType, bytes, IO[Any], int] = CLOSE_STDIN, + **kw, + ): + """Invoke :py:class:`subprocess.Popen`. + + Calls :py:class:`subprocess.Popen` making sure the current working + directory is in ``PYTHONPATH``. + + You probably want to use :py:meth:`run` instead. 
+ """ + env = os.environ.copy() + env["PYTHONPATH"] = os.pathsep.join( + filter(None, [os.getcwd(), env.get("PYTHONPATH", "")]) + ) + kw["env"] = env + + if stdin is self.CLOSE_STDIN: + kw["stdin"] = subprocess.PIPE + elif isinstance(stdin, bytes): + kw["stdin"] = subprocess.PIPE + else: + kw["stdin"] = stdin + + popen = subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw) + if stdin is self.CLOSE_STDIN: + assert popen.stdin is not None + popen.stdin.close() + elif isinstance(stdin, bytes): + assert popen.stdin is not None + popen.stdin.write(stdin) + + return popen + + def run( + self, + *cmdargs: Union[str, "os.PathLike[str]"], + timeout: Optional[float] = None, + stdin: Union[NotSetType, bytes, IO[Any], int] = CLOSE_STDIN, + ) -> RunResult: + """Run a command with arguments. + + Run a process using :py:class:`subprocess.Popen` saving the stdout and + stderr. + + :param cmdargs: + The sequence of arguments to pass to :py:class:`subprocess.Popen`, + with path-like objects being converted to :py:class:`str` + automatically. + :param timeout: + The period in seconds after which to timeout and raise + :py:class:`Pytester.TimeoutExpired`. + :param stdin: + Optional standard input. + + - If it is ``CLOSE_STDIN`` (Default), then this method calls + :py:class:`subprocess.Popen` with ``stdin=subprocess.PIPE``, and + the standard input is closed immediately after the new command is + started. + + - If it is of type :py:class:`bytes`, these bytes are sent to the + standard input of the command. + + - Otherwise, it is passed through to :py:class:`subprocess.Popen`. + For further information in this case, consult the document of the + ``stdin`` parameter in :py:class:`subprocess.Popen`. + :returns: + The result. + """ + __tracebackhide__ = True + + cmdargs = tuple(os.fspath(arg) for arg in cmdargs) + p1 = self.path.joinpath("stdout") + p2 = self.path.joinpath("stderr") + print("running:", *cmdargs) + print(" in:", Path.cwd()) + + with p1.open("w", encoding="utf8") as f1, p2.open("w", encoding="utf8") as f2: + now = timing.time() + popen = self.popen( + cmdargs, + stdin=stdin, + stdout=f1, + stderr=f2, + close_fds=(sys.platform != "win32"), + ) + if popen.stdin is not None: + popen.stdin.close() + + def handle_timeout() -> None: + __tracebackhide__ = True + + timeout_message = f"{timeout} second timeout expired running: {cmdargs}" + + popen.kill() + popen.wait() + raise self.TimeoutExpired(timeout_message) + + if timeout is None: + ret = popen.wait() + else: + try: + ret = popen.wait(timeout) + except subprocess.TimeoutExpired: + handle_timeout() + + with p1.open(encoding="utf8") as f1, p2.open(encoding="utf8") as f2: + out = f1.read().splitlines() + err = f2.read().splitlines() + + self._dump_lines(out, sys.stdout) + self._dump_lines(err, sys.stderr) + + with contextlib.suppress(ValueError): + ret = ExitCode(ret) + return RunResult(ret, out, err, timing.time() - now) + + def _dump_lines(self, lines, fp): + try: + for line in lines: + print(line, file=fp) + except UnicodeEncodeError: + print(f"couldn't print to {fp} because of encoding") + + def _getpytestargs(self) -> Tuple[str, ...]: + return sys.executable, "-mpytest" + + def runpython(self, script: "os.PathLike[str]") -> RunResult: + """Run a python script using sys.executable as interpreter.""" + return self.run(sys.executable, script) + + def runpython_c(self, command: str) -> RunResult: + """Run ``python -c "command"``.""" + return self.run(sys.executable, "-c", command) + + def runpytest_subprocess( + self, *args: Union[str, 
"os.PathLike[str]"], timeout: Optional[float] = None + ) -> RunResult: + """Run pytest as a subprocess with given arguments. + + Any plugins added to the :py:attr:`plugins` list will be added using the + ``-p`` command line option. Additionally ``--basetemp`` is used to put + any temporary files and directories in a numbered directory prefixed + with "runpytest-" to not conflict with the normal numbered pytest + location for temporary files and directories. + + :param args: + The sequence of arguments to pass to the pytest subprocess. + :param timeout: + The period in seconds after which to timeout and raise + :py:class:`Pytester.TimeoutExpired`. + :returns: + The result. + """ + __tracebackhide__ = True + p = make_numbered_dir(root=self.path, prefix="runpytest-", mode=0o700) + args = ("--basetemp=%s" % p, *args) + plugins = [x for x in self.plugins if isinstance(x, str)] + if plugins: + args = ("-p", plugins[0], *args) + args = self._getpytestargs() + args + return self.run(*args, timeout=timeout) + + def spawn_pytest( + self, string: str, expect_timeout: float = 10.0 + ) -> "pexpect.spawn": + """Run pytest using pexpect. + + This makes sure to use the right pytest and sets up the temporary + directory locations. + + The pexpect child is returned. + """ + basetemp = self.path / "temp-pexpect" + basetemp.mkdir(mode=0o700) + invoke = " ".join(map(str, self._getpytestargs())) + cmd = f"{invoke} --basetemp={basetemp} {string}" + return self.spawn(cmd, expect_timeout=expect_timeout) + + def spawn(self, cmd: str, expect_timeout: float = 10.0) -> "pexpect.spawn": + """Run a command using pexpect. + + The pexpect child is returned. + """ + pexpect = importorskip("pexpect", "3.0") + if hasattr(sys, "pypy_version_info") and "64" in platform.machine(): + skip("pypy-64 bit not supported") + if not hasattr(pexpect, "spawn"): + skip("pexpect.spawn not available") + logfile = self.path.joinpath("spawn.out").open("wb") + + child = pexpect.spawn(cmd, logfile=logfile, timeout=expect_timeout) + self._request.addfinalizer(logfile.close) + return child + + +class LineComp: + def __init__(self) -> None: + self.stringio = StringIO() + """:class:`python:io.StringIO()` instance used for input.""" + + def assert_contains_lines(self, lines2: Sequence[str]) -> None: + """Assert that ``lines2`` are contained (linearly) in :attr:`stringio`'s value. + + Lines are matched using :func:`LineMatcher.fnmatch_lines `. + """ + __tracebackhide__ = True + val = self.stringio.getvalue() + self.stringio.truncate(0) + self.stringio.seek(0) + lines1 = val.split("\n") + LineMatcher(lines1).fnmatch_lines(lines2) + + +class LineMatcher: + """Flexible matching of text. + + This is a convenience class to test large texts like the output of + commands. + + The constructor takes a list of lines without their trailing newlines, i.e. + ``text.splitlines()``. + """ + + def __init__(self, lines: List[str]) -> None: + self.lines = lines + self._log_output: List[str] = [] + + def __str__(self) -> str: + """Return the entire original text. + + .. versionadded:: 6.2 + You can use :meth:`str` in older versions. 
+ """ + return "\n".join(self.lines) + + def _getlines(self, lines2: Union[str, Sequence[str], Source]) -> Sequence[str]: + if isinstance(lines2, str): + lines2 = Source(lines2) + if isinstance(lines2, Source): + lines2 = lines2.strip().lines + return lines2 + + def fnmatch_lines_random(self, lines2: Sequence[str]) -> None: + """Check lines exist in the output in any order (using :func:`python:fnmatch.fnmatch`).""" + __tracebackhide__ = True + self._match_lines_random(lines2, fnmatch) + + def re_match_lines_random(self, lines2: Sequence[str]) -> None: + """Check lines exist in the output in any order (using :func:`python:re.match`).""" + __tracebackhide__ = True + self._match_lines_random(lines2, lambda name, pat: bool(re.match(pat, name))) + + def _match_lines_random( + self, lines2: Sequence[str], match_func: Callable[[str, str], bool] + ) -> None: + __tracebackhide__ = True + lines2 = self._getlines(lines2) + for line in lines2: + for x in self.lines: + if line == x or match_func(x, line): + self._log("matched: ", repr(line)) + break + else: + msg = "line %r not found in output" % line + self._log(msg) + self._fail(msg) + + def get_lines_after(self, fnline: str) -> Sequence[str]: + """Return all lines following the given line in the text. + + The given line can contain glob wildcards. + """ + for i, line in enumerate(self.lines): + if fnline == line or fnmatch(line, fnline): + return self.lines[i + 1 :] + raise ValueError("line %r not found in output" % fnline) + + def _log(self, *args) -> None: + self._log_output.append(" ".join(str(x) for x in args)) + + @property + def _log_text(self) -> str: + return "\n".join(self._log_output) + + def fnmatch_lines( + self, lines2: Sequence[str], *, consecutive: bool = False + ) -> None: + """Check lines exist in the output (using :func:`python:fnmatch.fnmatch`). + + The argument is a list of lines which have to match and can use glob + wildcards. If they do not match a pytest.fail() is called. The + matches and non-matches are also shown as part of the error message. + + :param lines2: String patterns to match. + :param consecutive: Match lines consecutively? + """ + __tracebackhide__ = True + self._match_lines(lines2, fnmatch, "fnmatch", consecutive=consecutive) + + def re_match_lines( + self, lines2: Sequence[str], *, consecutive: bool = False + ) -> None: + """Check lines exist in the output (using :func:`python:re.match`). + + The argument is a list of lines which have to match using ``re.match``. + If they do not match a pytest.fail() is called. + + The matches and non-matches are also shown as part of the error message. + + :param lines2: string patterns to match. + :param consecutive: match lines consecutively? + """ + __tracebackhide__ = True + self._match_lines( + lines2, + lambda name, pat: bool(re.match(pat, name)), + "re.match", + consecutive=consecutive, + ) + + def _match_lines( + self, + lines2: Sequence[str], + match_func: Callable[[str, str], bool], + match_nickname: str, + *, + consecutive: bool = False, + ) -> None: + """Underlying implementation of ``fnmatch_lines`` and ``re_match_lines``. + + :param Sequence[str] lines2: + List of string patterns to match. The actual format depends on + ``match_func``. + :param match_func: + A callable ``match_func(line, pattern)`` where line is the + captured line from stdout/stderr and pattern is the matching + pattern. + :param str match_nickname: + The nickname for the match function that will be logged to stdout + when a match occurs. + :param consecutive: + Match lines consecutively? 
+ """ + if not isinstance(lines2, collections.abc.Sequence): + raise TypeError(f"invalid type for lines2: {type(lines2).__name__}") + lines2 = self._getlines(lines2) + lines1 = self.lines[:] + extralines = [] + __tracebackhide__ = True + wnick = len(match_nickname) + 1 + started = False + for line in lines2: + nomatchprinted = False + while lines1: + nextline = lines1.pop(0) + if line == nextline: + self._log("exact match:", repr(line)) + started = True + break + elif match_func(nextline, line): + self._log("%s:" % match_nickname, repr(line)) + self._log( + "{:>{width}}".format("with:", width=wnick), repr(nextline) + ) + started = True + break + else: + if consecutive and started: + msg = f"no consecutive match: {line!r}" + self._log(msg) + self._log( + "{:>{width}}".format("with:", width=wnick), repr(nextline) + ) + self._fail(msg) + if not nomatchprinted: + self._log( + "{:>{width}}".format("nomatch:", width=wnick), repr(line) + ) + nomatchprinted = True + self._log("{:>{width}}".format("and:", width=wnick), repr(nextline)) + extralines.append(nextline) + else: + msg = f"remains unmatched: {line!r}" + self._log(msg) + self._fail(msg) + self._log_output = [] + + def no_fnmatch_line(self, pat: str) -> None: + """Ensure captured lines do not match the given pattern, using ``fnmatch.fnmatch``. + + :param str pat: The pattern to match lines. + """ + __tracebackhide__ = True + self._no_match_line(pat, fnmatch, "fnmatch") + + def no_re_match_line(self, pat: str) -> None: + """Ensure captured lines do not match the given pattern, using ``re.match``. + + :param str pat: The regular expression to match lines. + """ + __tracebackhide__ = True + self._no_match_line( + pat, lambda name, pat: bool(re.match(pat, name)), "re.match" + ) + + def _no_match_line( + self, pat: str, match_func: Callable[[str, str], bool], match_nickname: str + ) -> None: + """Ensure captured lines does not have a the given pattern, using ``fnmatch.fnmatch``. + + :param str pat: The pattern to match lines. + """ + __tracebackhide__ = True + nomatch_printed = False + wnick = len(match_nickname) + 1 + for line in self.lines: + if match_func(line, pat): + msg = f"{match_nickname}: {pat!r}" + self._log(msg) + self._log("{:>{width}}".format("with:", width=wnick), repr(line)) + self._fail(msg) + else: + if not nomatch_printed: + self._log("{:>{width}}".format("nomatch:", width=wnick), repr(pat)) + nomatch_printed = True + self._log("{:>{width}}".format("and:", width=wnick), repr(line)) + self._log_output = [] + + def _fail(self, msg: str) -> None: + __tracebackhide__ = True + log_text = self._log_text + self._log_output = [] + fail(log_text) + + def str(self) -> str: + """Return the entire original text.""" + return str(self) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/pytester_assertions.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/pytester_assertions.py new file mode 100644 index 000000000..d20c2bb59 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/pytester_assertions.py @@ -0,0 +1,76 @@ +"""Helper plugin for pytester; should not be loaded on its own.""" + +# This plugin contains assertions used by pytester. pytester cannot +# contain them itself, since it is imported by the `pytest` module, +# hence cannot be subject to assertion rewriting, which requires a +# module to not be already imported. 
+from typing import Dict +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import Union + +from _pytest.reports import CollectReport +from _pytest.reports import TestReport + + +def assertoutcome( + outcomes: Tuple[ + Sequence[TestReport], + Sequence[Union[CollectReport, TestReport]], + Sequence[Union[CollectReport, TestReport]], + ], + passed: int = 0, + skipped: int = 0, + failed: int = 0, +) -> None: + __tracebackhide__ = True + + realpassed, realskipped, realfailed = outcomes + obtained = { + "passed": len(realpassed), + "skipped": len(realskipped), + "failed": len(realfailed), + } + expected = {"passed": passed, "skipped": skipped, "failed": failed} + assert obtained == expected, outcomes + + +def assert_outcomes( + outcomes: Dict[str, int], + passed: int = 0, + skipped: int = 0, + failed: int = 0, + errors: int = 0, + xpassed: int = 0, + xfailed: int = 0, + warnings: Optional[int] = None, + deselected: Optional[int] = None, +) -> None: + """Assert that the specified outcomes appear with the respective + numbers (0 means it didn't occur) in the text output from a test run.""" + __tracebackhide__ = True + + obtained = { + "passed": outcomes.get("passed", 0), + "skipped": outcomes.get("skipped", 0), + "failed": outcomes.get("failed", 0), + "errors": outcomes.get("errors", 0), + "xpassed": outcomes.get("xpassed", 0), + "xfailed": outcomes.get("xfailed", 0), + } + expected = { + "passed": passed, + "skipped": skipped, + "failed": failed, + "errors": errors, + "xpassed": xpassed, + "xfailed": xfailed, + } + if warnings is not None: + obtained["warnings"] = outcomes.get("warnings", 0) + expected["warnings"] = warnings + if deselected is not None: + obtained["deselected"] = outcomes.get("deselected", 0) + expected["deselected"] = deselected + assert obtained == expected diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/python.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/python.py new file mode 100644 index 000000000..5e059f2c4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/python.py @@ -0,0 +1,1679 @@ +# mypy: allow-untyped-defs +"""Python test discovery, setup and run of test functions.""" + +import abc +from collections import Counter +from collections import defaultdict +import dataclasses +import enum +import fnmatch +from functools import partial +import inspect +import itertools +import os +from pathlib import Path +import types +from typing import Any +from typing import Callable +from typing import Dict +from typing import final +from typing import Generator +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Literal +from typing import Mapping +from typing import Optional +from typing import Pattern +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union +import warnings + +import _pytest +from _pytest import fixtures +from _pytest import nodes +from _pytest._code import filter_traceback +from _pytest._code import getfslineno +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import TerminalRepr +from _pytest._code.code import Traceback +from _pytest._io.saferepr import saferepr +from _pytest.compat import ascii_escaped +from _pytest.compat import get_default_arg_names +from _pytest.compat import get_real_func +from _pytest.compat import getimfunc +from _pytest.compat import is_async_function +from _pytest.compat 
import is_generator +from _pytest.compat import LEGACY_PATH +from _pytest.compat import NOTSET +from _pytest.compat import safe_getattr +from _pytest.compat import safe_isclass +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import FixtureDef +from _pytest.fixtures import FixtureRequest +from _pytest.fixtures import FuncFixtureInfo +from _pytest.fixtures import get_scope_node +from _pytest.main import Session +from _pytest.mark import MARK_GEN +from _pytest.mark import ParameterSet +from _pytest.mark.structures import get_unpacked_marks +from _pytest.mark.structures import Mark +from _pytest.mark.structures import MarkDecorator +from _pytest.mark.structures import normalize_mark_list +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.pathlib import fnmatch_ex +from _pytest.pathlib import import_path +from _pytest.pathlib import ImportPathMismatchError +from _pytest.pathlib import scandir +from _pytest.scope import _ScopeName +from _pytest.scope import Scope +from _pytest.stash import StashKey +from _pytest.warning_types import PytestCollectionWarning +from _pytest.warning_types import PytestReturnNotNoneWarning +from _pytest.warning_types import PytestUnhandledCoroutineWarning + + +if TYPE_CHECKING: + from typing import Self + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "python_files", + type="args", + # NOTE: default is also used in AssertionRewritingHook. + default=["test_*.py", "*_test.py"], + help="Glob-style file patterns for Python test module discovery", + ) + parser.addini( + "python_classes", + type="args", + default=["Test"], + help="Prefixes or glob names for Python test class discovery", + ) + parser.addini( + "python_functions", + type="args", + default=["test"], + help="Prefixes or glob names for Python test function and method discovery", + ) + parser.addini( + "disable_test_id_escaping_and_forfeit_all_rights_to_community_support", + type="bool", + default=False, + help="Disable string escape non-ASCII characters, might cause unwanted " + "side effects(use at your own risk)", + ) + + +def pytest_generate_tests(metafunc: "Metafunc") -> None: + for marker in metafunc.definition.iter_markers(name="parametrize"): + metafunc.parametrize(*marker.args, **marker.kwargs, _param_mark=marker) + + +def pytest_configure(config: Config) -> None: + config.addinivalue_line( + "markers", + "parametrize(argnames, argvalues): call a test function multiple " + "times passing in different arguments in turn. argvalues generally " + "needs to be a list of values if argnames specifies only one name " + "or a list of tuples of values if argnames specifies multiple names. " + "Example: @parametrize('arg1', [1,2]) would lead to two calls of the " + "decorated test function, one with arg1=1 and another with arg1=2." + "see https://docs.pytest.org/en/stable/how-to/parametrize.html for more info " + "and examples.", + ) + config.addinivalue_line( + "markers", + "usefixtures(fixturename1, fixturename2, ...): mark tests as needing " + "all of the specified fixtures. 
see " + "https://docs.pytest.org/en/stable/explanation/fixtures.html#usefixtures ", + ) + + +def async_warn_and_skip(nodeid: str) -> None: + msg = "async def functions are not natively supported and have been skipped.\n" + msg += ( + "You need to install a suitable plugin for your async framework, for example:\n" + ) + msg += " - anyio\n" + msg += " - pytest-asyncio\n" + msg += " - pytest-tornasync\n" + msg += " - pytest-trio\n" + msg += " - pytest-twisted" + warnings.warn(PytestUnhandledCoroutineWarning(msg.format(nodeid))) + skip(reason="async def function and no async plugin installed (see warnings)") + + +@hookimpl(trylast=True) +def pytest_pyfunc_call(pyfuncitem: "Function") -> Optional[object]: + testfunction = pyfuncitem.obj + if is_async_function(testfunction): + async_warn_and_skip(pyfuncitem.nodeid) + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} + result = testfunction(**testargs) + if hasattr(result, "__await__") or hasattr(result, "__aiter__"): + async_warn_and_skip(pyfuncitem.nodeid) + elif result is not None: + warnings.warn( + PytestReturnNotNoneWarning( + f"Expected None, but {pyfuncitem.nodeid} returned {result!r}, which will be an error in a " + "future version of pytest. Did you mean to use `assert` instead of `return`?" + ) + ) + return True + + +def pytest_collect_directory( + path: Path, parent: nodes.Collector +) -> Optional[nodes.Collector]: + pkginit = path / "__init__.py" + if pkginit.is_file(): + return Package.from_parent(parent, path=path) + return None + + +def pytest_collect_file(file_path: Path, parent: nodes.Collector) -> Optional["Module"]: + if file_path.suffix == ".py": + if not parent.session.isinitpath(file_path): + if not path_matches_patterns( + file_path, parent.config.getini("python_files") + ): + return None + ihook = parent.session.gethookproxy(file_path) + module: Module = ihook.pytest_pycollect_makemodule( + module_path=file_path, parent=parent + ) + return module + return None + + +def path_matches_patterns(path: Path, patterns: Iterable[str]) -> bool: + """Return whether path matches any of the patterns in the list of globs given.""" + return any(fnmatch_ex(pattern, path) for pattern in patterns) + + +def pytest_pycollect_makemodule(module_path: Path, parent) -> "Module": + return Module.from_parent(parent, path=module_path) + + +@hookimpl(trylast=True) +def pytest_pycollect_makeitem( + collector: Union["Module", "Class"], name: str, obj: object +) -> Union[None, nodes.Item, nodes.Collector, List[Union[nodes.Item, nodes.Collector]]]: + assert isinstance(collector, (Class, Module)), type(collector) + # Nothing was collected elsewhere, let's do it here. + if safe_isclass(obj): + if collector.istestclass(obj, name): + return Class.from_parent(collector, name=name, obj=obj) + elif collector.istestfunction(obj, name): + # mock seems to store unbound methods (issue473), normalize it. + obj = getattr(obj, "__func__", obj) + # We need to try and unwrap the function if it's a functools.partial + # or a functools.wrapped. + # We mustn't if it's been wrapped with mock.patch (python 2 only). + if not (inspect.isfunction(obj) or inspect.isfunction(get_real_func(obj))): + filename, lineno = getfslineno(obj) + warnings.warn_explicit( + message=PytestCollectionWarning( + "cannot collect %r because it is not a function." 
% name + ), + category=None, + filename=str(filename), + lineno=lineno + 1, + ) + elif getattr(obj, "__test__", True): + if is_generator(obj): + res = Function.from_parent(collector, name=name) + reason = ( + f"yield tests were removed in pytest 4.0 - {name} will be ignored" + ) + res.add_marker(MARK_GEN.xfail(run=False, reason=reason)) + res.warn(PytestCollectionWarning(reason)) + return res + else: + return list(collector._genfunctions(name, obj)) + return None + + +class PyobjMixin(nodes.Node): + """this mix-in inherits from Node to carry over the typing information + + as its intended to always mix in before a node + its position in the mro is unaffected""" + + _ALLOW_MARKERS = True + + @property + def module(self): + """Python module object this node was collected from (can be None).""" + node = self.getparent(Module) + return node.obj if node is not None else None + + @property + def cls(self): + """Python class object this node was collected from (can be None).""" + node = self.getparent(Class) + return node.obj if node is not None else None + + @property + def instance(self): + """Python instance object the function is bound to. + + Returns None if not a test method, e.g. for a standalone test function, + a class or a module. + """ + # Overridden by Function. + return None + + @property + def obj(self): + """Underlying Python object.""" + obj = getattr(self, "_obj", None) + if obj is None: + self._obj = obj = self._getobj() + # XXX evil hack + # used to avoid Function marker duplication + if self._ALLOW_MARKERS: + self.own_markers.extend(get_unpacked_marks(self.obj)) + # This assumes that `obj` is called before there is a chance + # to add custom keys to `self.keywords`, so no fear of overriding. + self.keywords.update((mark.name, mark) for mark in self.own_markers) + return obj + + @obj.setter + def obj(self, value): + self._obj = value + + def _getobj(self): + """Get the underlying Python object. May be overwritten by subclasses.""" + # TODO: Improve the type of `parent` such that assert/ignore aren't needed. + assert self.parent is not None + obj = self.parent.obj # type: ignore[attr-defined] + return getattr(obj, self.name) + + def getmodpath(self, stopatmodule: bool = True, includemodule: bool = False) -> str: + """Return Python path relative to the containing module.""" + parts = [] + for node in self.iter_parents(): + name = node.name + if isinstance(node, Module): + name = os.path.splitext(name)[0] + if stopatmodule: + if includemodule: + parts.append(name) + break + parts.append(name) + parts.reverse() + return ".".join(parts) + + def reportinfo(self) -> Tuple[Union["os.PathLike[str]", str], Optional[int], str]: + # XXX caching? + path, lineno = getfslineno(self.obj) + modpath = self.getmodpath() + return path, lineno, modpath + + +# As an optimization, these builtin attribute names are pre-ignored when +# iterating over an object during collection -- the pytest_pycollect_makeitem +# hook is not called for them. +# fmt: off +class _EmptyClass: pass # noqa: E701 +IGNORED_ATTRIBUTES = frozenset.union( + frozenset(), + # Module. + dir(types.ModuleType("empty_module")), + # Some extra module attributes the above doesn't catch. + {"__builtins__", "__file__", "__cached__"}, + # Class. + dir(_EmptyClass), + # Instance. 
+ dir(_EmptyClass()), +) +del _EmptyClass +# fmt: on + + +class PyCollector(PyobjMixin, nodes.Collector, abc.ABC): + def funcnamefilter(self, name: str) -> bool: + return self._matches_prefix_or_glob_option("python_functions", name) + + def isnosetest(self, obj: object) -> bool: + """Look for the __test__ attribute, which is applied by the + @nose.tools.istest decorator. + """ + # We explicitly check for "is True" here to not mistakenly treat + # classes with a custom __getattr__ returning something truthy (like a + # function) as test classes. + return safe_getattr(obj, "__test__", False) is True + + def classnamefilter(self, name: str) -> bool: + return self._matches_prefix_or_glob_option("python_classes", name) + + def istestfunction(self, obj: object, name: str) -> bool: + if self.funcnamefilter(name) or self.isnosetest(obj): + if isinstance(obj, (staticmethod, classmethod)): + # staticmethods and classmethods need to be unwrapped. + obj = safe_getattr(obj, "__func__", False) + return callable(obj) and fixtures.getfixturemarker(obj) is None + else: + return False + + def istestclass(self, obj: object, name: str) -> bool: + return self.classnamefilter(name) or self.isnosetest(obj) + + def _matches_prefix_or_glob_option(self, option_name: str, name: str) -> bool: + """Check if the given name matches the prefix or glob-pattern defined + in ini configuration.""" + for option in self.config.getini(option_name): + if name.startswith(option): + return True + # Check that name looks like a glob-string before calling fnmatch + # because this is called for every name in each collected module, + # and fnmatch is somewhat expensive to call. + elif ("*" in option or "?" in option or "[" in option) and fnmatch.fnmatch( + name, option + ): + return True + return False + + def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]: + if not getattr(self.obj, "__test__", True): + return [] + + # Avoid random getattrs and peek in the __dict__ instead. + dicts = [getattr(self.obj, "__dict__", {})] + if isinstance(self.obj, type): + for basecls in self.obj.__mro__: + dicts.append(basecls.__dict__) + + # In each class, nodes should be definition ordered. + # __dict__ is definition ordered. + seen: Set[str] = set() + dict_values: List[List[Union[nodes.Item, nodes.Collector]]] = [] + ihook = self.ihook + for dic in dicts: + values: List[Union[nodes.Item, nodes.Collector]] = [] + # Note: seems like the dict can change during iteration - + # be careful not to remove the list() without consideration. + for name, obj in list(dic.items()): + if name in IGNORED_ATTRIBUTES: + continue + if name in seen: + continue + seen.add(name) + res = ihook.pytest_pycollect_makeitem( + collector=self, name=name, obj=obj + ) + if res is None: + continue + elif isinstance(res, list): + values.extend(res) + else: + values.append(res) + dict_values.append(values) + + # Between classes in the class hierarchy, reverse-MRO order -- nodes + # inherited from base classes should come before subclasses. 
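An illustrative sketch of how the ``python_files`` / ``python_classes`` / ``python_functions`` options feed the prefix-or-glob matching above (file, class and function names are made up):

.. code-block:: python

    def test_custom_discovery(pytester):
        pytester.makeini(
            """
            [pytest]
            python_files = check_*.py
            python_classes = Check*
            python_functions = check_*
            """
        )
        pytester.makepyfile(
            check_example="""
            class CheckThings:
                def check_addition(self):
                    assert 1 + 1 == 2
            """
        )
        result = pytester.runpytest("--collect-only", "-q")
        result.stdout.fnmatch_lines(["*check_example.py::CheckThings::check_addition*"])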
+ result = [] + for values in reversed(dict_values): + result.extend(values) + return result + + def _genfunctions(self, name: str, funcobj) -> Iterator["Function"]: + modulecol = self.getparent(Module) + assert modulecol is not None + module = modulecol.obj + clscol = self.getparent(Class) + cls = clscol and clscol.obj or None + + definition = FunctionDefinition.from_parent(self, name=name, callobj=funcobj) + fixtureinfo = definition._fixtureinfo + + # pytest_generate_tests impls call metafunc.parametrize() which fills + # metafunc._calls, the outcome of the hook. + metafunc = Metafunc( + definition=definition, + fixtureinfo=fixtureinfo, + config=self.config, + cls=cls, + module=module, + _ispytest=True, + ) + methods = [] + if hasattr(module, "pytest_generate_tests"): + methods.append(module.pytest_generate_tests) + if cls is not None and hasattr(cls, "pytest_generate_tests"): + methods.append(cls().pytest_generate_tests) + self.ihook.pytest_generate_tests.call_extra(methods, dict(metafunc=metafunc)) + + if not metafunc._calls: + yield Function.from_parent(self, name=name, fixtureinfo=fixtureinfo) + else: + # Direct parametrizations taking place in module/class-specific + # `metafunc.parametrize` calls may have shadowed some fixtures, so make sure + # we update what the function really needs a.k.a its fixture closure. Note that + # direct parametrizations using `@pytest.mark.parametrize` have already been considered + # into making the closure using `ignore_args` arg to `getfixtureclosure`. + fixtureinfo.prune_dependency_tree() + + for callspec in metafunc._calls: + subname = f"{name}[{callspec.id}]" + yield Function.from_parent( + self, + name=subname, + callspec=callspec, + fixtureinfo=fixtureinfo, + keywords={callspec.id: True}, + originalname=name, + ) + + +def importtestmodule( + path: Path, + config: Config, +): + # We assume we are only called once per module. + importmode = config.getoption("--import-mode") + try: + mod = import_path( + path, + mode=importmode, + root=config.rootpath, + consider_namespace_packages=config.getini("consider_namespace_packages"), + ) + except SyntaxError as e: + raise nodes.Collector.CollectError( + ExceptionInfo.from_current().getrepr(style="short") + ) from e + except ImportPathMismatchError as e: + raise nodes.Collector.CollectError( + "import file mismatch:\n" + "imported module {!r} has this __file__ attribute:\n" + " {}\n" + "which is not the same as the test file we want to collect:\n" + " {}\n" + "HINT: remove __pycache__ / .pyc files and/or use a " + "unique basename for your test file modules".format(*e.args) + ) from e + except ImportError as e: + exc_info = ExceptionInfo.from_current() + if config.getoption("verbose") < 2: + exc_info.traceback = exc_info.traceback.filter(filter_traceback) + exc_repr = ( + exc_info.getrepr(style="short") + if exc_info.traceback + else exc_info.exconly() + ) + formatted_tb = str(exc_repr) + raise nodes.Collector.CollectError( + f"ImportError while importing test module '{path}'.\n" + "Hint: make sure your test modules/packages have valid Python names.\n" + "Traceback:\n" + f"{formatted_tb}" + ) from e + except skip.Exception as e: + if e.allow_module_level: + raise + raise nodes.Collector.CollectError( + "Using pytest.skip outside of a test will skip the entire module. " + "If that's your intention, pass `allow_module_level=True`. " + "If you want to skip a specific test or an entire class, " + "use the @pytest.mark.skip or @pytest.mark.skipif decorators." 
+ ) from e + config.pluginmanager.consider_module(mod) + return mod + + +class Module(nodes.File, PyCollector): + """Collector for test classes and functions in a Python module.""" + + def _getobj(self): + return importtestmodule(self.path, self.config) + + def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]: + self._register_setup_module_fixture() + self._register_setup_function_fixture() + self.session._fixturemanager.parsefactories(self) + return super().collect() + + def _register_setup_module_fixture(self) -> None: + """Register an autouse, module-scoped fixture for the collected module object + that invokes setUpModule/tearDownModule if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_module = _get_first_non_fixture_func( + self.obj, ("setUpModule", "setup_module") + ) + teardown_module = _get_first_non_fixture_func( + self.obj, ("tearDownModule", "teardown_module") + ) + + if setup_module is None and teardown_module is None: + return + + def xunit_setup_module_fixture(request) -> Generator[None, None, None]: + module = request.module + if setup_module is not None: + _call_with_optional_argument(setup_module, module) + yield + if teardown_module is not None: + _call_with_optional_argument(teardown_module, module) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_xunit_setup_module_fixture_{self.obj.__name__}", + func=xunit_setup_module_fixture, + nodeid=self.nodeid, + scope="module", + autouse=True, + ) + + def _register_setup_function_fixture(self) -> None: + """Register an autouse, function-scoped fixture for the collected module object + that invokes setup_function/teardown_function if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_function = _get_first_non_fixture_func(self.obj, ("setup_function",)) + teardown_function = _get_first_non_fixture_func( + self.obj, ("teardown_function",) + ) + if setup_function is None and teardown_function is None: + return + + def xunit_setup_function_fixture(request) -> Generator[None, None, None]: + if request.instance is not None: + # in this case we are bound to an instance, so we need to let + # setup_method handle this + yield + return + function = request.function + if setup_function is not None: + _call_with_optional_argument(setup_function, function) + yield + if teardown_function is not None: + _call_with_optional_argument(teardown_function, function) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_xunit_setup_function_fixture_{self.obj.__name__}", + func=xunit_setup_function_fixture, + nodeid=self.nodeid, + scope="function", + autouse=True, + ) + + +class Package(nodes.Directory): + """Collector for files and directories in a Python packages -- directories + with an `__init__.py` file. + + .. note:: + + Directories without an `__init__.py` file are instead collected by + :class:`~pytest.Dir` by default. Both are :class:`~pytest.Directory` + collectors. + + .. versionchanged:: 8.0 + + Now inherits from :class:`~pytest.Directory`. 
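An illustrative test module (hypothetical, not from the source) showing the xunit-style hooks that the autouse fixtures registered above will invoke:

.. code-block:: python

    # content of test_xunit_style.py
    resource = {}

    def setup_module(module):
        # run once before the first test in this module
        resource["db"] = "connected"

    def teardown_module(module):
        resource.clear()

    def setup_function(function):
        # run before every top-level test function
        resource["calls"] = 0

    def test_uses_resource():
        assert resource["db"] == "connected"
        assert resource["calls"] == 0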
+ """ + + def __init__( + self, + fspath: Optional[LEGACY_PATH], + parent: nodes.Collector, + # NOTE: following args are unused: + config=None, + session=None, + nodeid=None, + path: Optional[Path] = None, + ) -> None: + # NOTE: Could be just the following, but kept as-is for compat. + # super().__init__(self, fspath, parent=parent) + session = parent.session + super().__init__( + fspath=fspath, + path=path, + parent=parent, + config=config, + session=session, + nodeid=nodeid, + ) + + def setup(self) -> None: + init_mod = importtestmodule(self.path / "__init__.py", self.config) + + # Not using fixtures to call setup_module here because autouse fixtures + # from packages are not called automatically (#4085). + setup_module = _get_first_non_fixture_func( + init_mod, ("setUpModule", "setup_module") + ) + if setup_module is not None: + _call_with_optional_argument(setup_module, init_mod) + + teardown_module = _get_first_non_fixture_func( + init_mod, ("tearDownModule", "teardown_module") + ) + if teardown_module is not None: + func = partial(_call_with_optional_argument, teardown_module, init_mod) + self.addfinalizer(func) + + def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]: + # Always collect __init__.py first. + def sort_key(entry: "os.DirEntry[str]") -> object: + return (entry.name != "__init__.py", entry.name) + + config = self.config + col: Optional[nodes.Collector] + cols: Sequence[nodes.Collector] + ihook = self.ihook + for direntry in scandir(self.path, sort_key): + if direntry.is_dir(): + path = Path(direntry.path) + if not self.session.isinitpath(path, with_parents=True): + if ihook.pytest_ignore_collect(collection_path=path, config=config): + continue + col = ihook.pytest_collect_directory(path=path, parent=self) + if col is not None: + yield col + + elif direntry.is_file(): + path = Path(direntry.path) + if not self.session.isinitpath(path): + if ihook.pytest_ignore_collect(collection_path=path, config=config): + continue + cols = ihook.pytest_collect_file(file_path=path, parent=self) + yield from cols + + +def _call_with_optional_argument(func, arg) -> None: + """Call the given function with the given argument if func accepts one argument, otherwise + calls func without arguments.""" + arg_count = func.__code__.co_argcount + if inspect.ismethod(func): + arg_count -= 1 + if arg_count: + func(arg) + else: + func() + + +def _get_first_non_fixture_func(obj: object, names: Iterable[str]) -> Optional[object]: + """Return the attribute from the given object to be used as a setup/teardown + xunit-style function, but only if not marked as a fixture to avoid calling it twice. 
+ """ + for name in names: + meth: Optional[object] = getattr(obj, name, None) + if meth is not None and fixtures.getfixturemarker(meth) is None: + return meth + return None + + +class Class(PyCollector): + """Collector for test methods (and nested classes) in a Python class.""" + + @classmethod + def from_parent(cls, parent, *, name, obj=None, **kw) -> "Self": # type: ignore[override] + """The public constructor.""" + return super().from_parent(name=name, parent=parent, **kw) + + def newinstance(self): + return self.obj() + + def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]: + if not safe_getattr(self.obj, "__test__", True): + return [] + if hasinit(self.obj): + assert self.parent is not None + self.warn( + PytestCollectionWarning( + f"cannot collect test class {self.obj.__name__!r} because it has a " + f"__init__ constructor (from: {self.parent.nodeid})" + ) + ) + return [] + elif hasnew(self.obj): + assert self.parent is not None + self.warn( + PytestCollectionWarning( + f"cannot collect test class {self.obj.__name__!r} because it has a " + f"__new__ constructor (from: {self.parent.nodeid})" + ) + ) + return [] + + self._register_setup_class_fixture() + self._register_setup_method_fixture() + + self.session._fixturemanager.parsefactories(self.newinstance(), self.nodeid) + + return super().collect() + + def _register_setup_class_fixture(self) -> None: + """Register an autouse, class scoped fixture into the collected class object + that invokes setup_class/teardown_class if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_class = _get_first_non_fixture_func(self.obj, ("setup_class",)) + teardown_class = _get_first_non_fixture_func(self.obj, ("teardown_class",)) + if setup_class is None and teardown_class is None: + return + + def xunit_setup_class_fixture(request) -> Generator[None, None, None]: + cls = request.cls + if setup_class is not None: + func = getimfunc(setup_class) + _call_with_optional_argument(func, cls) + yield + if teardown_class is not None: + func = getimfunc(teardown_class) + _call_with_optional_argument(func, cls) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_xunit_setup_class_fixture_{self.obj.__qualname__}", + func=xunit_setup_class_fixture, + nodeid=self.nodeid, + scope="class", + autouse=True, + ) + + def _register_setup_method_fixture(self) -> None: + """Register an autouse, function scoped fixture into the collected class object + that invokes setup_method/teardown_method if either or both are available. + + Using a fixture to invoke these methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_name = "setup_method" + setup_method = _get_first_non_fixture_func(self.obj, (setup_name,)) + teardown_name = "teardown_method" + teardown_method = _get_first_non_fixture_func(self.obj, (teardown_name,)) + if setup_method is None and teardown_method is None: + return + + def xunit_setup_method_fixture(request) -> Generator[None, None, None]: + instance = request.instance + method = request.function + if setup_method is not None: + func = getattr(instance, setup_name) + _call_with_optional_argument(func, method) + yield + if teardown_method is not None: + func = getattr(instance, teardown_name) + _call_with_optional_argument(func, method) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. 
+ name=f"_xunit_setup_method_fixture_{self.obj.__qualname__}", + func=xunit_setup_method_fixture, + nodeid=self.nodeid, + scope="function", + autouse=True, + ) + + +def hasinit(obj: object) -> bool: + init: object = getattr(obj, "__init__", None) + if init: + return init != object.__init__ + return False + + +def hasnew(obj: object) -> bool: + new: object = getattr(obj, "__new__", None) + if new: + return new != object.__new__ + return False + + +@final +@dataclasses.dataclass(frozen=True) +class IdMaker: + """Make IDs for a parametrization.""" + + __slots__ = ( + "argnames", + "parametersets", + "idfn", + "ids", + "config", + "nodeid", + "func_name", + ) + + # The argnames of the parametrization. + argnames: Sequence[str] + # The ParameterSets of the parametrization. + parametersets: Sequence[ParameterSet] + # Optionally, a user-provided callable to make IDs for parameters in a + # ParameterSet. + idfn: Optional[Callable[[Any], Optional[object]]] + # Optionally, explicit IDs for ParameterSets by index. + ids: Optional[Sequence[Optional[object]]] + # Optionally, the pytest config. + # Used for controlling ASCII escaping, and for calling the + # :hook:`pytest_make_parametrize_id` hook. + config: Optional[Config] + # Optionally, the ID of the node being parametrized. + # Used only for clearer error messages. + nodeid: Optional[str] + # Optionally, the ID of the function being parametrized. + # Used only for clearer error messages. + func_name: Optional[str] + + def make_unique_parameterset_ids(self) -> List[str]: + """Make a unique identifier for each ParameterSet, that may be used to + identify the parametrization in a node ID. + + Format is -...-[counter], where prm_x_token is + - user-provided id, if given + - else an id derived from the value, applicable for certain types + - else + The counter suffix is appended only in case a string wouldn't be unique + otherwise. + """ + resolved_ids = list(self._resolve_ids()) + # All IDs must be unique! + if len(resolved_ids) != len(set(resolved_ids)): + # Record the number of occurrences of each ID. + id_counts = Counter(resolved_ids) + # Map the ID to its next suffix. + id_suffixes: Dict[str, int] = defaultdict(int) + # Suffix non-unique IDs to make them unique. + for index, id in enumerate(resolved_ids): + if id_counts[id] > 1: + suffix = "" + if id and id[-1].isdigit(): + suffix = "_" + new_id = f"{id}{suffix}{id_suffixes[id]}" + while new_id in set(resolved_ids): + id_suffixes[id] += 1 + new_id = f"{id}{suffix}{id_suffixes[id]}" + resolved_ids[index] = new_id + id_suffixes[id] += 1 + assert len(resolved_ids) == len( + set(resolved_ids) + ), f"Internal error: {resolved_ids=}" + return resolved_ids + + def _resolve_ids(self) -> Iterable[str]: + """Resolve IDs for all ParameterSets (may contain duplicates).""" + for idx, parameterset in enumerate(self.parametersets): + if parameterset.id is not None: + # ID provided directly - pytest.param(..., id="...") + yield parameterset.id + elif self.ids and idx < len(self.ids) and self.ids[idx] is not None: + # ID provided in the IDs list - parametrize(..., ids=[...]). + yield self._idval_from_value_required(self.ids[idx], idx) + else: + # ID not provided - generate it. 
+ yield "-".join( + self._idval(val, argname, idx) + for val, argname in zip(parameterset.values, self.argnames) + ) + + def _idval(self, val: object, argname: str, idx: int) -> str: + """Make an ID for a parameter in a ParameterSet.""" + idval = self._idval_from_function(val, argname, idx) + if idval is not None: + return idval + idval = self._idval_from_hook(val, argname) + if idval is not None: + return idval + idval = self._idval_from_value(val) + if idval is not None: + return idval + return self._idval_from_argname(argname, idx) + + def _idval_from_function( + self, val: object, argname: str, idx: int + ) -> Optional[str]: + """Try to make an ID for a parameter in a ParameterSet using the + user-provided id callable, if given.""" + if self.idfn is None: + return None + try: + id = self.idfn(val) + except Exception as e: + prefix = f"{self.nodeid}: " if self.nodeid is not None else "" + msg = "error raised while trying to determine id of parameter '{}' at position {}" + msg = prefix + msg.format(argname, idx) + raise ValueError(msg) from e + if id is None: + return None + return self._idval_from_value(id) + + def _idval_from_hook(self, val: object, argname: str) -> Optional[str]: + """Try to make an ID for a parameter in a ParameterSet by calling the + :hook:`pytest_make_parametrize_id` hook.""" + if self.config: + id: Optional[str] = self.config.hook.pytest_make_parametrize_id( + config=self.config, val=val, argname=argname + ) + return id + return None + + def _idval_from_value(self, val: object) -> Optional[str]: + """Try to make an ID for a parameter in a ParameterSet from its value, + if the value type is supported.""" + if isinstance(val, (str, bytes)): + return _ascii_escaped_by_config(val, self.config) + elif val is None or isinstance(val, (float, int, bool, complex)): + return str(val) + elif isinstance(val, Pattern): + return ascii_escaped(val.pattern) + elif val is NOTSET: + # Fallback to default. Note that NOTSET is an enum.Enum. + pass + elif isinstance(val, enum.Enum): + return str(val) + elif isinstance(getattr(val, "__name__", None), str): + # Name of a class, function, module, etc. + name: str = getattr(val, "__name__") + return name + return None + + def _idval_from_value_required(self, val: object, idx: int) -> str: + """Like _idval_from_value(), but fails if the type is not supported.""" + id = self._idval_from_value(val) + if id is not None: + return id + + # Fail. + if self.func_name is not None: + prefix = f"In {self.func_name}: " + elif self.nodeid is not None: + prefix = f"In {self.nodeid}: " + else: + prefix = "" + msg = ( + f"{prefix}ids contains unsupported value {saferepr(val)} (type: {type(val)!r}) at index {idx}. " + "Supported types are: str, bytes, int, float, complex, bool, enum, regex or anything with a __name__." + ) + fail(msg, pytrace=False) + + @staticmethod + def _idval_from_argname(argname: str, idx: int) -> str: + """Make an ID for a parameter in a ParameterSet from the argument name + and the index of the ParameterSet.""" + return str(argname) + str(idx) + + +@final +@dataclasses.dataclass(frozen=True) +class CallSpec2: + """A planned parameterized invocation of a test function. + + Calculated during collection for a given test function's Metafunc. + Once collection is over, each callspec is turned into a single Item + and stored in item.callspec. + """ + + # arg name -> arg value which will be passed to a fixture or pseudo-fixture + # of the same name. 
(indirect or direct parametrization respectively) + params: Dict[str, object] = dataclasses.field(default_factory=dict) + # arg name -> arg index. + indices: Dict[str, int] = dataclasses.field(default_factory=dict) + # Used for sorting parametrized resources. + _arg2scope: Mapping[str, Scope] = dataclasses.field(default_factory=dict) + # Parts which will be added to the item's name in `[..]` separated by "-". + _idlist: Sequence[str] = dataclasses.field(default_factory=tuple) + # Marks which will be applied to the item. + marks: List[Mark] = dataclasses.field(default_factory=list) + + def setmulti( + self, + *, + argnames: Iterable[str], + valset: Iterable[object], + id: str, + marks: Iterable[Union[Mark, MarkDecorator]], + scope: Scope, + param_index: int, + ) -> "CallSpec2": + params = self.params.copy() + indices = self.indices.copy() + arg2scope = dict(self._arg2scope) + for arg, val in zip(argnames, valset): + if arg in params: + raise ValueError(f"duplicate parametrization of {arg!r}") + params[arg] = val + indices[arg] = param_index + arg2scope[arg] = scope + return CallSpec2( + params=params, + indices=indices, + _arg2scope=arg2scope, + _idlist=[*self._idlist, id], + marks=[*self.marks, *normalize_mark_list(marks)], + ) + + def getparam(self, name: str) -> object: + try: + return self.params[name] + except KeyError as e: + raise ValueError(name) from e + + @property + def id(self) -> str: + return "-".join(self._idlist) + + +def get_direct_param_fixture_func(request: FixtureRequest) -> Any: + return request.param + + +# Used for storing pseudo fixturedefs for direct parametrization. +name2pseudofixturedef_key = StashKey[Dict[str, FixtureDef[Any]]]() + + +@final +class Metafunc: + """Objects passed to the :hook:`pytest_generate_tests` hook. + + They help to inspect a test function and to generate tests according to + test configuration or values specified in the class or module where a + test function is defined. + """ + + def __init__( + self, + definition: "FunctionDefinition", + fixtureinfo: fixtures.FuncFixtureInfo, + config: Config, + cls=None, + module=None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + + #: Access to the underlying :class:`_pytest.python.FunctionDefinition`. + self.definition = definition + + #: Access to the :class:`pytest.Config` object for the test session. + self.config = config + + #: The module object where the test function is defined in. + self.module = module + + #: Underlying Python test function. + self.function = definition.obj + + #: Set of fixture names required by the test function. + self.fixturenames = fixtureinfo.names_closure + + #: Class object where the test function is defined in or ``None``. + self.cls = cls + + self._arg2fixturedefs = fixtureinfo.name2fixturedefs + + # Result of parametrize(). + self._calls: List[CallSpec2] = [] + + def parametrize( + self, + argnames: Union[str, Sequence[str]], + argvalues: Iterable[Union[ParameterSet, Sequence[object], object]], + indirect: Union[bool, Sequence[str]] = False, + ids: Optional[ + Union[Iterable[Optional[object]], Callable[[Any], Optional[object]]] + ] = None, + scope: Optional[_ScopeName] = None, + *, + _param_mark: Optional[Mark] = None, + ) -> None: + """Add new invocations to the underlying test function using the list + of argvalues for the given argnames. Parametrization is performed + during the collection phase. If you need to setup expensive resources + see about setting indirect to do it rather than at test setup time. 
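+
+        A minimal usage sketch (the names here are illustrative, not part of
+        this method's API)::
+
+            @pytest.mark.parametrize("n,expected", [(1, 2), (3, 4)])
+            def test_increment(n, expected):
+                assert n + 1 == expected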
+ + Can be called multiple times per test function (but only on different + argument names), in which case each call parametrizes all previous + parametrizations, e.g. + + :: + + unparametrized: t + parametrize ["x", "y"]: t[x], t[y] + parametrize [1, 2]: t[x-1], t[x-2], t[y-1], t[y-2] + + :param argnames: + A comma-separated string denoting one or more argument names, or + a list/tuple of argument strings. + + :param argvalues: + The list of argvalues determines how often a test is invoked with + different argument values. + + If only one argname was specified argvalues is a list of values. + If N argnames were specified, argvalues must be a list of + N-tuples, where each tuple-element specifies a value for its + respective argname. + + :param indirect: + A list of arguments' names (subset of argnames) or a boolean. + If True the list contains all names from the argnames. Each + argvalue corresponding to an argname in this list will + be passed as request.param to its respective argname fixture + function so that it can perform more expensive setups during the + setup phase of a test rather than at collection time. + + :param ids: + Sequence of (or generator for) ids for ``argvalues``, + or a callable to return part of the id for each argvalue. + + With sequences (and generators like ``itertools.count()``) the + returned ids should be of type ``string``, ``int``, ``float``, + ``bool``, or ``None``. + They are mapped to the corresponding index in ``argvalues``. + ``None`` means to use the auto-generated id. + + If it is a callable it will be called for each entry in + ``argvalues``, and the return value is used as part of the + auto-generated id for the whole set (where parts are joined with + dashes ("-")). + This is useful to provide more specific ids for certain items, e.g. + dates. Returning ``None`` will use an auto-generated id. + + If no ids are provided they will be generated automatically from + the argvalues. + + :param scope: + If specified it denotes the scope of the parameters. + The scope is used for grouping tests by parameter instances. + It will also override any fixture-function defined scope, allowing + to set a dynamic scope using test context or configuration. + """ + argnames, parametersets = ParameterSet._for_parametrize( + argnames, + argvalues, + self.function, + self.config, + nodeid=self.definition.nodeid, + ) + del argvalues + + if "request" in argnames: + fail( + "'request' is a reserved name and cannot be used in @pytest.mark.parametrize", + pytrace=False, + ) + + if scope is not None: + scope_ = Scope.from_user( + scope, descr=f"parametrize() call in {self.function.__name__}" + ) + else: + scope_ = _find_parametrized_scope(argnames, self._arg2fixturedefs, indirect) + + self._validate_if_using_arg_names(argnames, indirect) + + # Use any already (possibly) generated ids with parametrize Marks. + if _param_mark and _param_mark._param_ids_from: + generated_ids = _param_mark._param_ids_from._param_ids_generated + if generated_ids is not None: + ids = generated_ids + + ids = self._resolve_parameter_set_ids( + argnames, ids, parametersets, nodeid=self.definition.nodeid + ) + + # Store used (possibly generated) ids with parametrize Marks. 
+ if _param_mark and _param_mark._param_ids_from and generated_ids is None: + object.__setattr__(_param_mark._param_ids_from, "_param_ids_generated", ids) + + # Add funcargs as fixturedefs to fixtureinfo.arg2fixturedefs by registering + # artificial "pseudo" FixtureDef's so that later at test execution time we can + # rely on a proper FixtureDef to exist for fixture setup. + node = None + # If we have a scope that is higher than function, we need + # to make sure we only ever create an according fixturedef on + # a per-scope basis. We thus store and cache the fixturedef on the + # node related to the scope. + if scope_ is not Scope.Function: + collector = self.definition.parent + assert collector is not None + node = get_scope_node(collector, scope_) + if node is None: + # If used class scope and there is no class, use module-level + # collector (for now). + if scope_ is Scope.Class: + assert isinstance(collector, Module) + node = collector + # If used package scope and there is no package, use session + # (for now). + elif scope_ is Scope.Package: + node = collector.session + else: + assert False, f"Unhandled missing scope: {scope}" + if node is None: + name2pseudofixturedef = None + else: + default: Dict[str, FixtureDef[Any]] = {} + name2pseudofixturedef = node.stash.setdefault( + name2pseudofixturedef_key, default + ) + arg_directness = self._resolve_args_directness(argnames, indirect) + for argname in argnames: + if arg_directness[argname] == "indirect": + continue + if name2pseudofixturedef is not None and argname in name2pseudofixturedef: + fixturedef = name2pseudofixturedef[argname] + else: + fixturedef = FixtureDef( + config=self.config, + baseid="", + argname=argname, + func=get_direct_param_fixture_func, + scope=scope_, + params=None, + ids=None, + _ispytest=True, + ) + if name2pseudofixturedef is not None: + name2pseudofixturedef[argname] = fixturedef + self._arg2fixturedefs[argname] = [fixturedef] + + # Create the new calls: if we are parametrize() multiple times (by applying the decorator + # more than once) then we accumulate those calls generating the cartesian product + # of all calls. + newcalls = [] + for callspec in self._calls or [CallSpec2()]: + for param_index, (param_id, param_set) in enumerate( + zip(ids, parametersets) + ): + newcallspec = callspec.setmulti( + argnames=argnames, + valset=param_set.values, + id=param_id, + marks=param_set.marks, + scope=scope_, + param_index=param_index, + ) + newcalls.append(newcallspec) + self._calls = newcalls + + def _resolve_parameter_set_ids( + self, + argnames: Sequence[str], + ids: Optional[ + Union[Iterable[Optional[object]], Callable[[Any], Optional[object]]] + ], + parametersets: Sequence[ParameterSet], + nodeid: str, + ) -> List[str]: + """Resolve the actual ids for the given parameter sets. + + :param argnames: + Argument names passed to ``parametrize()``. + :param ids: + The `ids` parameter of the ``parametrize()`` call (see docs). + :param parametersets: + The parameter sets, each containing a set of values corresponding + to ``argnames``. + :param nodeid str: + The nodeid of the definition item that generated this + parametrization. + :returns: + List with ids for each parameter set given. 
+ """ + if ids is None: + idfn = None + ids_ = None + elif callable(ids): + idfn = ids + ids_ = None + else: + idfn = None + ids_ = self._validate_ids(ids, parametersets, self.function.__name__) + id_maker = IdMaker( + argnames, + parametersets, + idfn, + ids_, + self.config, + nodeid=nodeid, + func_name=self.function.__name__, + ) + return id_maker.make_unique_parameterset_ids() + + def _validate_ids( + self, + ids: Iterable[Optional[object]], + parametersets: Sequence[ParameterSet], + func_name: str, + ) -> List[Optional[object]]: + try: + num_ids = len(ids) # type: ignore[arg-type] + except TypeError: + try: + iter(ids) + except TypeError as e: + raise TypeError("ids must be a callable or an iterable") from e + num_ids = len(parametersets) + + # num_ids == 0 is a special case: https://github.com/pytest-dev/pytest/issues/1849 + if num_ids != len(parametersets) and num_ids != 0: + msg = "In {}: {} parameter sets specified, with different number of ids: {}" + fail(msg.format(func_name, len(parametersets), num_ids), pytrace=False) + + return list(itertools.islice(ids, num_ids)) + + def _resolve_args_directness( + self, + argnames: Sequence[str], + indirect: Union[bool, Sequence[str]], + ) -> Dict[str, Literal["indirect", "direct"]]: + """Resolve if each parametrized argument must be considered an indirect + parameter to a fixture of the same name, or a direct parameter to the + parametrized function, based on the ``indirect`` parameter of the + parametrized() call. + + :param argnames: + List of argument names passed to ``parametrize()``. + :param indirect: + Same as the ``indirect`` parameter of ``parametrize()``. + :returns + A dict mapping each arg name to either "indirect" or "direct". + """ + arg_directness: Dict[str, Literal["indirect", "direct"]] + if isinstance(indirect, bool): + arg_directness = dict.fromkeys( + argnames, "indirect" if indirect else "direct" + ) + elif isinstance(indirect, Sequence): + arg_directness = dict.fromkeys(argnames, "direct") + for arg in indirect: + if arg not in argnames: + fail( + f"In {self.function.__name__}: indirect fixture '{arg}' doesn't exist", + pytrace=False, + ) + arg_directness[arg] = "indirect" + else: + fail( + f"In {self.function.__name__}: expected Sequence or boolean" + f" for indirect, got {type(indirect).__name__}", + pytrace=False, + ) + return arg_directness + + def _validate_if_using_arg_names( + self, + argnames: Sequence[str], + indirect: Union[bool, Sequence[str]], + ) -> None: + """Check if all argnames are being used, by default values, or directly/indirectly. + + :param List[str] argnames: List of argument names passed to ``parametrize()``. + :param indirect: Same as the ``indirect`` parameter of ``parametrize()``. + :raises ValueError: If validation fails. 
+ """ + default_arg_names = set(get_default_arg_names(self.function)) + func_name = self.function.__name__ + for arg in argnames: + if arg not in self.fixturenames: + if arg in default_arg_names: + fail( + f"In {func_name}: function already takes an argument '{arg}' with a default value", + pytrace=False, + ) + else: + if isinstance(indirect, Sequence): + name = "fixture" if arg in indirect else "argument" + else: + name = "fixture" if indirect else "argument" + fail( + f"In {func_name}: function uses no {name} '{arg}'", + pytrace=False, + ) + + +def _find_parametrized_scope( + argnames: Sequence[str], + arg2fixturedefs: Mapping[str, Sequence[fixtures.FixtureDef[object]]], + indirect: Union[bool, Sequence[str]], +) -> Scope: + """Find the most appropriate scope for a parametrized call based on its arguments. + + When there's at least one direct argument, always use "function" scope. + + When a test function is parametrized and all its arguments are indirect + (e.g. fixtures), return the most narrow scope based on the fixtures used. + + Related to issue #1832, based on code posted by @Kingdread. + """ + if isinstance(indirect, Sequence): + all_arguments_are_fixtures = len(indirect) == len(argnames) + else: + all_arguments_are_fixtures = bool(indirect) + + if all_arguments_are_fixtures: + fixturedefs = arg2fixturedefs or {} + used_scopes = [ + fixturedef[-1]._scope + for name, fixturedef in fixturedefs.items() + if name in argnames + ] + # Takes the most narrow scope from used fixtures. + return min(used_scopes, default=Scope.Function) + + return Scope.Function + + +def _ascii_escaped_by_config(val: Union[str, bytes], config: Optional[Config]) -> str: + if config is None: + escape_option = False + else: + escape_option = config.getini( + "disable_test_id_escaping_and_forfeit_all_rights_to_community_support" + ) + # TODO: If escaping is turned off and the user passes bytes, + # will return a bytes. For now we ignore this but the + # code *probably* doesn't handle this case. + return val if escape_option else ascii_escaped(val) # type: ignore + + +class Function(PyobjMixin, nodes.Item): + """Item responsible for setting up and executing a Python test function. + + :param name: + The full function name, including any decorations like those + added by parametrization (``my_func[my_param]``). + :param parent: + The parent Node. + :param config: + The pytest Config object. + :param callspec: + If given, this function has been parametrized and the callspec contains + meta information about the parametrization. + :param callobj: + If given, the object which will be called when the Function is invoked, + otherwise the callobj will be obtained from ``parent`` using ``originalname``. + :param keywords: + Keywords bound to the function object for "-k" matching. + :param session: + The pytest Session object. + :param fixtureinfo: + Fixture information already resolved at this fixture node.. + :param originalname: + The attribute name to use for accessing the underlying function object. + Defaults to ``name``. Set this if name is different from the original name, + for example when it contains decorations like those added by parametrization + (``my_func[my_param]``). + """ + + # Disable since functions handle it themselves. 
+ _ALLOW_MARKERS = False + + def __init__( + self, + name: str, + parent, + config: Optional[Config] = None, + callspec: Optional[CallSpec2] = None, + callobj=NOTSET, + keywords: Optional[Mapping[str, Any]] = None, + session: Optional[Session] = None, + fixtureinfo: Optional[FuncFixtureInfo] = None, + originalname: Optional[str] = None, + ) -> None: + super().__init__(name, parent, config=config, session=session) + + if callobj is not NOTSET: + self._obj = callobj + self._instance = getattr(callobj, "__self__", None) + + #: Original function name, without any decorations (for example + #: parametrization adds a ``"[...]"`` suffix to function names), used to access + #: the underlying function object from ``parent`` (in case ``callobj`` is not given + #: explicitly). + #: + #: .. versionadded:: 3.0 + self.originalname = originalname or name + + # Note: when FunctionDefinition is introduced, we should change ``originalname`` + # to a readonly property that returns FunctionDefinition.name. + + self.own_markers.extend(get_unpacked_marks(self.obj)) + if callspec: + self.callspec = callspec + self.own_markers.extend(callspec.marks) + + # todo: this is a hell of a hack + # https://github.com/pytest-dev/pytest/issues/4569 + # Note: the order of the updates is important here; indicates what + # takes priority (ctor argument over function attributes over markers). + # Take own_markers only; NodeKeywords handles parent traversal on its own. + self.keywords.update((mark.name, mark) for mark in self.own_markers) + self.keywords.update(self.obj.__dict__) + if keywords: + self.keywords.update(keywords) + + if fixtureinfo is None: + fm = self.session._fixturemanager + fixtureinfo = fm.getfixtureinfo(self, self.obj, self.cls) + self._fixtureinfo: FuncFixtureInfo = fixtureinfo + self.fixturenames = fixtureinfo.names_closure + self._initrequest() + + # todo: determine sound type limitations + @classmethod + def from_parent(cls, parent, **kw) -> "Self": + """The public constructor.""" + return super().from_parent(parent=parent, **kw) + + def _initrequest(self) -> None: + self.funcargs: Dict[str, object] = {} + self._request = fixtures.TopRequest(self, _ispytest=True) + + @property + def function(self): + """Underlying python 'function' object.""" + return getimfunc(self.obj) + + @property + def instance(self): + try: + return self._instance + except AttributeError: + if isinstance(self.parent, Class): + # Each Function gets a fresh class instance. + self._instance = self._getinstance() + else: + self._instance = None + return self._instance + + def _getinstance(self): + if isinstance(self.parent, Class): + # Each Function gets a fresh class instance. 
+ return self.parent.newinstance() + else: + return None + + def _getobj(self): + instance = self.instance + if instance is not None: + parent_obj = instance + else: + assert self.parent is not None + parent_obj = self.parent.obj # type: ignore[attr-defined] + return getattr(parent_obj, self.originalname) + + @property + def _pyfuncitem(self): + """(compatonly) for code expecting pytest-2.2 style request objects.""" + return self + + def runtest(self) -> None: + """Execute the underlying test function.""" + self.ihook.pytest_pyfunc_call(pyfuncitem=self) + + def setup(self) -> None: + self._request._fillfixtures() + + def _traceback_filter(self, excinfo: ExceptionInfo[BaseException]) -> Traceback: + if hasattr(self, "_obj") and not self.config.getoption("fulltrace", False): + code = _pytest._code.Code.from_function(get_real_func(self.obj)) + path, firstlineno = code.path, code.firstlineno + traceback = excinfo.traceback + ntraceback = traceback.cut(path=path, firstlineno=firstlineno) + if ntraceback == traceback: + ntraceback = ntraceback.cut(path=path) + if ntraceback == traceback: + ntraceback = ntraceback.filter(filter_traceback) + if not ntraceback: + ntraceback = traceback + ntraceback = ntraceback.filter(excinfo) + + # issue364: mark all but first and last frames to + # only show a single-line message for each frame. + if self.config.getoption("tbstyle", "auto") == "auto": + if len(ntraceback) > 2: + ntraceback = Traceback( + ( + ntraceback[0], + *(t.with_repr_style("short") for t in ntraceback[1:-1]), + ntraceback[-1], + ) + ) + + return ntraceback + return excinfo.traceback + + # TODO: Type ignored -- breaks Liskov Substitution. + def repr_failure( # type: ignore[override] + self, + excinfo: ExceptionInfo[BaseException], + ) -> Union[str, TerminalRepr]: + style = self.config.getoption("tbstyle", "auto") + if style == "auto": + style = "long" + return self._repr_failure_py(excinfo, style=style) + + +class FunctionDefinition(Function): + """This class is a stop gap solution until we evolve to have actual function + definition nodes and manage to get rid of ``metafunc``.""" + + def runtest(self) -> None: + raise RuntimeError("function definitions are not supposed to be run as tests") + + setup = runtest diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/python_api.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/python_api.py new file mode 100644 index 000000000..7d89fdd80 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/python_api.py @@ -0,0 +1,1006 @@ +# mypy: allow-untyped-defs +from collections.abc import Collection +from collections.abc import Sized +from decimal import Decimal +import math +from numbers import Complex +import pprint +from types import TracebackType +from typing import Any +from typing import Callable +from typing import cast +from typing import ContextManager +from typing import final +from typing import List +from typing import Mapping +from typing import Optional +from typing import overload +from typing import Pattern +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +import _pytest._code +from _pytest.outcomes import fail + + +if TYPE_CHECKING: + from numpy import ndarray + + +def _compare_approx( + full_object: object, + message_data: Sequence[Tuple[str, str, str]], + number_of_elements: int, + different_ids: Sequence[object], + max_abs_diff: float, + max_rel_diff: float, +) -> 
List[str]: + message_list = list(message_data) + message_list.insert(0, ("Index", "Obtained", "Expected")) + max_sizes = [0, 0, 0] + for index, obtained, expected in message_list: + max_sizes[0] = max(max_sizes[0], len(index)) + max_sizes[1] = max(max_sizes[1], len(obtained)) + max_sizes[2] = max(max_sizes[2], len(expected)) + explanation = [ + f"comparison failed. Mismatched elements: {len(different_ids)} / {number_of_elements}:", + f"Max absolute difference: {max_abs_diff}", + f"Max relative difference: {max_rel_diff}", + ] + [ + f"{indexes:<{max_sizes[0]}} | {obtained:<{max_sizes[1]}} | {expected:<{max_sizes[2]}}" + for indexes, obtained, expected in message_list + ] + return explanation + + +# builtin pytest.approx helper + + +class ApproxBase: + """Provide shared utilities for making approximate comparisons between + numbers or sequences of numbers.""" + + # Tell numpy to use our `__eq__` operator instead of its. + __array_ufunc__ = None + __array_priority__ = 100 + + def __init__(self, expected, rel=None, abs=None, nan_ok: bool = False) -> None: + __tracebackhide__ = True + self.expected = expected + self.abs = abs + self.rel = rel + self.nan_ok = nan_ok + self._check_type() + + def __repr__(self) -> str: + raise NotImplementedError + + def _repr_compare(self, other_side: Any) -> List[str]: + return [ + "comparison failed", + f"Obtained: {other_side}", + f"Expected: {self}", + ] + + def __eq__(self, actual) -> bool: + return all( + a == self._approx_scalar(x) for a, x in self._yield_comparisons(actual) + ) + + def __bool__(self): + __tracebackhide__ = True + raise AssertionError( + "approx() is not supported in a boolean context.\nDid you mean: `assert a == approx(b)`?" + ) + + # Ignore type because of https://github.com/python/mypy/issues/4266. + __hash__ = None # type: ignore + + def __ne__(self, actual) -> bool: + return not (actual == self) + + def _approx_scalar(self, x) -> "ApproxScalar": + if isinstance(x, Decimal): + return ApproxDecimal(x, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok) + return ApproxScalar(x, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok) + + def _yield_comparisons(self, actual): + """Yield all the pairs of numbers to be compared. + + This is used to implement the `__eq__` method. + """ + raise NotImplementedError + + def _check_type(self) -> None: + """Raise a TypeError if the expected value is not a valid type.""" + # This is only a concern if the expected value is a sequence. In every + # other case, the approx() function ensures that the expected value has + # a numeric type. For this reason, the default is to do nothing. The + # classes that deal with sequences should reimplement this method to + # raise if there are any non-numeric elements in the sequence. + + +def _recursive_sequence_map(f, x): + """Recursively map a function over a sequence of arbitrary depth""" + if isinstance(x, (list, tuple)): + seq_type = type(x) + return seq_type(_recursive_sequence_map(f, xi) for xi in x) + else: + return f(x) + + +class ApproxNumpy(ApproxBase): + """Perform approximate comparisons where the expected value is numpy array.""" + + def __repr__(self) -> str: + list_scalars = _recursive_sequence_map( + self._approx_scalar, self.expected.tolist() + ) + return f"approx({list_scalars!r})" + + def _repr_compare(self, other_side: Union["ndarray", List[Any]]) -> List[str]: + import itertools + import math + + def get_value_from_nested_list( + nested_list: List[Any], nd_index: Tuple[Any, ...] 
+ ) -> Any: + """ + Helper function to get the value out of a nested list, given an n-dimensional index. + This mimics numpy's indexing, but for raw nested python lists. + """ + value: Any = nested_list + for i in nd_index: + value = value[i] + return value + + np_array_shape = self.expected.shape + approx_side_as_seq = _recursive_sequence_map( + self._approx_scalar, self.expected.tolist() + ) + + # convert other_side to numpy array to ensure shape attribute is available + other_side_as_array = _as_numpy_array(other_side) + assert other_side_as_array is not None + + if np_array_shape != other_side_as_array.shape: + return [ + "Impossible to compare arrays with different shapes.", + f"Shapes: {np_array_shape} and {other_side_as_array.shape}", + ] + + number_of_elements = self.expected.size + max_abs_diff = -math.inf + max_rel_diff = -math.inf + different_ids = [] + for index in itertools.product(*(range(i) for i in np_array_shape)): + approx_value = get_value_from_nested_list(approx_side_as_seq, index) + other_value = get_value_from_nested_list(other_side_as_array, index) + if approx_value != other_value: + abs_diff = abs(approx_value.expected - other_value) + max_abs_diff = max(max_abs_diff, abs_diff) + if other_value == 0.0: + max_rel_diff = math.inf + else: + max_rel_diff = max(max_rel_diff, abs_diff / abs(other_value)) + different_ids.append(index) + + message_data = [ + ( + str(index), + str(get_value_from_nested_list(other_side_as_array, index)), + str(get_value_from_nested_list(approx_side_as_seq, index)), + ) + for index in different_ids + ] + return _compare_approx( + self.expected, + message_data, + number_of_elements, + different_ids, + max_abs_diff, + max_rel_diff, + ) + + def __eq__(self, actual) -> bool: + import numpy as np + + # self.expected is supposed to always be an array here. + + if not np.isscalar(actual): + try: + actual = np.asarray(actual) + except Exception as e: + raise TypeError(f"cannot compare '{actual}' to numpy.ndarray") from e + + if not np.isscalar(actual) and actual.shape != self.expected.shape: + return False + + return super().__eq__(actual) + + def _yield_comparisons(self, actual): + import numpy as np + + # `actual` can either be a numpy array or a scalar, it is treated in + # `__eq__` before being passed to `ApproxBase.__eq__`, which is the + # only method that calls this one. 
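+        # Illustrative sketch: comparing np.array([1.0, 2.0]) against
+        # approx(np.array([1.0, 2.1])) yields the pairs (1.0, 1.0) and
+        # (2.0, 2.1), i.e. (actual element, expected element) per index.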
+ + if np.isscalar(actual): + for i in np.ndindex(self.expected.shape): + yield actual, self.expected[i].item() + else: + for i in np.ndindex(self.expected.shape): + yield actual[i].item(), self.expected[i].item() + + +class ApproxMapping(ApproxBase): + """Perform approximate comparisons where the expected value is a mapping + with numeric values (the keys can be anything).""" + + def __repr__(self) -> str: + return f"approx({({k: self._approx_scalar(v) for k, v in self.expected.items()})!r})" + + def _repr_compare(self, other_side: Mapping[object, float]) -> List[str]: + import math + + approx_side_as_map = { + k: self._approx_scalar(v) for k, v in self.expected.items() + } + + number_of_elements = len(approx_side_as_map) + max_abs_diff = -math.inf + max_rel_diff = -math.inf + different_ids = [] + for (approx_key, approx_value), other_value in zip( + approx_side_as_map.items(), other_side.values() + ): + if approx_value != other_value: + if approx_value.expected is not None and other_value is not None: + max_abs_diff = max( + max_abs_diff, abs(approx_value.expected - other_value) + ) + if approx_value.expected == 0.0: + max_rel_diff = math.inf + else: + max_rel_diff = max( + max_rel_diff, + abs( + (approx_value.expected - other_value) + / approx_value.expected + ), + ) + different_ids.append(approx_key) + + message_data = [ + (str(key), str(other_side[key]), str(approx_side_as_map[key])) + for key in different_ids + ] + + return _compare_approx( + self.expected, + message_data, + number_of_elements, + different_ids, + max_abs_diff, + max_rel_diff, + ) + + def __eq__(self, actual) -> bool: + try: + if set(actual.keys()) != set(self.expected.keys()): + return False + except AttributeError: + return False + + return super().__eq__(actual) + + def _yield_comparisons(self, actual): + for k in self.expected.keys(): + yield actual[k], self.expected[k] + + def _check_type(self) -> None: + __tracebackhide__ = True + for key, value in self.expected.items(): + if isinstance(value, type(self.expected)): + msg = "pytest.approx() does not support nested dictionaries: key={!r} value={!r}\n full mapping={}" + raise TypeError(msg.format(key, value, pprint.pformat(self.expected))) + + +class ApproxSequenceLike(ApproxBase): + """Perform approximate comparisons where the expected value is a sequence of numbers.""" + + def __repr__(self) -> str: + seq_type = type(self.expected) + if seq_type not in (tuple, list): + seq_type = list + return f"approx({seq_type(self._approx_scalar(x) for x in self.expected)!r})" + + def _repr_compare(self, other_side: Sequence[float]) -> List[str]: + import math + + if len(self.expected) != len(other_side): + return [ + "Impossible to compare lists with different sizes.", + f"Lengths: {len(self.expected)} and {len(other_side)}", + ] + + approx_side_as_map = _recursive_sequence_map(self._approx_scalar, self.expected) + + number_of_elements = len(approx_side_as_map) + max_abs_diff = -math.inf + max_rel_diff = -math.inf + different_ids = [] + for i, (approx_value, other_value) in enumerate( + zip(approx_side_as_map, other_side) + ): + if approx_value != other_value: + abs_diff = abs(approx_value.expected - other_value) + max_abs_diff = max(max_abs_diff, abs_diff) + if other_value == 0.0: + max_rel_diff = math.inf + else: + max_rel_diff = max(max_rel_diff, abs_diff / abs(other_value)) + different_ids.append(i) + + message_data = [ + (str(i), str(other_side[i]), str(approx_side_as_map[i])) + for i in different_ids + ] + + return _compare_approx( + self.expected, + message_data, + 
number_of_elements, + different_ids, + max_abs_diff, + max_rel_diff, + ) + + def __eq__(self, actual) -> bool: + try: + if len(actual) != len(self.expected): + return False + except TypeError: + return False + return super().__eq__(actual) + + def _yield_comparisons(self, actual): + return zip(actual, self.expected) + + def _check_type(self) -> None: + __tracebackhide__ = True + for index, x in enumerate(self.expected): + if isinstance(x, type(self.expected)): + msg = "pytest.approx() does not support nested data structures: {!r} at index {}\n full sequence: {}" + raise TypeError(msg.format(x, index, pprint.pformat(self.expected))) + + +class ApproxScalar(ApproxBase): + """Perform approximate comparisons where the expected value is a single number.""" + + # Using Real should be better than this Union, but not possible yet: + # https://github.com/python/typeshed/pull/3108 + DEFAULT_ABSOLUTE_TOLERANCE: Union[float, Decimal] = 1e-12 + DEFAULT_RELATIVE_TOLERANCE: Union[float, Decimal] = 1e-6 + + def __repr__(self) -> str: + """Return a string communicating both the expected value and the + tolerance for the comparison being made. + + For example, ``1.0 ± 1e-6``, ``(3+4j) ± 5e-6 ∠ ±180°``. + """ + # Don't show a tolerance for values that aren't compared using + # tolerances, i.e. non-numerics and infinities. Need to call abs to + # handle complex numbers, e.g. (inf + 1j). + if (not isinstance(self.expected, (Complex, Decimal))) or math.isinf( + abs(self.expected) + ): + return str(self.expected) + + # If a sensible tolerance can't be calculated, self.tolerance will + # raise a ValueError. In this case, display '???'. + try: + vetted_tolerance = f"{self.tolerance:.1e}" + if ( + isinstance(self.expected, Complex) + and self.expected.imag + and not math.isinf(self.tolerance) + ): + vetted_tolerance += " ∠ ±180°" + except ValueError: + vetted_tolerance = "???" + + return f"{self.expected} ± {vetted_tolerance}" + + def __eq__(self, actual) -> bool: + """Return whether the given value is equal to the expected value + within the pre-specified tolerance.""" + asarray = _as_numpy_array(actual) + if asarray is not None: + # Call ``__eq__()`` manually to prevent infinite-recursion with + # numpy<1.13. See #3748. + return all(self.__eq__(a) for a in asarray.flat) + + # Short-circuit exact equality. + if actual == self.expected: + return True + + # If either type is non-numeric, fall back to strict equality. + # NB: we need Complex, rather than just Number, to ensure that __abs__, + # __sub__, and __float__ are defined. + if not ( + isinstance(self.expected, (Complex, Decimal)) + and isinstance(actual, (Complex, Decimal)) + ): + return False + + # Allow the user to control whether NaNs are considered equal to each + # other or not. The abs() calls are for compatibility with complex + # numbers. + if math.isnan(abs(self.expected)): + return self.nan_ok and math.isnan(abs(actual)) + + # Infinity shouldn't be approximately equal to anything but itself, but + # if there's a relative tolerance, it will be infinite and infinity + # will seem approximately equal to everything. The equal-to-itself + # case would have been short circuited above, so here we can just + # return false if the expected value is infinite. The abs() call is + # for compatibility with complex numbers. + if math.isinf(abs(self.expected)): + return False + + # Return true if the two numbers are within the tolerance. 
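+        # Worked example (illustrative, using the defaults below): approx(100.0)
+        # has tolerance max(1e-6 * 100.0, 1e-12) == 1e-4, so 100.00005 compares
+        # equal while 100.001 does not.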
+ result: bool = abs(self.expected - actual) <= self.tolerance # type: ignore[arg-type] + return result + + # Ignore type because of https://github.com/python/mypy/issues/4266. + __hash__ = None # type: ignore + + @property + def tolerance(self): + """Return the tolerance for the comparison. + + This could be either an absolute tolerance or a relative tolerance, + depending on what the user specified or which would be larger. + """ + + def set_default(x, default): + return x if x is not None else default + + # Figure out what the absolute tolerance should be. ``self.abs`` is + # either None or a value specified by the user. + absolute_tolerance = set_default(self.abs, self.DEFAULT_ABSOLUTE_TOLERANCE) + + if absolute_tolerance < 0: + raise ValueError( + f"absolute tolerance can't be negative: {absolute_tolerance}" + ) + if math.isnan(absolute_tolerance): + raise ValueError("absolute tolerance can't be NaN.") + + # If the user specified an absolute tolerance but not a relative one, + # just return the absolute tolerance. + if self.rel is None: + if self.abs is not None: + return absolute_tolerance + + # Figure out what the relative tolerance should be. ``self.rel`` is + # either None or a value specified by the user. This is done after + # we've made sure the user didn't ask for an absolute tolerance only, + # because we don't want to raise errors about the relative tolerance if + # we aren't even going to use it. + relative_tolerance = set_default( + self.rel, self.DEFAULT_RELATIVE_TOLERANCE + ) * abs(self.expected) + + if relative_tolerance < 0: + raise ValueError( + f"relative tolerance can't be negative: {relative_tolerance}" + ) + if math.isnan(relative_tolerance): + raise ValueError("relative tolerance can't be NaN.") + + # Return the larger of the relative and absolute tolerances. + return max(relative_tolerance, absolute_tolerance) + + +class ApproxDecimal(ApproxScalar): + """Perform approximate comparisons where the expected value is a Decimal.""" + + DEFAULT_ABSOLUTE_TOLERANCE = Decimal("1e-12") + DEFAULT_RELATIVE_TOLERANCE = Decimal("1e-6") + + +def approx(expected, rel=None, abs=None, nan_ok: bool = False) -> ApproxBase: + """Assert that two numbers (or two ordered sequences of numbers) are equal to each other + within some tolerance. + + Due to the :doc:`python:tutorial/floatingpoint`, numbers that we + would intuitively expect to be equal are not always so:: + + >>> 0.1 + 0.2 == 0.3 + False + + This problem is commonly encountered when writing tests, e.g. when making + sure that floating-point values are what you expect them to be. One way to + deal with this problem is to assert that two floating-point numbers are + equal to within some appropriate tolerance:: + + >>> abs((0.1 + 0.2) - 0.3) < 1e-6 + True + + However, comparisons like this are tedious to write and difficult to + understand. Furthermore, absolute comparisons like the one above are + usually discouraged because there's no tolerance that works well for all + situations. ``1e-6`` is good for numbers around ``1``, but too small for + very big numbers and too big for very small ones. It's better to express + the tolerance as a fraction of the expected value, but relative comparisons + like that are even more difficult to write correctly and concisely. 
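+
+    For instance, a hand-rolled relative check might read
+    ``abs((0.1 + 0.2) - 0.3) <= 1e-6 * abs(0.3)`` (an illustrative sketch
+    only), and it is easy to get such expressions subtly wrong.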
+ + The ``approx`` class performs floating-point comparisons using a syntax + that's as intuitive as possible:: + + >>> from pytest import approx + >>> 0.1 + 0.2 == approx(0.3) + True + + The same syntax also works for ordered sequences of numbers:: + + >>> (0.1 + 0.2, 0.2 + 0.4) == approx((0.3, 0.6)) + True + + ``numpy`` arrays:: + + >>> import numpy as np # doctest: +SKIP + >>> np.array([0.1, 0.2]) + np.array([0.2, 0.4]) == approx(np.array([0.3, 0.6])) # doctest: +SKIP + True + + And for a ``numpy`` array against a scalar:: + + >>> import numpy as np # doctest: +SKIP + >>> np.array([0.1, 0.2]) + np.array([0.2, 0.1]) == approx(0.3) # doctest: +SKIP + True + + Only ordered sequences are supported, because ``approx`` needs + to infer the relative position of the sequences without ambiguity. This means + ``sets`` and other unordered sequences are not supported. + + Finally, dictionary *values* can also be compared:: + + >>> {'a': 0.1 + 0.2, 'b': 0.2 + 0.4} == approx({'a': 0.3, 'b': 0.6}) + True + + The comparison will be true if both mappings have the same keys and their + respective values match the expected tolerances. + + **Tolerances** + + By default, ``approx`` considers numbers within a relative tolerance of + ``1e-6`` (i.e. one part in a million) of its expected value to be equal. + This treatment would lead to surprising results if the expected value was + ``0.0``, because nothing but ``0.0`` itself is relatively close to ``0.0``. + To handle this case less surprisingly, ``approx`` also considers numbers + within an absolute tolerance of ``1e-12`` of its expected value to be + equal. Infinity and NaN are special cases. Infinity is only considered + equal to itself, regardless of the relative tolerance. NaN is not + considered equal to anything by default, but you can make it be equal to + itself by setting the ``nan_ok`` argument to True. (This is meant to + facilitate comparing arrays that use NaN to mean "no data".) + + Both the relative and absolute tolerances can be changed by passing + arguments to the ``approx`` constructor:: + + >>> 1.0001 == approx(1) + False + >>> 1.0001 == approx(1, rel=1e-3) + True + >>> 1.0001 == approx(1, abs=1e-3) + True + + If you specify ``abs`` but not ``rel``, the comparison will not consider + the relative tolerance at all. In other words, two numbers that are within + the default relative tolerance of ``1e-6`` will still be considered unequal + if they exceed the specified absolute tolerance. If you specify both + ``abs`` and ``rel``, the numbers will be considered equal if either + tolerance is met:: + + >>> 1 + 1e-8 == approx(1) + True + >>> 1 + 1e-8 == approx(1, abs=1e-12) + False + >>> 1 + 1e-8 == approx(1, rel=1e-6, abs=1e-12) + True + + You can also use ``approx`` to compare nonnumeric types, or dicts and + sequences containing nonnumeric types, in which case it falls back to + strict equality. This can be useful for comparing dicts and sequences that + can contain optional values:: + + >>> {"required": 1.0000005, "optional": None} == approx({"required": 1, "optional": None}) + True + >>> [None, 1.0000005] == approx([None,1]) + True + >>> ["foo", 1.0000005] == approx([None,1]) + False + + If you're thinking about using ``approx``, then you might want to know how + it compares to other good ways of comparing floating-point numbers. 
All of + these algorithms are based on relative and absolute tolerances and should + agree for the most part, but they do have meaningful differences: + + - ``math.isclose(a, b, rel_tol=1e-9, abs_tol=0.0)``: True if the relative + tolerance is met w.r.t. either ``a`` or ``b`` or if the absolute + tolerance is met. Because the relative tolerance is calculated w.r.t. + both ``a`` and ``b``, this test is symmetric (i.e. neither ``a`` nor + ``b`` is a "reference value"). You have to specify an absolute tolerance + if you want to compare to ``0.0`` because there is no tolerance by + default. More information: :py:func:`math.isclose`. + + - ``numpy.isclose(a, b, rtol=1e-5, atol=1e-8)``: True if the difference + between ``a`` and ``b`` is less that the sum of the relative tolerance + w.r.t. ``b`` and the absolute tolerance. Because the relative tolerance + is only calculated w.r.t. ``b``, this test is asymmetric and you can + think of ``b`` as the reference value. Support for comparing sequences + is provided by :py:func:`numpy.allclose`. More information: + :std:doc:`numpy:reference/generated/numpy.isclose`. + + - ``unittest.TestCase.assertAlmostEqual(a, b)``: True if ``a`` and ``b`` + are within an absolute tolerance of ``1e-7``. No relative tolerance is + considered , so this function is not appropriate for very large or very + small numbers. Also, it's only available in subclasses of ``unittest.TestCase`` + and it's ugly because it doesn't follow PEP8. More information: + :py:meth:`unittest.TestCase.assertAlmostEqual`. + + - ``a == pytest.approx(b, rel=1e-6, abs=1e-12)``: True if the relative + tolerance is met w.r.t. ``b`` or if the absolute tolerance is met. + Because the relative tolerance is only calculated w.r.t. ``b``, this test + is asymmetric and you can think of ``b`` as the reference value. In the + special case that you explicitly specify an absolute tolerance but not a + relative tolerance, only the absolute tolerance is considered. + + .. note:: + + ``approx`` can handle numpy arrays, but we recommend the + specialised test helpers in :std:doc:`numpy:reference/routines.testing` + if you need support for comparisons, NaNs, or ULP-based tolerances. + + To match strings using regex, you can use + `Matches `_ + from the + `re_assert package `_. + + .. warning:: + + .. versionchanged:: 3.2 + + In order to avoid inconsistent behavior, :py:exc:`TypeError` is + raised for ``>``, ``>=``, ``<`` and ``<=`` comparisons. + The example below illustrates the problem:: + + assert approx(0.1) > 0.1 + 1e-10 # calls approx(0.1).__gt__(0.1 + 1e-10) + assert 0.1 + 1e-10 > approx(0.1) # calls approx(0.1).__lt__(0.1 + 1e-10) + + In the second example one expects ``approx(0.1).__le__(0.1 + 1e-10)`` + to be called. But instead, ``approx(0.1).__lt__(0.1 + 1e-10)`` is used to + comparison. This is because the call hierarchy of rich comparisons + follows a fixed behavior. More information: :py:meth:`object.__ge__` + + .. versionchanged:: 3.7.1 + ``approx`` raises ``TypeError`` when it encounters a dict value or + sequence element of nonnumeric type. + + .. versionchanged:: 6.1.0 + ``approx`` falls back to strict equality for nonnumeric types instead + of raising ``TypeError``. + """ + # Delegate the comparison to a class that knows how to deal with the type + # of the expected value (e.g. int, float, list, dict, numpy.array, etc). + # + # The primary responsibility of these classes is to implement ``__eq__()`` + # and ``__repr__()``. 
The former is used to actually check if some + # "actual" value is equivalent to the given expected value within the + # allowed tolerance. The latter is used to show the user the expected + # value and tolerance, in the case that a test failed. + # + # The actual logic for making approximate comparisons can be found in + # ApproxScalar, which is used to compare individual numbers. All of the + # other Approx classes eventually delegate to this class. The ApproxBase + # class provides some convenient methods and overloads, but isn't really + # essential. + + __tracebackhide__ = True + + if isinstance(expected, Decimal): + cls: Type[ApproxBase] = ApproxDecimal + elif isinstance(expected, Mapping): + cls = ApproxMapping + elif _is_numpy_array(expected): + expected = _as_numpy_array(expected) + cls = ApproxNumpy + elif ( + hasattr(expected, "__getitem__") + and isinstance(expected, Sized) + and not isinstance(expected, (str, bytes)) + ): + cls = ApproxSequenceLike + elif isinstance(expected, Collection) and not isinstance(expected, (str, bytes)): + msg = f"pytest.approx() only supports ordered sequences, but got: {expected!r}" + raise TypeError(msg) + else: + cls = ApproxScalar + + return cls(expected, rel, abs, nan_ok) + + +def _is_numpy_array(obj: object) -> bool: + """ + Return true if the given object is implicitly convertible to ndarray, + and numpy is already imported. + """ + return _as_numpy_array(obj) is not None + + +def _as_numpy_array(obj: object) -> Optional["ndarray"]: + """ + Return an ndarray if the given object is implicitly convertible to ndarray, + and numpy is already imported, otherwise None. + """ + import sys + + np: Any = sys.modules.get("numpy") + if np is not None: + # avoid infinite recursion on numpy scalars, which have __array__ + if np.isscalar(obj): + return None + elif isinstance(obj, np.ndarray): + return obj + elif hasattr(obj, "__array__") or hasattr("obj", "__array_interface__"): + return np.asarray(obj) + return None + + +# builtin pytest.raises helper + +E = TypeVar("E", bound=BaseException) + + +@overload +def raises( + expected_exception: Union[Type[E], Tuple[Type[E], ...]], + *, + match: Optional[Union[str, Pattern[str]]] = ..., +) -> "RaisesContext[E]": ... + + +@overload +def raises( + expected_exception: Union[Type[E], Tuple[Type[E], ...]], + func: Callable[..., Any], + *args: Any, + **kwargs: Any, +) -> _pytest._code.ExceptionInfo[E]: ... + + +def raises( + expected_exception: Union[Type[E], Tuple[Type[E], ...]], *args: Any, **kwargs: Any +) -> Union["RaisesContext[E]", _pytest._code.ExceptionInfo[E]]: + r"""Assert that a code block/function call raises an exception type, or one of its subclasses. + + :param expected_exception: + The expected exception type, or a tuple if one of multiple possible + exception types are expected. Note that subclasses of the passed exceptions + will also match. + + :kwparam str | re.Pattern[str] | None match: + If specified, a string containing a regular expression, + or a regular expression object, that is tested against the string + representation of the exception and its :pep:`678` `__notes__` + using :func:`re.search`. + + To match a literal string that may contain :ref:`special characters + `, the pattern can first be escaped with :func:`re.escape`. + + (This is only used when ``pytest.raises`` is used as a context manager, + and passed through to the function otherwise. + When using ``pytest.raises`` as a function, you can use: + ``pytest.raises(Exc, func, match="passed on").match("my pattern")``.) 
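+
+        For example, a literal pattern containing regex metacharacters can be
+        escaped first (an illustrative sketch)::
+
+            >>> import re
+            >>> import pytest
+            >>> with pytest.raises(ValueError, match=re.escape("invalid literal [x]")):
+            ...     raise ValueError("invalid literal [x]")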
+ + Use ``pytest.raises`` as a context manager, which will capture the exception of the given + type, or any of its subclasses:: + + >>> import pytest + >>> with pytest.raises(ZeroDivisionError): + ... 1/0 + + If the code block does not raise the expected exception (:class:`ZeroDivisionError` in the example + above), or no exception at all, the check will fail instead. + + You can also use the keyword argument ``match`` to assert that the + exception matches a text or regex:: + + >>> with pytest.raises(ValueError, match='must be 0 or None'): + ... raise ValueError("value must be 0 or None") + + >>> with pytest.raises(ValueError, match=r'must be \d+$'): + ... raise ValueError("value must be 42") + + The ``match`` argument searches the formatted exception string, which includes any + `PEP-678 `__ ``__notes__``: + + >>> with pytest.raises(ValueError, match=r"had a note added"): # doctest: +SKIP + ... e = ValueError("value must be 42") + ... e.add_note("had a note added") + ... raise e + + The context manager produces an :class:`ExceptionInfo` object which can be used to inspect the + details of the captured exception:: + + >>> with pytest.raises(ValueError) as exc_info: + ... raise ValueError("value must be 42") + >>> assert exc_info.type is ValueError + >>> assert exc_info.value.args[0] == "value must be 42" + + .. warning:: + + Given that ``pytest.raises`` matches subclasses, be wary of using it to match :class:`Exception` like this:: + + with pytest.raises(Exception): # Careful, this will catch ANY exception raised. + some_function() + + Because :class:`Exception` is the base class of almost all exceptions, it is easy for this to hide + real bugs, where the user wrote this expecting a specific exception, but some other exception is being + raised due to a bug introduced during a refactoring. + + Avoid using ``pytest.raises`` to catch :class:`Exception` unless certain that you really want to catch + **any** exception raised. + + .. note:: + + When using ``pytest.raises`` as a context manager, it's worthwhile to + note that normal context manager rules apply and that the exception + raised *must* be the final line in the scope of the context manager. + Lines of code after that, within the scope of the context manager will + not be executed. For example:: + + >>> value = 15 + >>> with pytest.raises(ValueError) as exc_info: + ... if value > 10: + ... raise ValueError("value must be <= 10") + ... assert exc_info.type is ValueError # This will not execute. + + Instead, the following approach must be taken (note the difference in + scope):: + + >>> with pytest.raises(ValueError) as exc_info: + ... if value > 10: + ... raise ValueError("value must be <= 10") + ... + >>> assert exc_info.type is ValueError + + **Using with** ``pytest.mark.parametrize`` + + When using :ref:`pytest.mark.parametrize ref` + it is possible to parametrize tests such that + some runs raise an exception and others do not. + + See :ref:`parametrizing_conditional_raising` for an example. + + .. seealso:: + + :ref:`assertraises` for more examples and detailed discussion. + + **Legacy form** + + It is possible to specify a callable by passing a to-be-called lambda:: + + >>> raises(ZeroDivisionError, lambda: 1/0) + + + or you can specify an arbitrary callable with arguments:: + + >>> def f(x): return 1/x + ... 
+ >>> raises(ZeroDivisionError, f, 0) + + >>> raises(ZeroDivisionError, f, x=0) + + + The form above is fully supported but discouraged for new code because the + context manager form is regarded as more readable and less error-prone. + + .. note:: + Similar to caught exception objects in Python, explicitly clearing + local references to returned ``ExceptionInfo`` objects can + help the Python interpreter speed up its garbage collection. + + Clearing those references breaks a reference cycle + (``ExceptionInfo`` --> caught exception --> frame stack raising + the exception --> current frame stack --> local variables --> + ``ExceptionInfo``) which makes Python keep all objects referenced + from that cycle (including all local variables in the current + frame) alive until the next cyclic garbage collection run. + More detailed information can be found in the official Python + documentation for :ref:`the try statement `. + """ + __tracebackhide__ = True + + if not expected_exception: + raise ValueError( + f"Expected an exception type or a tuple of exception types, but got `{expected_exception!r}`. " + f"Raising exceptions is already understood as failing the test, so you don't need " + f"any special code to say 'this should never raise an exception'." + ) + if isinstance(expected_exception, type): + expected_exceptions: Tuple[Type[E], ...] = (expected_exception,) + else: + expected_exceptions = expected_exception + for exc in expected_exceptions: + if not isinstance(exc, type) or not issubclass(exc, BaseException): + msg = "expected exception must be a BaseException type, not {}" # type: ignore[unreachable] + not_a = exc.__name__ if isinstance(exc, type) else type(exc).__name__ + raise TypeError(msg.format(not_a)) + + message = f"DID NOT RAISE {expected_exception}" + + if not args: + match: Optional[Union[str, Pattern[str]]] = kwargs.pop("match", None) + if kwargs: + msg = "Unexpected keyword arguments passed to pytest.raises: " + msg += ", ".join(sorted(kwargs)) + msg += "\nUse context-manager form instead?" + raise TypeError(msg) + return RaisesContext(expected_exception, message, match) + else: + func = args[0] + if not callable(func): + raise TypeError(f"{func!r} object (type: {type(func)}) must be callable") + try: + func(*args[1:], **kwargs) + except expected_exception as e: + return _pytest._code.ExceptionInfo.from_exception(e) + fail(message) + + +# This doesn't work with mypy for now. Use fail.Exception instead. +raises.Exception = fail.Exception # type: ignore + + +@final +class RaisesContext(ContextManager[_pytest._code.ExceptionInfo[E]]): + def __init__( + self, + expected_exception: Union[Type[E], Tuple[Type[E], ...]], + message: str, + match_expr: Optional[Union[str, Pattern[str]]] = None, + ) -> None: + self.expected_exception = expected_exception + self.message = message + self.match_expr = match_expr + self.excinfo: Optional[_pytest._code.ExceptionInfo[E]] = None + + def __enter__(self) -> _pytest._code.ExceptionInfo[E]: + self.excinfo = _pytest._code.ExceptionInfo.for_later() + return self.excinfo + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> bool: + __tracebackhide__ = True + if exc_type is None: + fail(self.message) + assert self.excinfo is not None + if not issubclass(exc_type, self.expected_exception): + return False + # Cast to narrow the exception type now that it's verified. 
+ exc_info = cast(Tuple[Type[E], E, TracebackType], (exc_type, exc_val, exc_tb)) + self.excinfo.fill_unfilled(exc_info) + if self.match_expr is not None: + self.excinfo.match(self.match_expr) + return True diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/python_path.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/python_path.py new file mode 100644 index 000000000..cceabbca1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/python_path.py @@ -0,0 +1,24 @@ +import sys + +import pytest +from pytest import Config +from pytest import Parser + + +def pytest_addoption(parser: Parser) -> None: + parser.addini("pythonpath", type="paths", help="Add paths to sys.path", default=[]) + + +@pytest.hookimpl(tryfirst=True) +def pytest_load_initial_conftests(early_config: Config) -> None: + # `pythonpath = a b` will set `sys.path` to `[a, b, x, y, z, ...]` + for path in reversed(early_config.getini("pythonpath")): + sys.path.insert(0, str(path)) + + +@pytest.hookimpl(trylast=True) +def pytest_unconfigure(config: Config) -> None: + for path in config.getini("pythonpath"): + path_str = str(path) + if path_str in sys.path: + sys.path.remove(path_str) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/recwarn.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/recwarn.py new file mode 100644 index 000000000..63e7a4bd6 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/recwarn.py @@ -0,0 +1,366 @@ +# mypy: allow-untyped-defs +"""Record warnings during test function execution.""" + +from pprint import pformat +import re +from types import TracebackType +from typing import Any +from typing import Callable +from typing import final +from typing import Generator +from typing import Iterator +from typing import List +from typing import Optional +from typing import overload +from typing import Pattern +from typing import Tuple +from typing import Type +from typing import TypeVar +from typing import Union +import warnings + +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.outcomes import Exit +from _pytest.outcomes import fail + + +T = TypeVar("T") + + +@fixture +def recwarn() -> Generator["WarningsRecorder", None, None]: + """Return a :class:`WarningsRecorder` instance that records all warnings emitted by test functions. + + See https://docs.pytest.org/en/latest/how-to/capture-warnings.html for information + on warning categories. + """ + wrec = WarningsRecorder(_ispytest=True) + with wrec: + warnings.simplefilter("default") + yield wrec + + +@overload +def deprecated_call( + *, match: Optional[Union[str, Pattern[str]]] = ... +) -> "WarningsRecorder": ... + + +@overload +def deprecated_call(func: Callable[..., T], *args: Any, **kwargs: Any) -> T: ... + + +def deprecated_call( + func: Optional[Callable[..., Any]] = None, *args: Any, **kwargs: Any +) -> Union["WarningsRecorder", Any]: + """Assert that code produces a ``DeprecationWarning`` or ``PendingDeprecationWarning`` or ``FutureWarning``. + + This function can be used as a context manager:: + + >>> import warnings + >>> def api_call_v2(): + ... warnings.warn('use v3 of this api', DeprecationWarning) + ... return 200 + + >>> import pytest + >>> with pytest.deprecated_call(): + ... assert api_call_v2() == 200 + + It can also be used by passing a function and ``*args`` and ``**kwargs``, + in which case it will ensure calling ``func(*args, **kwargs)`` produces one of + the warnings types above. 
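    For example, the direct-call form can be used as a one-liner (a minimal
    sketch reusing the ``api_call_v2`` helper defined in the example above)::

        >>> assert pytest.deprecated_call(api_call_v2) == 200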
The return value is the return value of the function. + + In the context manager form you may use the keyword argument ``match`` to assert + that the warning matches a text or regex. + + The context manager produces a list of :class:`warnings.WarningMessage` objects, + one for each warning raised. + """ + __tracebackhide__ = True + if func is not None: + args = (func, *args) + return warns( + (DeprecationWarning, PendingDeprecationWarning, FutureWarning), *args, **kwargs + ) + + +@overload +def warns( + expected_warning: Union[Type[Warning], Tuple[Type[Warning], ...]] = ..., + *, + match: Optional[Union[str, Pattern[str]]] = ..., +) -> "WarningsChecker": ... + + +@overload +def warns( + expected_warning: Union[Type[Warning], Tuple[Type[Warning], ...]], + func: Callable[..., T], + *args: Any, + **kwargs: Any, +) -> T: ... + + +def warns( + expected_warning: Union[Type[Warning], Tuple[Type[Warning], ...]] = Warning, + *args: Any, + match: Optional[Union[str, Pattern[str]]] = None, + **kwargs: Any, +) -> Union["WarningsChecker", Any]: + r"""Assert that code raises a particular class of warning. + + Specifically, the parameter ``expected_warning`` can be a warning class or tuple + of warning classes, and the code inside the ``with`` block must issue at least one + warning of that class or classes. + + This helper produces a list of :class:`warnings.WarningMessage` objects, one for + each warning emitted (regardless of whether it is an ``expected_warning`` or not). + Since pytest 8.0, unmatched warnings are also re-emitted when the context closes. + + This function can be used as a context manager:: + + >>> import pytest + >>> with pytest.warns(RuntimeWarning): + ... warnings.warn("my warning", RuntimeWarning) + + In the context manager form you may use the keyword argument ``match`` to assert + that the warning matches a text or regex:: + + >>> with pytest.warns(UserWarning, match='must be 0 or None'): + ... warnings.warn("value must be 0 or None", UserWarning) + + >>> with pytest.warns(UserWarning, match=r'must be \d+$'): + ... warnings.warn("value must be 42", UserWarning) + + >>> with pytest.warns(UserWarning): # catch re-emitted warning + ... with pytest.warns(UserWarning, match=r'must be \d+$'): + ... warnings.warn("this is not here", UserWarning) + Traceback (most recent call last): + ... + Failed: DID NOT WARN. No warnings of type ...UserWarning... were emitted... + + **Using with** ``pytest.mark.parametrize`` + + When using :ref:`pytest.mark.parametrize ref` it is possible to parametrize tests + such that some runs raise a warning and others do not. + + This could be achieved in the same way as with exceptions, see + :ref:`parametrizing_conditional_raising` for an example. + + """ + __tracebackhide__ = True + if not args: + if kwargs: + argnames = ", ".join(sorted(kwargs)) + raise TypeError( + f"Unexpected keyword arguments passed to pytest.warns: {argnames}" + "\nUse context-manager form instead?" + ) + return WarningsChecker(expected_warning, match_expr=match, _ispytest=True) + else: + func = args[0] + if not callable(func): + raise TypeError(f"{func!r} object (type: {type(func)}) must be callable") + with WarningsChecker(expected_warning, _ispytest=True): + return func(*args[1:], **kwargs) + + +class WarningsRecorder(warnings.catch_warnings): # type:ignore[type-arg] + """A context manager to record raised warnings. + + Each recorded warning is an instance of :class:`warnings.WarningMessage`. + + Adapted from `warnings.catch_warnings`. + + .. 
note:: + ``DeprecationWarning`` and ``PendingDeprecationWarning`` are treated + differently; see :ref:`ensuring_function_triggers`. + + """ + + def __init__(self, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + super().__init__(record=True) + self._entered = False + self._list: List[warnings.WarningMessage] = [] + + @property + def list(self) -> List["warnings.WarningMessage"]: + """The list of recorded warnings.""" + return self._list + + def __getitem__(self, i: int) -> "warnings.WarningMessage": + """Get a recorded warning by index.""" + return self._list[i] + + def __iter__(self) -> Iterator["warnings.WarningMessage"]: + """Iterate through the recorded warnings.""" + return iter(self._list) + + def __len__(self) -> int: + """The number of recorded warnings.""" + return len(self._list) + + def pop(self, cls: Type[Warning] = Warning) -> "warnings.WarningMessage": + """Pop the first recorded warning which is an instance of ``cls``, + but not an instance of a child class of any other match. + Raises ``AssertionError`` if there is no match. + """ + best_idx: Optional[int] = None + for i, w in enumerate(self._list): + if w.category == cls: + return self._list.pop(i) # exact match, stop looking + if issubclass(w.category, cls) and ( + best_idx is None + or not issubclass(w.category, self._list[best_idx].category) + ): + best_idx = i + if best_idx is not None: + return self._list.pop(best_idx) + __tracebackhide__ = True + raise AssertionError(f"{cls!r} not found in warning list") + + def clear(self) -> None: + """Clear the list of recorded warnings.""" + self._list[:] = [] + + # Type ignored because it doesn't exactly warnings.catch_warnings.__enter__ + # -- it returns a List but we only emulate one. + def __enter__(self) -> "WarningsRecorder": # type: ignore + if self._entered: + __tracebackhide__ = True + raise RuntimeError(f"Cannot enter {self!r} twice") + _list = super().__enter__() + # record=True means it's None. + assert _list is not None + self._list = _list + warnings.simplefilter("always") + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + if not self._entered: + __tracebackhide__ = True + raise RuntimeError(f"Cannot exit {self!r} without entering first") + + super().__exit__(exc_type, exc_val, exc_tb) + + # Built-in catch_warnings does not reset entered state so we do it + # manually here for this context manager to become reusable. 
+ self._entered = False + + +@final +class WarningsChecker(WarningsRecorder): + def __init__( + self, + expected_warning: Union[Type[Warning], Tuple[Type[Warning], ...]] = Warning, + match_expr: Optional[Union[str, Pattern[str]]] = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + super().__init__(_ispytest=True) + + msg = "exceptions must be derived from Warning, not %s" + if isinstance(expected_warning, tuple): + for exc in expected_warning: + if not issubclass(exc, Warning): + raise TypeError(msg % type(exc)) + expected_warning_tup = expected_warning + elif isinstance(expected_warning, type) and issubclass( + expected_warning, Warning + ): + expected_warning_tup = (expected_warning,) + else: + raise TypeError(msg % type(expected_warning)) + + self.expected_warning = expected_warning_tup + self.match_expr = match_expr + + def matches(self, warning: warnings.WarningMessage) -> bool: + assert self.expected_warning is not None + return issubclass(warning.category, self.expected_warning) and bool( + self.match_expr is None or re.search(self.match_expr, str(warning.message)) + ) + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + super().__exit__(exc_type, exc_val, exc_tb) + + __tracebackhide__ = True + + # BaseExceptions like pytest.{skip,fail,xfail,exit} or Ctrl-C within + # pytest.warns should *not* trigger "DID NOT WARN" and get suppressed + # when the warning doesn't happen. Control-flow exceptions should always + # propagate. + if exc_val is not None and ( + not isinstance(exc_val, Exception) + # Exit is an Exception, not a BaseException, for some reason. + or isinstance(exc_val, Exit) + ): + return + + def found_str() -> str: + return pformat([record.message for record in self], indent=2) + + try: + if not any(issubclass(w.category, self.expected_warning) for w in self): + fail( + f"DID NOT WARN. No warnings of type {self.expected_warning} were emitted.\n" + f" Emitted warnings: {found_str()}." + ) + elif not any(self.matches(w) for w in self): + fail( + f"DID NOT WARN. No warnings of type {self.expected_warning} matching the regex were emitted.\n" + f" Regex: {self.match_expr}\n" + f" Emitted warnings: {found_str()}." + ) + finally: + # Whether or not any warnings matched, we want to re-emit all unmatched warnings. + for w in self: + if not self.matches(w): + warnings.warn_explicit( + message=w.message, + category=w.category, + filename=w.filename, + lineno=w.lineno, + module=w.__module__, + source=w.source, + ) + + # Currently in Python it is possible to pass other types than an + # `str` message when creating `Warning` instances, however this + # causes an exception when :func:`warnings.filterwarnings` is used + # to filter those warnings. See + # https://github.com/python/cpython/issues/103577 for a discussion. + # While this can be considered a bug in CPython, we put guards in + # pytest as the error message produced without this check in place + # is confusing (#10865). + for w in self: + if type(w.message) is not UserWarning: + # If the warning was of an incorrect type then `warnings.warn()` + # creates a UserWarning. Any other warning must have been specified + # explicitly. + continue + if not w.message.args: + # UserWarning() without arguments must have been specified explicitly. 
+ continue + msg = w.message.args[0] + if isinstance(msg, str): + continue + # It's possible that UserWarning was explicitly specified, and + # its first argument was not a string. But that case can't be + # distinguished from an invalid type. + raise TypeError( + f"Warning must be str or Warning, got {msg!r} (type {type(msg).__name__})" + ) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/reports.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/reports.py new file mode 100644 index 000000000..70f3212ce --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/reports.py @@ -0,0 +1,621 @@ +# mypy: allow-untyped-defs +import dataclasses +from io import StringIO +import os +from pprint import pprint +from typing import Any +from typing import cast +from typing import Dict +from typing import final +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Literal +from typing import Mapping +from typing import NoReturn +from typing import Optional +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from _pytest._code.code import ExceptionChainRepr +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import ExceptionRepr +from _pytest._code.code import ReprEntry +from _pytest._code.code import ReprEntryNative +from _pytest._code.code import ReprExceptionInfo +from _pytest._code.code import ReprFileLocation +from _pytest._code.code import ReprFuncArgs +from _pytest._code.code import ReprLocals +from _pytest._code.code import ReprTraceback +from _pytest._code.code import TerminalRepr +from _pytest._io import TerminalWriter +from _pytest.config import Config +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import skip + + +if TYPE_CHECKING: + from _pytest.runner import CallInfo + + +def getworkerinfoline(node): + try: + return node._workerinfocache + except AttributeError: + d = node.workerinfo + ver = "{}.{}.{}".format(*d["version_info"][:3]) + node._workerinfocache = s = "[{}] {} -- Python {} {}".format( + d["id"], d["sysplatform"], ver, d["executable"] + ) + return s + + +_R = TypeVar("_R", bound="BaseReport") + + +class BaseReport: + when: Optional[str] + location: Optional[Tuple[str, Optional[int], str]] + longrepr: Union[ + None, ExceptionInfo[BaseException], Tuple[str, int, str], str, TerminalRepr + ] + sections: List[Tuple[str, str]] + nodeid: str + outcome: Literal["passed", "failed", "skipped"] + + def __init__(self, **kw: Any) -> None: + self.__dict__.update(kw) + + if TYPE_CHECKING: + # Can have arbitrary fields given to __init__(). + def __getattr__(self, key: str) -> Any: ... + + def toterminal(self, out: TerminalWriter) -> None: + if hasattr(self, "node"): + worker_info = getworkerinfoline(self.node) + if worker_info: + out.line(worker_info) + + longrepr = self.longrepr + if longrepr is None: + return + + if hasattr(longrepr, "toterminal"): + longrepr_terminal = cast(TerminalRepr, longrepr) + longrepr_terminal.toterminal(out) + else: + try: + s = str(longrepr) + except UnicodeEncodeError: + s = "" + out.line(s) + + def get_sections(self, prefix: str) -> Iterator[Tuple[str, str]]: + for name, content in self.sections: + if name.startswith(prefix): + yield prefix, content + + @property + def longreprtext(self) -> str: + """Read-only property that returns the full string representation of + ``longrepr``. + + .. 
versionadded:: 3.0 + """ + file = StringIO() + tw = TerminalWriter(file) + tw.hasmarkup = False + self.toterminal(tw) + exc = file.getvalue() + return exc.strip() + + @property + def caplog(self) -> str: + """Return captured log lines, if log capturing is enabled. + + .. versionadded:: 3.5 + """ + return "\n".join( + content for (prefix, content) in self.get_sections("Captured log") + ) + + @property + def capstdout(self) -> str: + """Return captured text from stdout, if capturing is enabled. + + .. versionadded:: 3.0 + """ + return "".join( + content for (prefix, content) in self.get_sections("Captured stdout") + ) + + @property + def capstderr(self) -> str: + """Return captured text from stderr, if capturing is enabled. + + .. versionadded:: 3.0 + """ + return "".join( + content for (prefix, content) in self.get_sections("Captured stderr") + ) + + @property + def passed(self) -> bool: + """Whether the outcome is passed.""" + return self.outcome == "passed" + + @property + def failed(self) -> bool: + """Whether the outcome is failed.""" + return self.outcome == "failed" + + @property + def skipped(self) -> bool: + """Whether the outcome is skipped.""" + return self.outcome == "skipped" + + @property + def fspath(self) -> str: + """The path portion of the reported node, as a string.""" + return self.nodeid.split("::")[0] + + @property + def count_towards_summary(self) -> bool: + """**Experimental** Whether this report should be counted towards the + totals shown at the end of the test session: "1 passed, 1 failure, etc". + + .. note:: + + This function is considered **experimental**, so beware that it is subject to changes + even in patch releases. + """ + return True + + @property + def head_line(self) -> Optional[str]: + """**Experimental** The head line shown with longrepr output for this + report, more commonly during traceback representation during + failures:: + + ________ Test.foo ________ + + + In the example above, the head_line is "Test.foo". + + .. note:: + + This function is considered **experimental**, so beware that it is subject to changes + even in patch releases. + """ + if self.location is not None: + fspath, lineno, domain = self.location + return domain + return None + + def _get_verbose_word(self, config: Config): + _category, _short, verbose = config.hook.pytest_report_teststatus( + report=self, config=config + ) + return verbose + + def _to_json(self) -> Dict[str, Any]: + """Return the contents of this report as a dict of builtin entries, + suitable for serialization. + + This was originally the serialize_report() function from xdist (ca03269). + + Experimental method. + """ + return _report_to_json(self) + + @classmethod + def _from_json(cls: Type[_R], reportdict: Dict[str, object]) -> _R: + """Create either a TestReport or CollectReport, depending on the calling class. + + It is the callers responsibility to know which class to pass here. + + This was originally the serialize_report() function from xdist (ca03269). + + Experimental method. 
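        A hypothetical round-trip sketch using the two experimental methods of
        this class (``report`` stands for any existing report object)::

            data = report._to_json()
            restored = type(report)._from_json(data)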
+ """ + kwargs = _report_kwargs_from_json(reportdict) + return cls(**kwargs) + + +def _report_unserialization_failure( + type_name: str, report_class: Type[BaseReport], reportdict +) -> NoReturn: + url = "https://github.com/pytest-dev/pytest/issues" + stream = StringIO() + pprint("-" * 100, stream=stream) + pprint("INTERNALERROR: Unknown entry type returned: %s" % type_name, stream=stream) + pprint("report_name: %s" % report_class, stream=stream) + pprint(reportdict, stream=stream) + pprint("Please report this bug at %s" % url, stream=stream) + pprint("-" * 100, stream=stream) + raise RuntimeError(stream.getvalue()) + + +@final +class TestReport(BaseReport): + """Basic test report object (also used for setup and teardown calls if + they fail). + + Reports can contain arbitrary extra attributes. + """ + + __test__ = False + # Defined by skipping plugin. + # xfail reason if xfailed, otherwise not defined. Use hasattr to distinguish. + wasxfail: str + + def __init__( + self, + nodeid: str, + location: Tuple[str, Optional[int], str], + keywords: Mapping[str, Any], + outcome: Literal["passed", "failed", "skipped"], + longrepr: Union[ + None, ExceptionInfo[BaseException], Tuple[str, int, str], str, TerminalRepr + ], + when: Literal["setup", "call", "teardown"], + sections: Iterable[Tuple[str, str]] = (), + duration: float = 0, + start: float = 0, + stop: float = 0, + user_properties: Optional[Iterable[Tuple[str, object]]] = None, + **extra, + ) -> None: + #: Normalized collection nodeid. + self.nodeid = nodeid + + #: A (filesystempath, lineno, domaininfo) tuple indicating the + #: actual location of a test item - it might be different from the + #: collected one e.g. if a method is inherited from a different module. + #: The filesystempath may be relative to ``config.rootdir``. + #: The line number is 0-based. + self.location: Tuple[str, Optional[int], str] = location + + #: A name -> value dictionary containing all keywords and + #: markers associated with a test invocation. + self.keywords: Mapping[str, Any] = keywords + + #: Test outcome, always one of "passed", "failed", "skipped". + self.outcome = outcome + + #: None or a failure representation. + self.longrepr = longrepr + + #: One of 'setup', 'call', 'teardown' to indicate runtest phase. + self.when = when + + #: User properties is a list of tuples (name, value) that holds user + #: defined properties of the test. + self.user_properties = list(user_properties or []) + + #: Tuples of str ``(heading, content)`` with extra information + #: for the test report. Used by pytest to add text captured + #: from ``stdout``, ``stderr``, and intercepted logging events. May + #: be used by other plugins to add arbitrary information to reports. + self.sections = list(sections) + + #: Time it took to run just the test. + self.duration: float = duration + + #: The system time when the call started, in seconds since the epoch. + self.start: float = start + #: The system time when the call ended, in seconds since the epoch. + self.stop: float = stop + + self.__dict__.update(extra) + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.nodeid!r} when={self.when!r} outcome={self.outcome!r}>" + + @classmethod + def from_item_and_call(cls, item: Item, call: "CallInfo[None]") -> "TestReport": + """Create and fill a TestReport with standard item and call info. + + :param item: The item. + :param call: The call info. + """ + when = call.when + # Remove "collect" from the Literal type -- only for collection calls. 
+ assert when != "collect" + duration = call.duration + start = call.start + stop = call.stop + keywords = {x: 1 for x in item.keywords} + excinfo = call.excinfo + sections = [] + if not call.excinfo: + outcome: Literal["passed", "failed", "skipped"] = "passed" + longrepr: Union[ + None, + ExceptionInfo[BaseException], + Tuple[str, int, str], + str, + TerminalRepr, + ] = None + else: + if not isinstance(excinfo, ExceptionInfo): + outcome = "failed" + longrepr = excinfo + elif isinstance(excinfo.value, skip.Exception): + outcome = "skipped" + r = excinfo._getreprcrash() + assert ( + r is not None + ), "There should always be a traceback entry for skipping a test." + if excinfo.value._use_item_location: + path, line = item.reportinfo()[:2] + assert line is not None + longrepr = os.fspath(path), line + 1, r.message + else: + longrepr = (str(r.path), r.lineno, r.message) + else: + outcome = "failed" + if call.when == "call": + longrepr = item.repr_failure(excinfo) + else: # exception in setup or teardown + longrepr = item._repr_failure_py( + excinfo, style=item.config.getoption("tbstyle", "auto") + ) + for rwhen, key, content in item._report_sections: + sections.append((f"Captured {key} {rwhen}", content)) + return cls( + item.nodeid, + item.location, + keywords, + outcome, + longrepr, + when, + sections, + duration, + start, + stop, + user_properties=item.user_properties, + ) + + +@final +class CollectReport(BaseReport): + """Collection report object. + + Reports can contain arbitrary extra attributes. + """ + + when = "collect" + + def __init__( + self, + nodeid: str, + outcome: "Literal['passed', 'failed', 'skipped']", + longrepr: Union[ + None, ExceptionInfo[BaseException], Tuple[str, int, str], str, TerminalRepr + ], + result: Optional[List[Union[Item, Collector]]], + sections: Iterable[Tuple[str, str]] = (), + **extra, + ) -> None: + #: Normalized collection nodeid. + self.nodeid = nodeid + + #: Test outcome, always one of "passed", "failed", "skipped". + self.outcome = outcome + + #: None or a failure representation. + self.longrepr = longrepr + + #: The collected items and collection nodes. + self.result = result or [] + + #: Tuples of str ``(heading, content)`` with extra information + #: for the test report. Used by pytest to add text captured + #: from ``stdout``, ``stderr``, and intercepted logging events. May + #: be used by other plugins to add arbitrary information to reports. + self.sections = list(sections) + + self.__dict__.update(extra) + + @property + def location( # type:ignore[override] + self, + ) -> Optional[Tuple[str, Optional[int], str]]: + return (self.fspath, None, self.fspath) + + def __repr__(self) -> str: + return f"" + + +class CollectErrorRepr(TerminalRepr): + def __init__(self, msg: str) -> None: + self.longrepr = msg + + def toterminal(self, out: TerminalWriter) -> None: + out.line(self.longrepr, red=True) + + +def pytest_report_to_serializable( + report: Union[CollectReport, TestReport], +) -> Optional[Dict[str, Any]]: + if isinstance(report, (TestReport, CollectReport)): + data = report._to_json() + data["$report_type"] = report.__class__.__name__ + return data + # TODO: Check if this is actually reachable. 
+ return None # type: ignore[unreachable] + + +def pytest_report_from_serializable( + data: Dict[str, Any], +) -> Optional[Union[CollectReport, TestReport]]: + if "$report_type" in data: + if data["$report_type"] == "TestReport": + return TestReport._from_json(data) + elif data["$report_type"] == "CollectReport": + return CollectReport._from_json(data) + assert False, "Unknown report_type unserialize data: {}".format( + data["$report_type"] + ) + return None + + +def _report_to_json(report: BaseReport) -> Dict[str, Any]: + """Return the contents of this report as a dict of builtin entries, + suitable for serialization. + + This was originally the serialize_report() function from xdist (ca03269). + """ + + def serialize_repr_entry( + entry: Union[ReprEntry, ReprEntryNative], + ) -> Dict[str, Any]: + data = dataclasses.asdict(entry) + for key, value in data.items(): + if hasattr(value, "__dict__"): + data[key] = dataclasses.asdict(value) + entry_data = {"type": type(entry).__name__, "data": data} + return entry_data + + def serialize_repr_traceback(reprtraceback: ReprTraceback) -> Dict[str, Any]: + result = dataclasses.asdict(reprtraceback) + result["reprentries"] = [ + serialize_repr_entry(x) for x in reprtraceback.reprentries + ] + return result + + def serialize_repr_crash( + reprcrash: Optional[ReprFileLocation], + ) -> Optional[Dict[str, Any]]: + if reprcrash is not None: + return dataclasses.asdict(reprcrash) + else: + return None + + def serialize_exception_longrepr(rep: BaseReport) -> Dict[str, Any]: + assert rep.longrepr is not None + # TODO: Investigate whether the duck typing is really necessary here. + longrepr = cast(ExceptionRepr, rep.longrepr) + result: Dict[str, Any] = { + "reprcrash": serialize_repr_crash(longrepr.reprcrash), + "reprtraceback": serialize_repr_traceback(longrepr.reprtraceback), + "sections": longrepr.sections, + } + if isinstance(longrepr, ExceptionChainRepr): + result["chain"] = [] + for repr_traceback, repr_crash, description in longrepr.chain: + result["chain"].append( + ( + serialize_repr_traceback(repr_traceback), + serialize_repr_crash(repr_crash), + description, + ) + ) + else: + result["chain"] = None + return result + + d = report.__dict__.copy() + if hasattr(report.longrepr, "toterminal"): + if hasattr(report.longrepr, "reprtraceback") and hasattr( + report.longrepr, "reprcrash" + ): + d["longrepr"] = serialize_exception_longrepr(report) + else: + d["longrepr"] = str(report.longrepr) + else: + d["longrepr"] = report.longrepr + for name in d: + if isinstance(d[name], os.PathLike): + d[name] = os.fspath(d[name]) + elif name == "result": + d[name] = None # for now + return d + + +def _report_kwargs_from_json(reportdict: Dict[str, Any]) -> Dict[str, Any]: + """Return **kwargs that can be used to construct a TestReport or + CollectReport instance. + + This was originally the serialize_report() function from xdist (ca03269). 
+ """ + + def deserialize_repr_entry(entry_data): + data = entry_data["data"] + entry_type = entry_data["type"] + if entry_type == "ReprEntry": + reprfuncargs = None + reprfileloc = None + reprlocals = None + if data["reprfuncargs"]: + reprfuncargs = ReprFuncArgs(**data["reprfuncargs"]) + if data["reprfileloc"]: + reprfileloc = ReprFileLocation(**data["reprfileloc"]) + if data["reprlocals"]: + reprlocals = ReprLocals(data["reprlocals"]["lines"]) + + reprentry: Union[ReprEntry, ReprEntryNative] = ReprEntry( + lines=data["lines"], + reprfuncargs=reprfuncargs, + reprlocals=reprlocals, + reprfileloc=reprfileloc, + style=data["style"], + ) + elif entry_type == "ReprEntryNative": + reprentry = ReprEntryNative(data["lines"]) + else: + _report_unserialization_failure(entry_type, TestReport, reportdict) + return reprentry + + def deserialize_repr_traceback(repr_traceback_dict): + repr_traceback_dict["reprentries"] = [ + deserialize_repr_entry(x) for x in repr_traceback_dict["reprentries"] + ] + return ReprTraceback(**repr_traceback_dict) + + def deserialize_repr_crash(repr_crash_dict: Optional[Dict[str, Any]]): + if repr_crash_dict is not None: + return ReprFileLocation(**repr_crash_dict) + else: + return None + + if ( + reportdict["longrepr"] + and "reprcrash" in reportdict["longrepr"] + and "reprtraceback" in reportdict["longrepr"] + ): + reprtraceback = deserialize_repr_traceback( + reportdict["longrepr"]["reprtraceback"] + ) + reprcrash = deserialize_repr_crash(reportdict["longrepr"]["reprcrash"]) + if reportdict["longrepr"]["chain"]: + chain = [] + for repr_traceback_data, repr_crash_data, description in reportdict[ + "longrepr" + ]["chain"]: + chain.append( + ( + deserialize_repr_traceback(repr_traceback_data), + deserialize_repr_crash(repr_crash_data), + description, + ) + ) + exception_info: Union[ExceptionChainRepr, ReprExceptionInfo] = ( + ExceptionChainRepr(chain) + ) + else: + exception_info = ReprExceptionInfo( + reprtraceback=reprtraceback, + reprcrash=reprcrash, + ) + + for section in reportdict["longrepr"]["sections"]: + exception_info.addsection(*section) + reportdict["longrepr"] = exception_info + + return reportdict diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/runner.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/runner.py new file mode 100644 index 000000000..9bc544ea7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/runner.py @@ -0,0 +1,569 @@ +# mypy: allow-untyped-defs +"""Basic collect and runtest protocol implementations.""" + +import bdb +import dataclasses +import os +import sys +from typing import Callable +from typing import cast +from typing import Dict +from typing import final +from typing import Generic +from typing import List +from typing import Literal +from typing import Optional +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from .reports import BaseReport +from .reports import CollectErrorRepr +from .reports import CollectReport +from .reports import TestReport +from _pytest import timing +from _pytest._code.code import ExceptionChainRepr +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import TerminalRepr +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.nodes import Collector +from _pytest.nodes import Directory +from _pytest.nodes import Item +from _pytest.nodes import Node +from _pytest.outcomes import Exit +from 
_pytest.outcomes import OutcomeException +from _pytest.outcomes import Skipped +from _pytest.outcomes import TEST_OUTCOME + + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + +if TYPE_CHECKING: + from _pytest.main import Session + from _pytest.terminal import TerminalReporter + +# +# pytest plugin hooks. + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting", "Reporting", after="general") + group.addoption( + "--durations", + action="store", + type=int, + default=None, + metavar="N", + help="Show N slowest setup/test durations (N=0 for all)", + ) + group.addoption( + "--durations-min", + action="store", + type=float, + default=0.005, + metavar="N", + help="Minimal duration in seconds for inclusion in slowest list. " + "Default: 0.005.", + ) + + +def pytest_terminal_summary(terminalreporter: "TerminalReporter") -> None: + durations = terminalreporter.config.option.durations + durations_min = terminalreporter.config.option.durations_min + verbose = terminalreporter.config.getvalue("verbose") + if durations is None: + return + tr = terminalreporter + dlist = [] + for replist in tr.stats.values(): + for rep in replist: + if hasattr(rep, "duration"): + dlist.append(rep) + if not dlist: + return + dlist.sort(key=lambda x: x.duration, reverse=True) + if not durations: + tr.write_sep("=", "slowest durations") + else: + tr.write_sep("=", "slowest %s durations" % durations) + dlist = dlist[:durations] + + for i, rep in enumerate(dlist): + if verbose < 2 and rep.duration < durations_min: + tr.write_line("") + tr.write_line( + f"({len(dlist) - i} durations < {durations_min:g}s hidden. Use -vv to show these durations.)" + ) + break + tr.write_line(f"{rep.duration:02.2f}s {rep.when:<8} {rep.nodeid}") + + +def pytest_sessionstart(session: "Session") -> None: + session._setupstate = SetupState() + + +def pytest_sessionfinish(session: "Session") -> None: + session._setupstate.teardown_exact(None) + + +def pytest_runtest_protocol(item: Item, nextitem: Optional[Item]) -> bool: + ihook = item.ihook + ihook.pytest_runtest_logstart(nodeid=item.nodeid, location=item.location) + runtestprotocol(item, nextitem=nextitem) + ihook.pytest_runtest_logfinish(nodeid=item.nodeid, location=item.location) + return True + + +def runtestprotocol( + item: Item, log: bool = True, nextitem: Optional[Item] = None +) -> List[TestReport]: + hasrequest = hasattr(item, "_request") + if hasrequest and not item._request: # type: ignore[attr-defined] + # This only happens if the item is re-run, as is done by + # pytest-rerunfailures. + item._initrequest() # type: ignore[attr-defined] + rep = call_and_report(item, "setup", log) + reports = [rep] + if rep.passed: + if item.config.getoption("setupshow", False): + show_test_item(item) + if not item.config.getoption("setuponly", False): + reports.append(call_and_report(item, "call", log)) + reports.append(call_and_report(item, "teardown", log, nextitem=nextitem)) + # After all teardown hooks have been called + # want funcargs and request info to go away. 
+ if hasrequest: + item._request = False # type: ignore[attr-defined] + item.funcargs = None # type: ignore[attr-defined] + return reports + + +def show_test_item(item: Item) -> None: + """Show test function, parameters and the fixtures of the test item.""" + tw = item.config.get_terminal_writer() + tw.line() + tw.write(" " * 8) + tw.write(item.nodeid) + used_fixtures = sorted(getattr(item, "fixturenames", [])) + if used_fixtures: + tw.write(" (fixtures used: {})".format(", ".join(used_fixtures))) + tw.flush() + + +def pytest_runtest_setup(item: Item) -> None: + _update_current_test_var(item, "setup") + item.session._setupstate.setup(item) + + +def pytest_runtest_call(item: Item) -> None: + _update_current_test_var(item, "call") + try: + del sys.last_type + del sys.last_value + del sys.last_traceback + if sys.version_info >= (3, 12, 0): + del sys.last_exc # type: ignore[attr-defined] + except AttributeError: + pass + try: + item.runtest() + except Exception as e: + # Store trace info to allow postmortem debugging + sys.last_type = type(e) + sys.last_value = e + if sys.version_info >= (3, 12, 0): + sys.last_exc = e # type: ignore[attr-defined] + assert e.__traceback__ is not None + # Skip *this* frame + sys.last_traceback = e.__traceback__.tb_next + raise e + + +def pytest_runtest_teardown(item: Item, nextitem: Optional[Item]) -> None: + _update_current_test_var(item, "teardown") + item.session._setupstate.teardown_exact(nextitem) + _update_current_test_var(item, None) + + +def _update_current_test_var( + item: Item, when: Optional[Literal["setup", "call", "teardown"]] +) -> None: + """Update :envvar:`PYTEST_CURRENT_TEST` to reflect the current item and stage. + + If ``when`` is None, delete ``PYTEST_CURRENT_TEST`` from the environment. + """ + var_name = "PYTEST_CURRENT_TEST" + if when: + value = f"{item.nodeid} ({when})" + # don't allow null bytes on environment variables (see #2644, #2957) + value = value.replace("\x00", "(null)") + os.environ[var_name] = value + else: + os.environ.pop(var_name) + + +def pytest_report_teststatus(report: BaseReport) -> Optional[Tuple[str, str, str]]: + if report.when in ("setup", "teardown"): + if report.failed: + # category, shortletter, verbose-word + return "error", "E", "ERROR" + elif report.skipped: + return "skipped", "s", "SKIPPED" + else: + return "", "", "" + return None + + +# +# Implementation + + +def call_and_report( + item: Item, when: Literal["setup", "call", "teardown"], log: bool = True, **kwds +) -> TestReport: + ihook = item.ihook + if when == "setup": + runtest_hook: Callable[..., None] = ihook.pytest_runtest_setup + elif when == "call": + runtest_hook = ihook.pytest_runtest_call + elif when == "teardown": + runtest_hook = ihook.pytest_runtest_teardown + else: + assert False, f"Unhandled runtest hook case: {when}" + reraise: Tuple[Type[BaseException], ...] 
= (Exit,) + if not item.config.getoption("usepdb", False): + reraise += (KeyboardInterrupt,) + call = CallInfo.from_call( + lambda: runtest_hook(item=item, **kwds), when=when, reraise=reraise + ) + report: TestReport = ihook.pytest_runtest_makereport(item=item, call=call) + if log: + ihook.pytest_runtest_logreport(report=report) + if check_interactive_exception(call, report): + ihook.pytest_exception_interact(node=item, call=call, report=report) + return report + + +def check_interactive_exception(call: "CallInfo[object]", report: BaseReport) -> bool: + """Check whether the call raised an exception that should be reported as + interactive.""" + if call.excinfo is None: + # Didn't raise. + return False + if hasattr(report, "wasxfail"): + # Exception was expected. + return False + if isinstance(call.excinfo.value, (Skipped, bdb.BdbQuit)): + # Special control flow exception. + return False + return True + + +TResult = TypeVar("TResult", covariant=True) + + +@final +@dataclasses.dataclass +class CallInfo(Generic[TResult]): + """Result/Exception info of a function invocation.""" + + _result: Optional[TResult] + #: The captured exception of the call, if it raised. + excinfo: Optional[ExceptionInfo[BaseException]] + #: The system time when the call started, in seconds since the epoch. + start: float + #: The system time when the call ended, in seconds since the epoch. + stop: float + #: The call duration, in seconds. + duration: float + #: The context of invocation: "collect", "setup", "call" or "teardown". + when: Literal["collect", "setup", "call", "teardown"] + + def __init__( + self, + result: Optional[TResult], + excinfo: Optional[ExceptionInfo[BaseException]], + start: float, + stop: float, + duration: float, + when: Literal["collect", "setup", "call", "teardown"], + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._result = result + self.excinfo = excinfo + self.start = start + self.stop = stop + self.duration = duration + self.when = when + + @property + def result(self) -> TResult: + """The return value of the call, if it didn't raise. + + Can only be accessed if excinfo is None. + """ + if self.excinfo is not None: + raise AttributeError(f"{self!r} has no valid result") + # The cast is safe because an exception wasn't raised, hence + # _result has the expected function return type (which may be + # None, that's why a cast and not an assert). + return cast(TResult, self._result) + + @classmethod + def from_call( + cls, + func: Callable[[], TResult], + when: Literal["collect", "setup", "call", "teardown"], + reraise: Optional[ + Union[Type[BaseException], Tuple[Type[BaseException], ...]] + ] = None, + ) -> "CallInfo[TResult]": + """Call func, wrapping the result in a CallInfo. + + :param func: + The function to call. Called without arguments. + :param when: + The phase in which the function is called. + :param reraise: + Exception or exceptions that shall propagate if raised by the + function, instead of being wrapped in the CallInfo. 
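        A minimal illustrative sketch (``CallInfo`` is an internal helper, so
        this is not a public usage pattern)::

            info = CallInfo.from_call(lambda: 1 / 0, when="call")
            assert info.excinfo is not None
            assert info.excinfo.type is ZeroDivisionError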
+ """ + excinfo = None + start = timing.time() + precise_start = timing.perf_counter() + try: + result: Optional[TResult] = func() + except BaseException: + excinfo = ExceptionInfo.from_current() + if reraise is not None and isinstance(excinfo.value, reraise): + raise + result = None + # use the perf counter + precise_stop = timing.perf_counter() + duration = precise_stop - precise_start + stop = timing.time() + return cls( + start=start, + stop=stop, + duration=duration, + when=when, + result=result, + excinfo=excinfo, + _ispytest=True, + ) + + def __repr__(self) -> str: + if self.excinfo is None: + return f"" + return f"" + + +def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> TestReport: + return TestReport.from_item_and_call(item, call) + + +def pytest_make_collect_report(collector: Collector) -> CollectReport: + def collect() -> List[Union[Item, Collector]]: + # Before collecting, if this is a Directory, load the conftests. + # If a conftest import fails to load, it is considered a collection + # error of the Directory collector. This is why it's done inside of the + # CallInfo wrapper. + # + # Note: initial conftests are loaded early, not here. + if isinstance(collector, Directory): + collector.config.pluginmanager._loadconftestmodules( + collector.path, + collector.config.getoption("importmode"), + rootpath=collector.config.rootpath, + consider_namespace_packages=collector.config.getini( + "consider_namespace_packages" + ), + ) + + return list(collector.collect()) + + call = CallInfo.from_call(collect, "collect") + longrepr: Union[None, Tuple[str, int, str], str, TerminalRepr] = None + if not call.excinfo: + outcome: Literal["passed", "skipped", "failed"] = "passed" + else: + skip_exceptions = [Skipped] + unittest = sys.modules.get("unittest") + if unittest is not None: + skip_exceptions.append(unittest.SkipTest) + if isinstance(call.excinfo.value, tuple(skip_exceptions)): + outcome = "skipped" + r_ = collector._repr_failure_py(call.excinfo, "line") + assert isinstance(r_, ExceptionChainRepr), repr(r_) + r = r_.reprcrash + assert r + longrepr = (str(r.path), r.lineno, r.message) + else: + outcome = "failed" + errorinfo = collector.repr_failure(call.excinfo) + if not hasattr(errorinfo, "toterminal"): + assert isinstance(errorinfo, str) + errorinfo = CollectErrorRepr(errorinfo) + longrepr = errorinfo + result = call.result if not call.excinfo else None + rep = CollectReport(collector.nodeid, outcome, longrepr, result) + rep.call = call # type: ignore # see collect_one_node + return rep + + +class SetupState: + """Shared state for setting up/tearing down test items or collectors + in a session. + + Suppose we have a collection tree as follows: + + + + + + + + The SetupState maintains a stack. The stack starts out empty: + + [] + + During the setup phase of item1, setup(item1) is called. What it does + is: + + push session to stack, run session.setup() + push mod1 to stack, run mod1.setup() + push item1 to stack, run item1.setup() + + The stack is: + + [session, mod1, item1] + + While the stack is in this shape, it is allowed to add finalizers to + each of session, mod1, item1 using addfinalizer(). + + During the teardown phase of item1, teardown_exact(item2) is called, + where item2 is the next item to item1. What it does is: + + pop item1 from stack, run its teardowns + pop mod1 from stack, run its teardowns + + mod1 was popped because it ended its purpose with item1. The stack is: + + [session] + + During the setup phase of item2, setup(item2) is called. 
What it does + is: + + push mod2 to stack, run mod2.setup() + push item2 to stack, run item2.setup() + + Stack: + + [session, mod2, item2] + + During the teardown phase of item2, teardown_exact(None) is called, + because item2 is the last item. What it does is: + + pop item2 from stack, run its teardowns + pop mod2 from stack, run its teardowns + pop session from stack, run its teardowns + + Stack: + + [] + + The end! + """ + + def __init__(self) -> None: + # The stack is in the dict insertion order. + self.stack: Dict[ + Node, + Tuple[ + # Node's finalizers. + List[Callable[[], object]], + # Node's exception, if its setup raised. + Optional[Union[OutcomeException, Exception]], + ], + ] = {} + + def setup(self, item: Item) -> None: + """Setup objects along the collector chain to the item.""" + needed_collectors = item.listchain() + + # If a collector fails its setup, fail its entire subtree of items. + # The setup is not retried for each item - the same exception is used. + for col, (finalizers, exc) in self.stack.items(): + assert col in needed_collectors, "previous item was not torn down properly" + if exc: + raise exc + + for col in needed_collectors[len(self.stack) :]: + assert col not in self.stack + # Push onto the stack. + self.stack[col] = ([col.teardown], None) + try: + col.setup() + except TEST_OUTCOME as exc: + self.stack[col] = (self.stack[col][0], exc) + raise exc + + def addfinalizer(self, finalizer: Callable[[], object], node: Node) -> None: + """Attach a finalizer to the given node. + + The node must be currently active in the stack. + """ + assert node and not isinstance(node, tuple) + assert callable(finalizer) + assert node in self.stack, (node, self.stack) + self.stack[node][0].append(finalizer) + + def teardown_exact(self, nextitem: Optional[Item]) -> None: + """Teardown the current stack up until reaching nodes that nextitem + also descends from. + + When nextitem is None (meaning we're at the last item), the entire + stack is torn down. + """ + needed_collectors = nextitem and nextitem.listchain() or [] + exceptions: List[BaseException] = [] + while self.stack: + if list(self.stack.keys()) == needed_collectors[: len(self.stack)]: + break + node, (finalizers, _) = self.stack.popitem() + these_exceptions = [] + while finalizers: + fin = finalizers.pop() + try: + fin() + except TEST_OUTCOME as e: + these_exceptions.append(e) + + if len(these_exceptions) == 1: + exceptions.extend(these_exceptions) + elif these_exceptions: + msg = f"errors while tearing down {node!r}" + exceptions.append(BaseExceptionGroup(msg, these_exceptions[::-1])) + + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise BaseExceptionGroup("errors during test teardown", exceptions[::-1]) + if nextitem is None: + assert not self.stack + + +def collect_one_node(collector: Collector) -> CollectReport: + ihook = collector.ihook + ihook.pytest_collectstart(collector=collector) + rep: CollectReport = ihook.pytest_make_collect_report(collector=collector) + call = rep.__dict__.pop("call", None) + if call and check_interactive_exception(call, rep): + ihook.pytest_exception_interact(node=collector, call=call, report=rep) + return rep diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/scope.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/scope.py new file mode 100644 index 000000000..2c6e23208 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/scope.py @@ -0,0 +1,90 @@ +""" +Scope definition and related utilities. 
+ +Those are defined here, instead of in the 'fixtures' module because +their use is spread across many other pytest modules, and centralizing it in 'fixtures' +would cause circular references. + +Also this makes the module light to import, as it should. +""" + +from enum import Enum +from functools import total_ordering +from typing import Literal +from typing import Optional + + +_ScopeName = Literal["session", "package", "module", "class", "function"] + + +@total_ordering +class Scope(Enum): + """ + Represents one of the possible fixture scopes in pytest. + + Scopes are ordered from lower to higher, that is: + + ->>> higher ->>> + + Function < Class < Module < Package < Session + + <<<- lower <<<- + """ + + # Scopes need to be listed from lower to higher. + Function: _ScopeName = "function" + Class: _ScopeName = "class" + Module: _ScopeName = "module" + Package: _ScopeName = "package" + Session: _ScopeName = "session" + + def next_lower(self) -> "Scope": + """Return the next lower scope.""" + index = _SCOPE_INDICES[self] + if index == 0: + raise ValueError(f"{self} is the lower-most scope") + return _ALL_SCOPES[index - 1] + + def next_higher(self) -> "Scope": + """Return the next higher scope.""" + index = _SCOPE_INDICES[self] + if index == len(_SCOPE_INDICES) - 1: + raise ValueError(f"{self} is the upper-most scope") + return _ALL_SCOPES[index + 1] + + def __lt__(self, other: "Scope") -> bool: + self_index = _SCOPE_INDICES[self] + other_index = _SCOPE_INDICES[other] + return self_index < other_index + + @classmethod + def from_user( + cls, scope_name: _ScopeName, descr: str, where: Optional[str] = None + ) -> "Scope": + """ + Given a scope name from the user, return the equivalent Scope enum. Should be used + whenever we want to convert a user provided scope name to its enum object. + + If the scope name is invalid, construct a user friendly message and call pytest.fail. + """ + from _pytest.outcomes import fail + + try: + # Holding this reference is necessary for mypy at the moment. + scope = Scope(scope_name) + except ValueError: + fail( + "{} {}got an unexpected scope value '{}'".format( + descr, f"from {where} " if where else "", scope_name + ), + pytrace=False, + ) + return scope + + +_ALL_SCOPES = list(Scope) +_SCOPE_INDICES = {scope: index for index, scope in enumerate(_ALL_SCOPES)} + + +# Ordered list of scopes which can contain many tests (in practice all except Function). 
+HIGH_SCOPES = [x for x in Scope if x is not Scope.Function] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/setuponly.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/setuponly.py new file mode 100644 index 000000000..39ab28b46 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/setuponly.py @@ -0,0 +1,102 @@ +from typing import Generator +from typing import Optional +from typing import Union + +from _pytest._io.saferepr import saferepr +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config.argparsing import Parser +from _pytest.fixtures import FixtureDef +from _pytest.fixtures import SubRequest +from _pytest.scope import Scope +import pytest + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--setuponly", + "--setup-only", + action="store_true", + help="Only setup fixtures, do not execute tests", + ) + group.addoption( + "--setupshow", + "--setup-show", + action="store_true", + help="Show setup of fixtures while executing tests", + ) + + +@pytest.hookimpl(wrapper=True) +def pytest_fixture_setup( + fixturedef: FixtureDef[object], request: SubRequest +) -> Generator[None, object, object]: + try: + return (yield) + finally: + if request.config.option.setupshow: + if hasattr(request, "param"): + # Save the fixture parameter so ._show_fixture_action() can + # display it now and during the teardown (in .finish()). + if fixturedef.ids: + if callable(fixturedef.ids): + param = fixturedef.ids(request.param) + else: + param = fixturedef.ids[request.param_index] + else: + param = request.param + fixturedef.cached_param = param # type: ignore[attr-defined] + _show_fixture_action(fixturedef, request.config, "SETUP") + + +def pytest_fixture_post_finalizer( + fixturedef: FixtureDef[object], request: SubRequest +) -> None: + if fixturedef.cached_result is not None: + config = request.config + if config.option.setupshow: + _show_fixture_action(fixturedef, request.config, "TEARDOWN") + if hasattr(fixturedef, "cached_param"): + del fixturedef.cached_param + + +def _show_fixture_action( + fixturedef: FixtureDef[object], config: Config, msg: str +) -> None: + capman = config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture() + + tw = config.get_terminal_writer() + tw.line() + # Use smaller indentation the higher the scope: Session = 0, Package = 1, etc. 
+ scope_indent = list(reversed(Scope)).index(fixturedef._scope) + tw.write(" " * 2 * scope_indent) + tw.write( + "{step} {scope} {fixture}".format( # noqa: UP032 (Readability) + step=msg.ljust(8), # align the output to TEARDOWN + scope=fixturedef.scope[0].upper(), + fixture=fixturedef.argname, + ) + ) + + if msg == "SETUP": + deps = sorted(arg for arg in fixturedef.argnames if arg != "request") + if deps: + tw.write(" (fixtures used: {})".format(", ".join(deps))) + + if hasattr(fixturedef, "cached_param"): + tw.write(f"[{saferepr(fixturedef.cached_param, maxsize=42)}]") + + tw.flush() + + if capman: + capman.resume_global_capture() + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]: + if config.option.setuponly: + config.option.setupshow = True + return None diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/setupplan.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/setupplan.py new file mode 100644 index 000000000..13c0df84e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/setupplan.py @@ -0,0 +1,40 @@ +from typing import Optional +from typing import Union + +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config.argparsing import Parser +from _pytest.fixtures import FixtureDef +from _pytest.fixtures import SubRequest +import pytest + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--setupplan", + "--setup-plan", + action="store_true", + help="Show what fixtures and tests would be executed but " + "don't execute anything", + ) + + +@pytest.hookimpl(tryfirst=True) +def pytest_fixture_setup( + fixturedef: FixtureDef[object], request: SubRequest +) -> Optional[object]: + # Will return a dummy fixture if the setuponly option is provided. 
+ if request.config.option.setupplan: + my_cache_key = fixturedef.cache_key(request) + fixturedef.cached_result = (None, my_cache_key, None) + return fixturedef.cached_result + return None + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]: + if config.option.setupplan: + config.option.setuponly = True + config.option.setupshow = True + return None diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/skipping.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/skipping.py new file mode 100644 index 000000000..188dcae3f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/skipping.py @@ -0,0 +1,301 @@ +# mypy: allow-untyped-defs +"""Support for skip/xfail functions and markers.""" + +from collections.abc import Mapping +import dataclasses +import os +import platform +import sys +import traceback +from typing import Generator +from typing import Optional +from typing import Tuple +from typing import Type + +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.mark.structures import Mark +from _pytest.nodes import Item +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.outcomes import xfail +from _pytest.reports import BaseReport +from _pytest.reports import TestReport +from _pytest.runner import CallInfo +from _pytest.stash import StashKey + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--runxfail", + action="store_true", + dest="runxfail", + default=False, + help="Report the results of xfail tests as if they were not marked", + ) + + parser.addini( + "xfail_strict", + "Default for the strict parameter of xfail " + "markers when not given explicitly (default: False)", + default=False, + type="bool", + ) + + +def pytest_configure(config: Config) -> None: + if config.option.runxfail: + # yay a hack + import pytest + + old = pytest.xfail + config.add_cleanup(lambda: setattr(pytest, "xfail", old)) + + def nop(*args, **kwargs): + pass + + nop.Exception = xfail.Exception # type: ignore[attr-defined] + setattr(pytest, "xfail", nop) + + config.addinivalue_line( + "markers", + "skip(reason=None): skip the given test function with an optional reason. " + 'Example: skip(reason="no way of currently testing this") skips the ' + "test.", + ) + config.addinivalue_line( + "markers", + "skipif(condition, ..., *, reason=...): " + "skip the given test function if any of the conditions evaluate to True. " + "Example: skipif(sys.platform == 'win32') skips the test if we are on the win32 platform. " + "See https://docs.pytest.org/en/stable/reference/reference.html#pytest-mark-skipif", + ) + config.addinivalue_line( + "markers", + "xfail(condition, ..., *, reason=..., run=True, raises=None, strict=xfail_strict): " + "mark the test function as an expected failure if any of the conditions " + "evaluate to True. Optionally specify a reason for better reporting " + "and run=False if you don't even want to execute the test function. " + "If only specific exception(s) are expected, you can list them in " + "raises, and if the test fails in other ways, it will be reported as " + "a true failure. See https://docs.pytest.org/en/stable/reference/reference.html#pytest-mark-xfail", + ) + + +def evaluate_condition(item: Item, mark: Mark, condition: object) -> Tuple[bool, str]: + """Evaluate a single skipif/xfail condition. 
+ + If an old-style string condition is given, it is eval()'d, otherwise the + condition is bool()'d. If this fails, an appropriately formatted pytest.fail + is raised. + + Returns (result, reason). The reason is only relevant if the result is True. + """ + # String condition. + if isinstance(condition, str): + globals_ = { + "os": os, + "sys": sys, + "platform": platform, + "config": item.config, + } + for dictionary in reversed( + item.ihook.pytest_markeval_namespace(config=item.config) + ): + if not isinstance(dictionary, Mapping): + raise ValueError( + f"pytest_markeval_namespace() needs to return a dict, got {dictionary!r}" + ) + globals_.update(dictionary) + if hasattr(item, "obj"): + globals_.update(item.obj.__globals__) + try: + filename = f"<{mark.name} condition>" + condition_code = compile(condition, filename, "eval") + result = eval(condition_code, globals_) + except SyntaxError as exc: + msglines = [ + "Error evaluating %r condition" % mark.name, + " " + condition, + " " + " " * (exc.offset or 0) + "^", + "SyntaxError: invalid syntax", + ] + fail("\n".join(msglines), pytrace=False) + except Exception as exc: + msglines = [ + "Error evaluating %r condition" % mark.name, + " " + condition, + *traceback.format_exception_only(type(exc), exc), + ] + fail("\n".join(msglines), pytrace=False) + + # Boolean condition. + else: + try: + result = bool(condition) + except Exception as exc: + msglines = [ + "Error evaluating %r condition as a boolean" % mark.name, + *traceback.format_exception_only(type(exc), exc), + ] + fail("\n".join(msglines), pytrace=False) + + reason = mark.kwargs.get("reason", None) + if reason is None: + if isinstance(condition, str): + reason = "condition: " + condition + else: + # XXX better be checked at collection time + msg = ( + "Error evaluating %r: " % mark.name + + "you need to specify reason=STRING when using booleans as conditions." + ) + fail(msg, pytrace=False) + + return result, reason + + +@dataclasses.dataclass(frozen=True) +class Skip: + """The result of evaluate_skip_marks().""" + + reason: str = "unconditional skip" + + +def evaluate_skip_marks(item: Item) -> Optional[Skip]: + """Evaluate skip and skipif marks on item, returning Skip if triggered.""" + for mark in item.iter_markers(name="skipif"): + if "condition" not in mark.kwargs: + conditions = mark.args + else: + conditions = (mark.kwargs["condition"],) + + # Unconditional. + if not conditions: + reason = mark.kwargs.get("reason", "") + return Skip(reason) + + # If any of the conditions are true. 
+ for condition in conditions: + result, reason = evaluate_condition(item, mark, condition) + if result: + return Skip(reason) + + for mark in item.iter_markers(name="skip"): + try: + return Skip(*mark.args, **mark.kwargs) + except TypeError as e: + raise TypeError(str(e) + " - maybe you meant pytest.mark.skipif?") from None + + return None + + +@dataclasses.dataclass(frozen=True) +class Xfail: + """The result of evaluate_xfail_marks().""" + + __slots__ = ("reason", "run", "strict", "raises") + + reason: str + run: bool + strict: bool + raises: Optional[Tuple[Type[BaseException], ...]] + + +def evaluate_xfail_marks(item: Item) -> Optional[Xfail]: + """Evaluate xfail marks on item, returning Xfail if triggered.""" + for mark in item.iter_markers(name="xfail"): + run = mark.kwargs.get("run", True) + strict = mark.kwargs.get("strict", item.config.getini("xfail_strict")) + raises = mark.kwargs.get("raises", None) + if "condition" not in mark.kwargs: + conditions = mark.args + else: + conditions = (mark.kwargs["condition"],) + + # Unconditional. + if not conditions: + reason = mark.kwargs.get("reason", "") + return Xfail(reason, run, strict, raises) + + # If any of the conditions are true. + for condition in conditions: + result, reason = evaluate_condition(item, mark, condition) + if result: + return Xfail(reason, run, strict, raises) + + return None + + +# Saves the xfail mark evaluation. Can be refreshed during call if None. +xfailed_key = StashKey[Optional[Xfail]]() + + +@hookimpl(tryfirst=True) +def pytest_runtest_setup(item: Item) -> None: + skipped = evaluate_skip_marks(item) + if skipped: + raise skip.Exception(skipped.reason, _use_item_location=True) + + item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item) + if xfailed and not item.config.option.runxfail and not xfailed.run: + xfail("[NOTRUN] " + xfailed.reason) + + +@hookimpl(wrapper=True) +def pytest_runtest_call(item: Item) -> Generator[None, None, None]: + xfailed = item.stash.get(xfailed_key, None) + if xfailed is None: + item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item) + + if xfailed and not item.config.option.runxfail and not xfailed.run: + xfail("[NOTRUN] " + xfailed.reason) + + try: + return (yield) + finally: + # The test run may have added an xfail mark dynamically. 
+ xfailed = item.stash.get(xfailed_key, None) + if xfailed is None: + item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item) + + +@hookimpl(wrapper=True) +def pytest_runtest_makereport( + item: Item, call: CallInfo[None] +) -> Generator[None, TestReport, TestReport]: + rep = yield + xfailed = item.stash.get(xfailed_key, None) + if item.config.option.runxfail: + pass # don't interfere + elif call.excinfo and isinstance(call.excinfo.value, xfail.Exception): + assert call.excinfo.value.msg is not None + rep.wasxfail = "reason: " + call.excinfo.value.msg + rep.outcome = "skipped" + elif not rep.skipped and xfailed: + if call.excinfo: + raises = xfailed.raises + if raises is not None and not isinstance(call.excinfo.value, raises): + rep.outcome = "failed" + else: + rep.outcome = "skipped" + rep.wasxfail = xfailed.reason + elif call.when == "call": + if xfailed.strict: + rep.outcome = "failed" + rep.longrepr = "[XPASS(strict)] " + xfailed.reason + else: + rep.outcome = "passed" + rep.wasxfail = xfailed.reason + return rep + + +def pytest_report_teststatus(report: BaseReport) -> Optional[Tuple[str, str, str]]: + if hasattr(report, "wasxfail"): + if report.skipped: + return "xfailed", "x", "XFAIL" + elif report.passed: + return "xpassed", "X", "XPASS" + return None diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/stash.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/stash.py new file mode 100644 index 000000000..a4b829fc6 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/stash.py @@ -0,0 +1,116 @@ +from typing import Any +from typing import cast +from typing import Dict +from typing import Generic +from typing import TypeVar +from typing import Union + + +__all__ = ["Stash", "StashKey"] + + +T = TypeVar("T") +D = TypeVar("D") + + +class StashKey(Generic[T]): + """``StashKey`` is an object used as a key to a :class:`Stash`. + + A ``StashKey`` is associated with the type ``T`` of the value of the key. + + A ``StashKey`` is unique and cannot conflict with another key. + + .. versionadded:: 7.0 + """ + + __slots__ = () + + +class Stash: + r"""``Stash`` is a type-safe heterogeneous mutable mapping that + allows keys and value types to be defined separately from + where it (the ``Stash``) is created. + + Usually you will be given an object which has a ``Stash``, for example + :class:`~pytest.Config` or a :class:`~_pytest.nodes.Node`: + + .. code-block:: python + + stash: Stash = some_object.stash + + If a module or plugin wants to store data in this ``Stash``, it creates + :class:`StashKey`\s for its keys (at the module level): + + .. code-block:: python + + # At the top-level of the module + some_str_key = StashKey[str]() + some_bool_key = StashKey[bool]() + + To store information: + + .. code-block:: python + + # Value type must match the key. + stash[some_str_key] = "value" + stash[some_bool_key] = True + + To retrieve the information: + + .. code-block:: python + + # The static type of some_str is str. + some_str = stash[some_str_key] + # The static type of some_bool is bool. + some_bool = stash[some_bool_key] + + .. versionadded:: 7.0 + """ + + __slots__ = ("_storage",) + + def __init__(self) -> None: + self._storage: Dict[StashKey[Any], object] = {} + + def __setitem__(self, key: StashKey[T], value: T) -> None: + """Set a value for key.""" + self._storage[key] = value + + def __getitem__(self, key: StashKey[T]) -> T: + """Get the value for key. + + Raises ``KeyError`` if the key wasn't set before. 
+ """ + return cast(T, self._storage[key]) + + def get(self, key: StashKey[T], default: D) -> Union[T, D]: + """Get the value for key, or return default if the key wasn't set + before.""" + try: + return self[key] + except KeyError: + return default + + def setdefault(self, key: StashKey[T], default: T) -> T: + """Return the value of key if already set, otherwise set the value + of key to default and return default.""" + try: + return self[key] + except KeyError: + self[key] = default + return default + + def __delitem__(self, key: StashKey[T]) -> None: + """Delete the value for key. + + Raises ``KeyError`` if the key wasn't set before. + """ + del self._storage[key] + + def __contains__(self, key: StashKey[T]) -> bool: + """Return whether key was set.""" + return key in self._storage + + def __len__(self) -> int: + """Return how many items exist in the stash.""" + return len(self._storage) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/stepwise.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/stepwise.py new file mode 100644 index 000000000..3ebebc288 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/stepwise.py @@ -0,0 +1,131 @@ +from typing import List +from typing import Optional +from typing import TYPE_CHECKING + +from _pytest import nodes +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.main import Session +from _pytest.reports import TestReport +import pytest + + +if TYPE_CHECKING: + from _pytest.cacheprovider import Cache + +STEPWISE_CACHE_DIR = "cache/stepwise" + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--sw", + "--stepwise", + action="store_true", + default=False, + dest="stepwise", + help="Exit on test failure and continue from last failing test next time", + ) + group.addoption( + "--sw-skip", + "--stepwise-skip", + action="store_true", + default=False, + dest="stepwise_skip", + help="Ignore the first failing test but stop on the next failing test. " + "Implicitly enables --stepwise.", + ) + + +@pytest.hookimpl +def pytest_configure(config: Config) -> None: + if config.option.stepwise_skip: + # allow --stepwise-skip to work on it's own merits. + config.option.stepwise = True + if config.getoption("stepwise"): + config.pluginmanager.register(StepwisePlugin(config), "stepwiseplugin") + + +def pytest_sessionfinish(session: Session) -> None: + if not session.config.getoption("stepwise"): + assert session.config.cache is not None + if hasattr(session.config, "workerinput"): + # Do not update cache if this process is a xdist worker to prevent + # race conditions (#10641). + return + # Clear the list of failing tests if the plugin is not active. + session.config.cache.set(STEPWISE_CACHE_DIR, []) + + +class StepwisePlugin: + def __init__(self, config: Config) -> None: + self.config = config + self.session: Optional[Session] = None + self.report_status = "" + assert config.cache is not None + self.cache: Cache = config.cache + self.lastfailed: Optional[str] = self.cache.get(STEPWISE_CACHE_DIR, None) + self.skip: bool = config.getoption("stepwise_skip") + + def pytest_sessionstart(self, session: Session) -> None: + self.session = session + + def pytest_collection_modifyitems( + self, config: Config, items: List[nodes.Item] + ) -> None: + if not self.lastfailed: + self.report_status = "no previously failed tests, not skipping." 
+ return + + # check all item nodes until we find a match on last failed + failed_index = None + for index, item in enumerate(items): + if item.nodeid == self.lastfailed: + failed_index = index + break + + # If the previously failed test was not found among the test items, + # do not skip any tests. + if failed_index is None: + self.report_status = "previously failed test not found, not skipping." + else: + self.report_status = f"skipping {failed_index} already passed items." + deselected = items[:failed_index] + del items[:failed_index] + config.hook.pytest_deselected(items=deselected) + + def pytest_runtest_logreport(self, report: TestReport) -> None: + if report.failed: + if self.skip: + # Remove test from the failed ones (if it exists) and unset the skip option + # to make sure the following tests will not be skipped. + if report.nodeid == self.lastfailed: + self.lastfailed = None + + self.skip = False + else: + # Mark test as the last failing and interrupt the test session. + self.lastfailed = report.nodeid + assert self.session is not None + self.session.shouldstop = ( + "Test failed, continuing from this test next run." + ) + + else: + # If the test was actually run and did pass. + if report.when == "call": + # Remove test from the failed ones, if exists. + if report.nodeid == self.lastfailed: + self.lastfailed = None + + def pytest_report_collectionfinish(self) -> Optional[str]: + if self.config.getoption("verbose") >= 0 and self.report_status: + return f"stepwise: {self.report_status}" + return None + + def pytest_sessionfinish(self) -> None: + if hasattr(self.config, "workerinput"): + # Do not update cache if this process is a xdist worker to prevent + # race conditions (#10641). + return + self.cache.set(STEPWISE_CACHE_DIR, self.lastfailed) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/terminal.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/terminal.py new file mode 100644 index 000000000..724d5c54d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/terminal.py @@ -0,0 +1,1516 @@ +# mypy: allow-untyped-defs +"""Terminal reporting of the full testing process. + +This is a good source for looking at the various reporting hooks. 
+""" + +import argparse +from collections import Counter +import dataclasses +import datetime +from functools import partial +import inspect +from pathlib import Path +import platform +import sys +import textwrap +from typing import Any +from typing import Callable +from typing import ClassVar +from typing import Dict +from typing import final +from typing import Generator +from typing import List +from typing import Literal +from typing import Mapping +from typing import NamedTuple +from typing import Optional +from typing import Sequence +from typing import Set +from typing import TextIO +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union +import warnings + +import pluggy + +from _pytest import nodes +from _pytest import timing +from _pytest._code import ExceptionInfo +from _pytest._code.code import ExceptionRepr +from _pytest._io import TerminalWriter +from _pytest._io.wcwidth import wcswidth +import _pytest._version +from _pytest.assertion.util import running_on_ci +from _pytest.config import _PluggyPlugin +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.nodes import Item +from _pytest.nodes import Node +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.reports import BaseReport +from _pytest.reports import CollectReport +from _pytest.reports import TestReport + + +if TYPE_CHECKING: + from _pytest.main import Session + + +REPORT_COLLECTING_RESOLUTION = 0.5 + +KNOWN_TYPES = ( + "failed", + "passed", + "skipped", + "deselected", + "xfailed", + "xpassed", + "warnings", + "error", +) + +_REPORTCHARS_DEFAULT = "fE" + + +class MoreQuietAction(argparse.Action): + """A modified copy of the argparse count action which counts down and updates + the legacy quiet attribute at the same time. + + Used to unify verbosity handling. + """ + + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: object = None, + required: bool = False, + help: Optional[str] = None, + ) -> None: + super().__init__( + option_strings=option_strings, + dest=dest, + nargs=0, + default=default, + required=required, + help=help, + ) + + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: argparse.Namespace, + values: Union[str, Sequence[object], None], + option_string: Optional[str] = None, + ) -> None: + new_count = getattr(namespace, self.dest, 0) - 1 + setattr(namespace, self.dest, new_count) + # todo Deprecate config.quiet + namespace.quiet = getattr(namespace, "quiet", 0) + 1 + + +class TestShortLogReport(NamedTuple): + """Used to store the test status result category, shortletter and verbose word. + For example ``"rerun", "R", ("RERUN", {"yellow": True})``. + + :ivar category: + The class of result, for example ``“passed”``, ``“skipped”``, ``“error”``, or the empty string. + + :ivar letter: + The short letter shown as testing progresses, for example ``"."``, ``"s"``, ``"E"``, or the empty string. + + :ivar word: + Verbose word is shown as testing progresses in verbose mode, for example ``"PASSED"``, ``"SKIPPED"``, + ``"ERROR"``, or the empty string. 
+ """ + + category: str + letter: str + word: Union[str, Tuple[str, Mapping[str, bool]]] + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting", "Reporting", after="general") + group._addoption( + "-v", + "--verbose", + action="count", + default=0, + dest="verbose", + help="Increase verbosity", + ) + group._addoption( + "--no-header", + action="store_true", + default=False, + dest="no_header", + help="Disable header", + ) + group._addoption( + "--no-summary", + action="store_true", + default=False, + dest="no_summary", + help="Disable summary", + ) + group._addoption( + "-q", + "--quiet", + action=MoreQuietAction, + default=0, + dest="verbose", + help="Decrease verbosity", + ) + group._addoption( + "--verbosity", + dest="verbose", + type=int, + default=0, + help="Set verbosity. Default: 0.", + ) + group._addoption( + "-r", + action="store", + dest="reportchars", + default=_REPORTCHARS_DEFAULT, + metavar="chars", + help="Show extra test summary info as specified by chars: (f)ailed, " + "(E)rror, (s)kipped, (x)failed, (X)passed, " + "(p)assed, (P)assed with output, (a)ll except passed (p/P), or (A)ll. " + "(w)arnings are enabled by default (see --disable-warnings), " + "'N' can be used to reset the list. (default: 'fE').", + ) + group._addoption( + "--disable-warnings", + "--disable-pytest-warnings", + default=False, + dest="disable_warnings", + action="store_true", + help="Disable warnings summary", + ) + group._addoption( + "-l", + "--showlocals", + action="store_true", + dest="showlocals", + default=False, + help="Show locals in tracebacks (disabled by default)", + ) + group._addoption( + "--no-showlocals", + action="store_false", + dest="showlocals", + help="Hide locals in tracebacks (negate --showlocals passed through addopts)", + ) + group._addoption( + "--tb", + metavar="style", + action="store", + dest="tbstyle", + default="auto", + choices=["auto", "long", "short", "no", "line", "native"], + help="Traceback print mode (auto/long/short/line/native/no)", + ) + group._addoption( + "--show-capture", + action="store", + dest="showcapture", + choices=["no", "stdout", "stderr", "log", "all"], + default="all", + help="Controls how captured stdout/stderr/log is shown on failed tests. " + "Default: all.", + ) + group._addoption( + "--fulltrace", + "--full-trace", + action="store_true", + default=False, + help="Don't cut any tracebacks (default is to cut)", + ) + group._addoption( + "--color", + metavar="color", + action="store", + dest="color", + default="auto", + choices=["yes", "no", "auto"], + help="Color terminal output (yes/no/auto)", + ) + group._addoption( + "--code-highlight", + default="yes", + choices=["yes", "no"], + help="Whether code should be highlighted (only if --color is also enabled). " + "Default: yes.", + ) + + parser.addini( + "console_output_style", + help='Console output: "classic", or with additional progress information ' + '("progress" (percentage) | "count" | "progress-even-when-capture-no" (forces ' + "progress even when capture=no)", + default="progress", + ) + Config._add_verbosity_ini( + parser, + Config.VERBOSITY_TEST_CASES, + help=( + "Specify a verbosity level for test case execution, overriding the main level. " + "Higher levels will provide more detailed information about each test case executed." 
+ ), + ) + + +def pytest_configure(config: Config) -> None: + reporter = TerminalReporter(config, sys.stdout) + config.pluginmanager.register(reporter, "terminalreporter") + if config.option.debug or config.option.traceconfig: + + def mywriter(tags, args): + msg = " ".join(map(str, args)) + reporter.write_line("[traceconfig] " + msg) + + config.trace.root.setprocessor("pytest:config", mywriter) + + +def getreportopt(config: Config) -> str: + reportchars: str = config.option.reportchars + + old_aliases = {"F", "S"} + reportopts = "" + for char in reportchars: + if char in old_aliases: + char = char.lower() + if char == "a": + reportopts = "sxXEf" + elif char == "A": + reportopts = "PpsxXEf" + elif char == "N": + reportopts = "" + elif char not in reportopts: + reportopts += char + + if not config.option.disable_warnings and "w" not in reportopts: + reportopts = "w" + reportopts + elif config.option.disable_warnings and "w" in reportopts: + reportopts = reportopts.replace("w", "") + + return reportopts + + +@hookimpl(trylast=True) # after _pytest.runner +def pytest_report_teststatus(report: BaseReport) -> Tuple[str, str, str]: + letter = "F" + if report.passed: + letter = "." + elif report.skipped: + letter = "s" + + outcome: str = report.outcome + if report.when in ("collect", "setup", "teardown") and outcome == "failed": + outcome = "error" + letter = "E" + + return outcome, letter, outcome.upper() + + +@dataclasses.dataclass +class WarningReport: + """Simple structure to hold warnings information captured by ``pytest_warning_recorded``. + + :ivar str message: + User friendly message about the warning. + :ivar str|None nodeid: + nodeid that generated the warning (see ``get_location``). + :ivar tuple fslocation: + File system location of the source of the warning (see ``get_location``). 
+ """ + + message: str + nodeid: Optional[str] = None + fslocation: Optional[Tuple[str, int]] = None + + count_towards_summary: ClassVar = True + + def get_location(self, config: Config) -> Optional[str]: + """Return the more user-friendly information about the location of a warning, or None.""" + if self.nodeid: + return self.nodeid + if self.fslocation: + filename, linenum = self.fslocation + relpath = bestrelpath(config.invocation_params.dir, absolutepath(filename)) + return f"{relpath}:{linenum}" + return None + + +@final +class TerminalReporter: + def __init__(self, config: Config, file: Optional[TextIO] = None) -> None: + import _pytest.config + + self.config = config + self._numcollected = 0 + self._session: Optional[Session] = None + self._showfspath: Optional[bool] = None + + self.stats: Dict[str, List[Any]] = {} + self._main_color: Optional[str] = None + self._known_types: Optional[List[str]] = None + self.startpath = config.invocation_params.dir + if file is None: + file = sys.stdout + self._tw = _pytest.config.create_terminal_writer(config, file) + self._screen_width = self._tw.fullwidth + self.currentfspath: Union[None, Path, str, int] = None + self.reportchars = getreportopt(config) + self.hasmarkup = self._tw.hasmarkup + self.isatty = file.isatty() + self._progress_nodeids_reported: Set[str] = set() + self._show_progress_info = self._determine_show_progress_info() + self._collect_report_last_write: Optional[float] = None + self._already_displayed_warnings: Optional[int] = None + self._keyboardinterrupt_memo: Optional[ExceptionRepr] = None + + def _determine_show_progress_info(self) -> Literal["progress", "count", False]: + """Return whether we should display progress information based on the current config.""" + # do not show progress if we are not capturing output (#3038) unless explicitly + # overridden by progress-even-when-capture-no + if ( + self.config.getoption("capture", "no") == "no" + and self.config.getini("console_output_style") + != "progress-even-when-capture-no" + ): + return False + # do not show progress if we are showing fixture setup/teardown + if self.config.getoption("setupshow", False): + return False + cfg: str = self.config.getini("console_output_style") + if cfg in {"progress", "progress-even-when-capture-no"}: + return "progress" + elif cfg == "count": + return "count" + else: + return False + + @property + def verbosity(self) -> int: + verbosity: int = self.config.option.verbose + return verbosity + + @property + def showheader(self) -> bool: + return self.verbosity >= 0 + + @property + def no_header(self) -> bool: + return bool(self.config.option.no_header) + + @property + def no_summary(self) -> bool: + return bool(self.config.option.no_summary) + + @property + def showfspath(self) -> bool: + if self._showfspath is None: + return self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) >= 0 + return self._showfspath + + @showfspath.setter + def showfspath(self, value: Optional[bool]) -> None: + self._showfspath = value + + @property + def showlongtestinfo(self) -> bool: + return self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) > 0 + + def hasopt(self, char: str) -> bool: + char = {"xfailed": "x", "skipped": "s"}.get(char, char) + return char in self.reportchars + + def write_fspath_result(self, nodeid: str, res, **markup: bool) -> None: + fspath = self.config.rootpath / nodeid.split("::")[0] + if self.currentfspath is None or fspath != self.currentfspath: + if self.currentfspath is not None and self._show_progress_info: + 
self._write_progress_information_filling_space() + self.currentfspath = fspath + relfspath = bestrelpath(self.startpath, fspath) + self._tw.line() + self._tw.write(relfspath + " ") + self._tw.write(res, flush=True, **markup) + + def write_ensure_prefix(self, prefix: str, extra: str = "", **kwargs) -> None: + if self.currentfspath != prefix: + self._tw.line() + self.currentfspath = prefix + self._tw.write(prefix) + if extra: + self._tw.write(extra, **kwargs) + self.currentfspath = -2 + + def ensure_newline(self) -> None: + if self.currentfspath: + self._tw.line() + self.currentfspath = None + + def wrap_write( + self, + content: str, + *, + flush: bool = False, + margin: int = 8, + line_sep: str = "\n", + **markup: bool, + ) -> None: + """Wrap message with margin for progress info.""" + width_of_current_line = self._tw.width_of_current_line + wrapped = line_sep.join( + textwrap.wrap( + " " * width_of_current_line + content, + width=self._screen_width - margin, + drop_whitespace=True, + replace_whitespace=False, + ), + ) + wrapped = wrapped[width_of_current_line:] + self._tw.write(wrapped, flush=flush, **markup) + + def write(self, content: str, *, flush: bool = False, **markup: bool) -> None: + self._tw.write(content, flush=flush, **markup) + + def flush(self) -> None: + self._tw.flush() + + def write_line(self, line: Union[str, bytes], **markup: bool) -> None: + if not isinstance(line, str): + line = str(line, errors="replace") + self.ensure_newline() + self._tw.line(line, **markup) + + def rewrite(self, line: str, **markup: bool) -> None: + """Rewinds the terminal cursor to the beginning and writes the given line. + + :param erase: + If True, will also add spaces until the full terminal width to ensure + previous lines are properly erased. + + The rest of the keyword arguments are markup instructions. 
+ """ + erase = markup.pop("erase", False) + if erase: + fill_count = self._tw.fullwidth - len(line) - 1 + fill = " " * fill_count + else: + fill = "" + line = str(line) + self._tw.write("\r" + line + fill, **markup) + + def write_sep( + self, + sep: str, + title: Optional[str] = None, + fullwidth: Optional[int] = None, + **markup: bool, + ) -> None: + self.ensure_newline() + self._tw.sep(sep, title, fullwidth, **markup) + + def section(self, title: str, sep: str = "=", **kw: bool) -> None: + self._tw.sep(sep, title, **kw) + + def line(self, msg: str, **kw: bool) -> None: + self._tw.line(msg, **kw) + + def _add_stats(self, category: str, items: Sequence[Any]) -> None: + set_main_color = category not in self.stats + self.stats.setdefault(category, []).extend(items) + if set_main_color: + self._set_main_color() + + def pytest_internalerror(self, excrepr: ExceptionRepr) -> bool: + for line in str(excrepr).split("\n"): + self.write_line("INTERNALERROR> " + line) + return True + + def pytest_warning_recorded( + self, + warning_message: warnings.WarningMessage, + nodeid: str, + ) -> None: + from _pytest.warnings import warning_record_to_str + + fslocation = warning_message.filename, warning_message.lineno + message = warning_record_to_str(warning_message) + + warning_report = WarningReport( + fslocation=fslocation, message=message, nodeid=nodeid + ) + self._add_stats("warnings", [warning_report]) + + def pytest_plugin_registered(self, plugin: _PluggyPlugin) -> None: + if self.config.option.traceconfig: + msg = f"PLUGIN registered: {plugin}" + # XXX This event may happen during setup/teardown time + # which unfortunately captures our output here + # which garbles our output if we use self.write_line. + self.write_line(msg) + + def pytest_deselected(self, items: Sequence[Item]) -> None: + self._add_stats("deselected", items) + + def pytest_runtest_logstart( + self, nodeid: str, location: Tuple[str, Optional[int], str] + ) -> None: + # Ensure that the path is printed before the + # 1st test of a module starts running. + if self.showlongtestinfo: + line = self._locationline(nodeid, *location) + self.write_ensure_prefix(line, "") + self.flush() + elif self.showfspath: + self.write_fspath_result(nodeid, "") + self.flush() + + def pytest_runtest_logreport(self, report: TestReport) -> None: + self._tests_ran = True + rep = report + + res = TestShortLogReport( + *self.config.hook.pytest_report_teststatus(report=rep, config=self.config) + ) + category, letter, word = res.category, res.letter, res.word + if not isinstance(word, tuple): + markup = None + else: + word, markup = word + self._add_stats(category, [rep]) + if not letter and not word: + # Probably passed setup/teardown. 
+ return + running_xdist = hasattr(rep, "node") + if markup is None: + was_xfail = hasattr(report, "wasxfail") + if rep.passed and not was_xfail: + markup = {"green": True} + elif rep.passed and was_xfail: + markup = {"yellow": True} + elif rep.failed: + markup = {"red": True} + elif rep.skipped: + markup = {"yellow": True} + else: + markup = {} + if self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) <= 0: + self._tw.write(letter, **markup) + else: + self._progress_nodeids_reported.add(rep.nodeid) + line = self._locationline(rep.nodeid, *rep.location) + if not running_xdist: + self.write_ensure_prefix(line, word, **markup) + if rep.skipped or hasattr(report, "wasxfail"): + reason = _get_raw_skip_reason(rep) + if self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) < 2: + available_width = ( + (self._tw.fullwidth - self._tw.width_of_current_line) + - len(" [100%]") + - 1 + ) + formatted_reason = _format_trimmed( + " ({})", reason, available_width + ) + else: + formatted_reason = f" ({reason})" + + if reason and formatted_reason is not None: + self.wrap_write(formatted_reason) + if self._show_progress_info: + self._write_progress_information_filling_space() + else: + self.ensure_newline() + self._tw.write("[%s]" % rep.node.gateway.id) + if self._show_progress_info: + self._tw.write( + self._get_progress_information_message() + " ", cyan=True + ) + else: + self._tw.write(" ") + self._tw.write(word, **markup) + self._tw.write(" " + line) + self.currentfspath = -2 + self.flush() + + @property + def _is_last_item(self) -> bool: + assert self._session is not None + return len(self._progress_nodeids_reported) == self._session.testscollected + + def pytest_runtest_logfinish(self, nodeid: str) -> None: + assert self._session + if ( + self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) <= 0 + and self._show_progress_info + ): + if self._show_progress_info == "count": + num_tests = self._session.testscollected + progress_length = len(f" [{num_tests}/{num_tests}]") + else: + progress_length = len(" [100%]") + + self._progress_nodeids_reported.add(nodeid) + + if self._is_last_item: + self._write_progress_information_filling_space() + else: + main_color, _ = self._get_main_color() + w = self._width_of_current_line + past_edge = w + progress_length + 1 >= self._screen_width + if past_edge: + msg = self._get_progress_information_message() + self._tw.write(msg + "\n", **{main_color: True}) + + def _get_progress_information_message(self) -> str: + assert self._session + collected = self._session.testscollected + if self._show_progress_info == "count": + if collected: + progress = self._progress_nodeids_reported + counter_format = f"{{:{len(str(collected))}d}}" + format_string = f" [{counter_format}/{{}}]" + return format_string.format(len(progress), collected) + return f" [ {collected} / {collected} ]" + else: + if collected: + return ( + f" [{len(self._progress_nodeids_reported) * 100 // collected:3d}%]" + ) + return " [100%]" + + def _write_progress_information_filling_space(self) -> None: + color, _ = self._get_main_color() + msg = self._get_progress_information_message() + w = self._width_of_current_line + fill = self._tw.fullwidth - w - 1 + self.write(msg.rjust(fill), flush=True, **{color: True}) + + @property + def _width_of_current_line(self) -> int: + """Return the width of the current line.""" + return self._tw.width_of_current_line + + def pytest_collection(self) -> None: + if self.isatty: + if self.config.option.verbose >= 0: + self.write("collecting ... 
", flush=True, bold=True) + self._collect_report_last_write = timing.time() + elif self.config.option.verbose >= 1: + self.write("collecting ... ", flush=True, bold=True) + + def pytest_collectreport(self, report: CollectReport) -> None: + if report.failed: + self._add_stats("error", [report]) + elif report.skipped: + self._add_stats("skipped", [report]) + items = [x for x in report.result if isinstance(x, Item)] + self._numcollected += len(items) + if self.isatty: + self.report_collect() + + def report_collect(self, final: bool = False) -> None: + if self.config.option.verbose < 0: + return + + if not final: + # Only write "collecting" report every 0.5s. + t = timing.time() + if ( + self._collect_report_last_write is not None + and self._collect_report_last_write > t - REPORT_COLLECTING_RESOLUTION + ): + return + self._collect_report_last_write = t + + errors = len(self.stats.get("error", [])) + skipped = len(self.stats.get("skipped", [])) + deselected = len(self.stats.get("deselected", [])) + selected = self._numcollected - deselected + line = "collected " if final else "collecting " + line += ( + str(self._numcollected) + " item" + ("" if self._numcollected == 1 else "s") + ) + if errors: + line += " / %d error%s" % (errors, "s" if errors != 1 else "") + if deselected: + line += " / %d deselected" % deselected + if skipped: + line += " / %d skipped" % skipped + if self._numcollected > selected: + line += " / %d selected" % selected + if self.isatty: + self.rewrite(line, bold=True, erase=True) + if final: + self.write("\n") + else: + self.write_line(line) + + @hookimpl(trylast=True) + def pytest_sessionstart(self, session: "Session") -> None: + self._session = session + self._sessionstarttime = timing.time() + if not self.showheader: + return + self.write_sep("=", "test session starts", bold=True) + verinfo = platform.python_version() + if not self.no_header: + msg = f"platform {sys.platform} -- Python {verinfo}" + pypy_version_info = getattr(sys, "pypy_version_info", None) + if pypy_version_info: + verinfo = ".".join(map(str, pypy_version_info[:3])) + msg += f"[pypy-{verinfo}-{pypy_version_info[3]}]" + msg += f", pytest-{_pytest._version.version}, pluggy-{pluggy.__version__}" + if ( + self.verbosity > 0 + or self.config.option.debug + or getattr(self.config.option, "pastebin", None) + ): + msg += " -- " + str(sys.executable) + self.write_line(msg) + lines = self.config.hook.pytest_report_header( + config=self.config, start_path=self.startpath + ) + self._write_report_lines_from_hooks(lines) + + def _write_report_lines_from_hooks( + self, lines: Sequence[Union[str, Sequence[str]]] + ) -> None: + for line_or_lines in reversed(lines): + if isinstance(line_or_lines, str): + self.write_line(line_or_lines) + else: + for line in line_or_lines: + self.write_line(line) + + def pytest_report_header(self, config: Config) -> List[str]: + result = [f"rootdir: {config.rootpath}"] + + if config.inipath: + result.append("configfile: " + bestrelpath(config.rootpath, config.inipath)) + + if config.args_source == Config.ArgsSource.TESTPATHS: + testpaths: List[str] = config.getini("testpaths") + result.append("testpaths: {}".format(", ".join(testpaths))) + + plugininfo = config.pluginmanager.list_plugin_distinfo() + if plugininfo: + result.append("plugins: %s" % ", ".join(_plugin_nameversions(plugininfo))) + return result + + def pytest_collection_finish(self, session: "Session") -> None: + self.report_collect(True) + + lines = self.config.hook.pytest_report_collectionfinish( + config=self.config, + 
start_path=self.startpath, + items=session.items, + ) + self._write_report_lines_from_hooks(lines) + + if self.config.getoption("collectonly"): + if session.items: + if self.config.option.verbose > -1: + self._tw.line("") + self._printcollecteditems(session.items) + + failed = self.stats.get("failed") + if failed: + self._tw.sep("!", "collection failures") + for rep in failed: + rep.toterminal(self._tw) + + def _printcollecteditems(self, items: Sequence[Item]) -> None: + test_cases_verbosity = self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) + if test_cases_verbosity < 0: + if test_cases_verbosity < -1: + counts = Counter(item.nodeid.split("::", 1)[0] for item in items) + for name, count in sorted(counts.items()): + self._tw.line("%s: %d" % (name, count)) + else: + for item in items: + self._tw.line(item.nodeid) + return + stack: List[Node] = [] + indent = "" + for item in items: + needed_collectors = item.listchain()[1:] # strip root node + while stack: + if stack == needed_collectors[: len(stack)]: + break + stack.pop() + for col in needed_collectors[len(stack) :]: + stack.append(col) + indent = (len(stack) - 1) * " " + self._tw.line(f"{indent}{col}") + if test_cases_verbosity >= 1: + obj = getattr(col, "obj", None) + doc = inspect.getdoc(obj) if obj else None + if doc: + for line in doc.splitlines(): + self._tw.line("{}{}".format(indent + " ", line)) + + @hookimpl(wrapper=True) + def pytest_sessionfinish( + self, session: "Session", exitstatus: Union[int, ExitCode] + ) -> Generator[None, None, None]: + result = yield + self._tw.line("") + summary_exit_codes = ( + ExitCode.OK, + ExitCode.TESTS_FAILED, + ExitCode.INTERRUPTED, + ExitCode.USAGE_ERROR, + ExitCode.NO_TESTS_COLLECTED, + ) + if exitstatus in summary_exit_codes and not self.no_summary: + self.config.hook.pytest_terminal_summary( + terminalreporter=self, exitstatus=exitstatus, config=self.config + ) + if session.shouldfail: + self.write_sep("!", str(session.shouldfail), red=True) + if exitstatus == ExitCode.INTERRUPTED: + self._report_keyboardinterrupt() + self._keyboardinterrupt_memo = None + elif session.shouldstop: + self.write_sep("!", str(session.shouldstop), red=True) + self.summary_stats() + return result + + @hookimpl(wrapper=True) + def pytest_terminal_summary(self) -> Generator[None, None, None]: + self.summary_errors() + self.summary_failures() + self.summary_xfailures() + self.summary_warnings() + self.summary_passes() + self.summary_xpasses() + try: + return (yield) + finally: + self.short_test_summary() + # Display any extra warnings from teardown here (if any). 
+ self.summary_warnings() + + def pytest_keyboard_interrupt(self, excinfo: ExceptionInfo[BaseException]) -> None: + self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True) + + def pytest_unconfigure(self) -> None: + if self._keyboardinterrupt_memo is not None: + self._report_keyboardinterrupt() + + def _report_keyboardinterrupt(self) -> None: + excrepr = self._keyboardinterrupt_memo + assert excrepr is not None + assert excrepr.reprcrash is not None + msg = excrepr.reprcrash.message + self.write_sep("!", msg) + if "KeyboardInterrupt" in msg: + if self.config.option.fulltrace: + excrepr.toterminal(self._tw) + else: + excrepr.reprcrash.toterminal(self._tw) + self._tw.line( + "(to show a full traceback on KeyboardInterrupt use --full-trace)", + yellow=True, + ) + + def _locationline( + self, nodeid: str, fspath: str, lineno: Optional[int], domain: str + ) -> str: + def mkrel(nodeid: str) -> str: + line = self.config.cwd_relative_nodeid(nodeid) + if domain and line.endswith(domain): + line = line[: -len(domain)] + values = domain.split("[") + values[0] = values[0].replace(".", "::") # don't replace '.' in params + line += "[".join(values) + return line + + # collect_fspath comes from testid which has a "/"-normalized path. + if fspath: + res = mkrel(nodeid) + if self.verbosity >= 2 and nodeid.split("::")[0] != fspath.replace( + "\\", nodes.SEP + ): + res += " <- " + bestrelpath(self.startpath, Path(fspath)) + else: + res = "[location]" + return res + " " + + def _getfailureheadline(self, rep): + head_line = rep.head_line + if head_line: + return head_line + return "test session" # XXX? + + def _getcrashline(self, rep): + try: + return str(rep.longrepr.reprcrash) + except AttributeError: + try: + return str(rep.longrepr)[:50] + except AttributeError: + return "" + + # + # Summaries for sessionfinish. 
+ # + def getreports(self, name: str): + return [x for x in self.stats.get(name, ()) if not hasattr(x, "_pdbshown")] + + def summary_warnings(self) -> None: + if self.hasopt("w"): + all_warnings: Optional[List[WarningReport]] = self.stats.get("warnings") + if not all_warnings: + return + + final = self._already_displayed_warnings is not None + if final: + warning_reports = all_warnings[self._already_displayed_warnings :] + else: + warning_reports = all_warnings + self._already_displayed_warnings = len(warning_reports) + if not warning_reports: + return + + reports_grouped_by_message: Dict[str, List[WarningReport]] = {} + for wr in warning_reports: + reports_grouped_by_message.setdefault(wr.message, []).append(wr) + + def collapsed_location_report(reports: List[WarningReport]) -> str: + locations = [] + for w in reports: + location = w.get_location(self.config) + if location: + locations.append(location) + + if len(locations) < 10: + return "\n".join(map(str, locations)) + + counts_by_filename = Counter( + str(loc).split("::", 1)[0] for loc in locations + ) + return "\n".join( + "{}: {} warning{}".format(k, v, "s" if v > 1 else "") + for k, v in counts_by_filename.items() + ) + + title = "warnings summary (final)" if final else "warnings summary" + self.write_sep("=", title, yellow=True, bold=False) + for message, message_reports in reports_grouped_by_message.items(): + maybe_location = collapsed_location_report(message_reports) + if maybe_location: + self._tw.line(maybe_location) + lines = message.splitlines() + indented = "\n".join(" " + x for x in lines) + message = indented.rstrip() + else: + message = message.rstrip() + self._tw.line(message) + self._tw.line() + self._tw.line( + "-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html" + ) + + def summary_passes(self) -> None: + self.summary_passes_combined("passed", "PASSES", "P") + + def summary_xpasses(self) -> None: + self.summary_passes_combined("xpassed", "XPASSES", "X") + + def summary_passes_combined( + self, which_reports: str, sep_title: str, needed_opt: str + ) -> None: + if self.config.option.tbstyle != "no": + if self.hasopt(needed_opt): + reports: List[TestReport] = self.getreports(which_reports) + if not reports: + return + self.write_sep("=", sep_title) + for rep in reports: + if rep.sections: + msg = self._getfailureheadline(rep) + self.write_sep("_", msg, green=True, bold=True) + self._outrep_summary(rep) + self._handle_teardown_sections(rep.nodeid) + + def _get_teardown_reports(self, nodeid: str) -> List[TestReport]: + reports = self.getreports("") + return [ + report + for report in reports + if report.when == "teardown" and report.nodeid == nodeid + ] + + def _handle_teardown_sections(self, nodeid: str) -> None: + for report in self._get_teardown_reports(nodeid): + self.print_teardown_sections(report) + + def print_teardown_sections(self, rep: TestReport) -> None: + showcapture = self.config.option.showcapture + if showcapture == "no": + return + for secname, content in rep.sections: + if showcapture != "all" and showcapture not in secname: + continue + if "teardown" in secname: + self._tw.sep("-", secname) + if content[-1:] == "\n": + content = content[:-1] + self._tw.line(content) + + def summary_failures(self) -> None: + self.summary_failures_combined("failed", "FAILURES") + + def summary_xfailures(self) -> None: + self.summary_failures_combined("xfailed", "XFAILURES", "x") + + def summary_failures_combined( + self, which_reports: str, sep_title: str, needed_opt: Optional[str] = None + ) -> None: 
+ if self.config.option.tbstyle != "no": + if not needed_opt or self.hasopt(needed_opt): + reports: List[BaseReport] = self.getreports(which_reports) + if not reports: + return + self.write_sep("=", sep_title) + if self.config.option.tbstyle == "line": + for rep in reports: + line = self._getcrashline(rep) + self.write_line(line) + else: + for rep in reports: + msg = self._getfailureheadline(rep) + self.write_sep("_", msg, red=True, bold=True) + self._outrep_summary(rep) + self._handle_teardown_sections(rep.nodeid) + + def summary_errors(self) -> None: + if self.config.option.tbstyle != "no": + reports: List[BaseReport] = self.getreports("error") + if not reports: + return + self.write_sep("=", "ERRORS") + for rep in self.stats["error"]: + msg = self._getfailureheadline(rep) + if rep.when == "collect": + msg = "ERROR collecting " + msg + else: + msg = f"ERROR at {rep.when} of {msg}" + self.write_sep("_", msg, red=True, bold=True) + self._outrep_summary(rep) + + def _outrep_summary(self, rep: BaseReport) -> None: + rep.toterminal(self._tw) + showcapture = self.config.option.showcapture + if showcapture == "no": + return + for secname, content in rep.sections: + if showcapture != "all" and showcapture not in secname: + continue + self._tw.sep("-", secname) + if content[-1:] == "\n": + content = content[:-1] + self._tw.line(content) + + def summary_stats(self) -> None: + if self.verbosity < -1: + return + + session_duration = timing.time() - self._sessionstarttime + (parts, main_color) = self.build_summary_stats_line() + line_parts = [] + + display_sep = self.verbosity >= 0 + if display_sep: + fullwidth = self._tw.fullwidth + for text, markup in parts: + with_markup = self._tw.markup(text, **markup) + if display_sep: + fullwidth += len(with_markup) - len(text) + line_parts.append(with_markup) + msg = ", ".join(line_parts) + + main_markup = {main_color: True} + duration = f" in {format_session_duration(session_duration)}" + duration_with_markup = self._tw.markup(duration, **main_markup) + if display_sep: + fullwidth += len(duration_with_markup) - len(duration) + msg += duration_with_markup + + if display_sep: + markup_for_end_sep = self._tw.markup("", **main_markup) + if markup_for_end_sep.endswith("\x1b[0m"): + markup_for_end_sep = markup_for_end_sep[:-4] + fullwidth += len(markup_for_end_sep) + msg += markup_for_end_sep + + if display_sep: + self.write_sep("=", msg, fullwidth=fullwidth, **main_markup) + else: + self.write_line(msg, **main_markup) + + def short_test_summary(self) -> None: + if not self.reportchars: + return + + def show_simple(lines: List[str], *, stat: str) -> None: + failed = self.stats.get(stat, []) + if not failed: + return + config = self.config + for rep in failed: + color = _color_for_type.get(stat, _color_for_type_default) + line = _get_line_with_reprcrash_message( + config, rep, self._tw, {color: True} + ) + lines.append(line) + + def show_xfailed(lines: List[str]) -> None: + xfailed = self.stats.get("xfailed", []) + for rep in xfailed: + verbose_word = rep._get_verbose_word(self.config) + markup_word = self._tw.markup( + verbose_word, **{_color_for_type["warnings"]: True} + ) + nodeid = _get_node_id_with_markup(self._tw, self.config, rep) + line = f"{markup_word} {nodeid}" + reason = rep.wasxfail + if reason: + line += " - " + str(reason) + + lines.append(line) + + def show_xpassed(lines: List[str]) -> None: + xpassed = self.stats.get("xpassed", []) + for rep in xpassed: + verbose_word = rep._get_verbose_word(self.config) + markup_word = self._tw.markup( + 
verbose_word, **{_color_for_type["warnings"]: True} + ) + nodeid = _get_node_id_with_markup(self._tw, self.config, rep) + line = f"{markup_word} {nodeid}" + reason = rep.wasxfail + if reason: + line += " - " + str(reason) + lines.append(line) + + def show_skipped(lines: List[str]) -> None: + skipped: List[CollectReport] = self.stats.get("skipped", []) + fskips = _folded_skips(self.startpath, skipped) if skipped else [] + if not fskips: + return + verbose_word = skipped[0]._get_verbose_word(self.config) + markup_word = self._tw.markup( + verbose_word, **{_color_for_type["warnings"]: True} + ) + prefix = "Skipped: " + for num, fspath, lineno, reason in fskips: + if reason.startswith(prefix): + reason = reason[len(prefix) :] + if lineno is not None: + lines.append( + "%s [%d] %s:%d: %s" % (markup_word, num, fspath, lineno, reason) + ) + else: + lines.append("%s [%d] %s: %s" % (markup_word, num, fspath, reason)) + + REPORTCHAR_ACTIONS: Mapping[str, Callable[[List[str]], None]] = { + "x": show_xfailed, + "X": show_xpassed, + "f": partial(show_simple, stat="failed"), + "s": show_skipped, + "p": partial(show_simple, stat="passed"), + "E": partial(show_simple, stat="error"), + } + + lines: List[str] = [] + for char in self.reportchars: + action = REPORTCHAR_ACTIONS.get(char) + if action: # skipping e.g. "P" (passed with output) here. + action(lines) + + if lines: + self.write_sep("=", "short test summary info", cyan=True, bold=True) + for line in lines: + self.write_line(line) + + def _get_main_color(self) -> Tuple[str, List[str]]: + if self._main_color is None or self._known_types is None or self._is_last_item: + self._set_main_color() + assert self._main_color + assert self._known_types + return self._main_color, self._known_types + + def _determine_main_color(self, unknown_type_seen: bool) -> str: + stats = self.stats + if "failed" in stats or "error" in stats: + main_color = "red" + elif "warnings" in stats or "xpassed" in stats or unknown_type_seen: + main_color = "yellow" + elif "passed" in stats or not self._is_last_item: + main_color = "green" + else: + main_color = "yellow" + return main_color + + def _set_main_color(self) -> None: + unknown_types: List[str] = [] + for found_type in self.stats: + if found_type: # setup/teardown reports have an empty key, ignore them + if found_type not in KNOWN_TYPES and found_type not in unknown_types: + unknown_types.append(found_type) + self._known_types = list(KNOWN_TYPES) + unknown_types + self._main_color = self._determine_main_color(bool(unknown_types)) + + def build_summary_stats_line(self) -> Tuple[List[Tuple[str, Dict[str, bool]]], str]: + """ + Build the parts used in the last summary stats line. + + The summary stats line is the line shown at the end, "=== 12 passed, 2 errors in Xs===". + + This function builds a list of the "parts" that make up for the text in that line, in + the example above it would be: + + [ + ("12 passed", {"green": True}), + ("2 errors", {"red": True} + ] + + That last dict for each line is a "markup dictionary", used by TerminalWriter to + color output. + + The final color of the line is also determined by this function, and is the second + element of the returned tuple. 
+ """ + if self.config.getoption("collectonly"): + return self._build_collect_only_summary_stats_line() + else: + return self._build_normal_summary_stats_line() + + def _get_reports_to_display(self, key: str) -> List[Any]: + """Get test/collection reports for the given status key, such as `passed` or `error`.""" + reports = self.stats.get(key, []) + return [x for x in reports if getattr(x, "count_towards_summary", True)] + + def _build_normal_summary_stats_line( + self, + ) -> Tuple[List[Tuple[str, Dict[str, bool]]], str]: + main_color, known_types = self._get_main_color() + parts = [] + + for key in known_types: + reports = self._get_reports_to_display(key) + if reports: + count = len(reports) + color = _color_for_type.get(key, _color_for_type_default) + markup = {color: True, "bold": color == main_color} + parts.append(("%d %s" % pluralize(count, key), markup)) + + if not parts: + parts = [("no tests ran", {_color_for_type_default: True})] + + return parts, main_color + + def _build_collect_only_summary_stats_line( + self, + ) -> Tuple[List[Tuple[str, Dict[str, bool]]], str]: + deselected = len(self._get_reports_to_display("deselected")) + errors = len(self._get_reports_to_display("error")) + + if self._numcollected == 0: + parts = [("no tests collected", {"yellow": True})] + main_color = "yellow" + + elif deselected == 0: + main_color = "green" + collected_output = "%d %s collected" % pluralize(self._numcollected, "test") + parts = [(collected_output, {main_color: True})] + else: + all_tests_were_deselected = self._numcollected == deselected + if all_tests_were_deselected: + main_color = "yellow" + collected_output = f"no tests collected ({deselected} deselected)" + else: + main_color = "green" + selected = self._numcollected - deselected + collected_output = f"{selected}/{self._numcollected} tests collected ({deselected} deselected)" + + parts = [(collected_output, {main_color: True})] + + if errors: + main_color = _color_for_type["error"] + parts += [("%d %s" % pluralize(errors, "error"), {main_color: True})] + + return parts, main_color + + +def _get_node_id_with_markup(tw: TerminalWriter, config: Config, rep: BaseReport): + nodeid = config.cwd_relative_nodeid(rep.nodeid) + path, *parts = nodeid.split("::") + if parts: + parts_markup = tw.markup("::".join(parts), bold=True) + return path + "::" + parts_markup + else: + return path + + +def _format_trimmed(format: str, msg: str, available_width: int) -> Optional[str]: + """Format msg into format, ellipsizing it if doesn't fit in available_width. + + Returns None if even the ellipsis can't fit. + """ + # Only use the first line. + i = msg.find("\n") + if i != -1: + msg = msg[:i] + + ellipsis = "..." + format_width = wcswidth(format.format("")) + if format_width + len(ellipsis) > available_width: + return None + + if format_width + wcswidth(msg) > available_width: + available_width -= len(ellipsis) + msg = msg[:available_width] + while format_width + wcswidth(msg) > available_width: + msg = msg[:-1] + msg += ellipsis + + return format.format(msg) + + +def _get_line_with_reprcrash_message( + config: Config, rep: BaseReport, tw: TerminalWriter, word_markup: Dict[str, bool] +) -> str: + """Get summary line for a report, trying to add reprcrash message.""" + verbose_word = rep._get_verbose_word(config) + word = tw.markup(verbose_word, **word_markup) + node = _get_node_id_with_markup(tw, config, rep) + + line = f"{word} {node}" + line_width = wcswidth(line) + + try: + # Type ignored intentionally -- possible AttributeError expected. 
+ msg = rep.longrepr.reprcrash.message # type: ignore[union-attr] + except AttributeError: + pass + else: + if running_on_ci() or config.option.verbose >= 2: + msg = f" - {msg}" + else: + available_width = tw.fullwidth - line_width + msg = _format_trimmed(" - {}", msg, available_width) + if msg is not None: + line += msg + + return line + + +def _folded_skips( + startpath: Path, + skipped: Sequence[CollectReport], +) -> List[Tuple[int, str, Optional[int], str]]: + d: Dict[Tuple[str, Optional[int], str], List[CollectReport]] = {} + for event in skipped: + assert event.longrepr is not None + assert isinstance(event.longrepr, tuple), (event, event.longrepr) + assert len(event.longrepr) == 3, (event, event.longrepr) + fspath, lineno, reason = event.longrepr + # For consistency, report all fspaths in relative form. + fspath = bestrelpath(startpath, Path(fspath)) + keywords = getattr(event, "keywords", {}) + # Folding reports with global pytestmark variable. + # This is a workaround, because for now we cannot identify the scope of a skip marker + # TODO: Revisit after marks scope would be fixed. + if ( + event.when == "setup" + and "skip" in keywords + and "pytestmark" not in keywords + ): + key: Tuple[str, Optional[int], str] = (fspath, None, reason) + else: + key = (fspath, lineno, reason) + d.setdefault(key, []).append(event) + values: List[Tuple[int, str, Optional[int], str]] = [] + for key, events in d.items(): + values.append((len(events), *key)) + return values + + +_color_for_type = { + "failed": "red", + "error": "red", + "warnings": "yellow", + "passed": "green", +} +_color_for_type_default = "yellow" + + +def pluralize(count: int, noun: str) -> Tuple[int, str]: + # No need to pluralize words such as `failed` or `passed`. + if noun not in ["error", "warnings", "test"]: + return count, noun + + # The `warnings` key is plural. To avoid API breakage, we keep it that way but + # set it to singular here so we can determine plurality in the same way as we do + # for `error`. + noun = noun.replace("warnings", "warning") + + return count, noun + "s" if count != 1 else noun + + +def _plugin_nameversions(plugininfo) -> List[str]: + values: List[str] = [] + for plugin, dist in plugininfo: + # Gets us name and version! + name = f"{dist.project_name}-{dist.version}" + # Questionable convenience, but it keeps things short. + if name.startswith("pytest-"): + name = name[7:] + # We decided to print python package names they can have more than one plugin. + if name not in values: + values.append(name) + return values + + +def format_session_duration(seconds: float) -> str: + """Format the given seconds in a human readable manner to show in the final summary.""" + if seconds < 60: + return f"{seconds:.2f}s" + else: + dt = datetime.timedelta(seconds=int(seconds)) + return f"{seconds:.2f}s ({dt})" + + +def _get_raw_skip_reason(report: TestReport) -> str: + """Get the reason string of a skip/xfail/xpass test report. + + The string is just the part given by the user. 
+ """ + if hasattr(report, "wasxfail"): + reason = report.wasxfail + if reason.startswith("reason: "): + reason = reason[len("reason: ") :] + return reason + else: + assert report.skipped + assert isinstance(report.longrepr, tuple) + _, _, reason = report.longrepr + if reason.startswith("Skipped: "): + reason = reason[len("Skipped: ") :] + elif reason == "Skipped": + reason = "" + return reason diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/threadexception.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/threadexception.py new file mode 100644 index 000000000..09faf661b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/threadexception.py @@ -0,0 +1,92 @@ +import threading +import traceback +from types import TracebackType +from typing import Any +from typing import Callable +from typing import Generator +from typing import Optional +from typing import Type +import warnings + +import pytest + + +# Copied from cpython/Lib/test/support/threading_helper.py, with modifications. +class catch_threading_exception: + """Context manager catching threading.Thread exception using + threading.excepthook. + + Storing exc_value using a custom hook can create a reference cycle. The + reference cycle is broken explicitly when the context manager exits. + + Storing thread using a custom hook can resurrect it if it is set to an + object which is being finalized. Exiting the context manager clears the + stored object. + + Usage: + with threading_helper.catch_threading_exception() as cm: + # code spawning a thread which raises an exception + ... + # check the thread exception: use cm.args + ... + # cm.args attribute no longer exists at this point + # (to break a reference cycle) + """ + + def __init__(self) -> None: + self.args: Optional["threading.ExceptHookArgs"] = None + self._old_hook: Optional[Callable[["threading.ExceptHookArgs"], Any]] = None + + def _hook(self, args: "threading.ExceptHookArgs") -> None: + self.args = args + + def __enter__(self) -> "catch_threading_exception": + self._old_hook = threading.excepthook + threading.excepthook = self._hook + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + assert self._old_hook is not None + threading.excepthook = self._old_hook + self._old_hook = None + del self.args + + +def thread_exception_runtest_hook() -> Generator[None, None, None]: + with catch_threading_exception() as cm: + try: + yield + finally: + if cm.args: + thread_name = ( + "" if cm.args.thread is None else cm.args.thread.name + ) + msg = f"Exception in thread {thread_name}\n\n" + msg += "".join( + traceback.format_exception( + cm.args.exc_type, + cm.args.exc_value, + cm.args.exc_traceback, + ) + ) + warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg)) + + +@pytest.hookimpl(wrapper=True, trylast=True) +def pytest_runtest_setup() -> Generator[None, None, None]: + yield from thread_exception_runtest_hook() + + +@pytest.hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_call() -> Generator[None, None, None]: + yield from thread_exception_runtest_hook() + + +@pytest.hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_teardown() -> Generator[None, None, None]: + yield from thread_exception_runtest_hook() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/timing.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/timing.py new file mode 100644 index 
000000000..0541dc8e0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/timing.py @@ -0,0 +1,14 @@ +"""Indirection for time functions. + +We intentionally grab some "time" functions internally to avoid tests mocking "time" to affect +pytest runtime information (issue #185). + +Fixture "mock_timing" also interacts with this module for pytest's own tests. +""" + +from time import perf_counter +from time import sleep +from time import time + + +__all__ = ["perf_counter", "sleep", "time"] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/tmpdir.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/tmpdir.py new file mode 100644 index 000000000..72efed3e8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/tmpdir.py @@ -0,0 +1,322 @@ +# mypy: allow-untyped-defs +"""Support for providing temporary directories to test functions.""" + +import dataclasses +import os +from pathlib import Path +import re +from shutil import rmtree +import tempfile +from typing import Any +from typing import Dict +from typing import final +from typing import Generator +from typing import Literal +from typing import Optional +from typing import Union + +from .pathlib import cleanup_dead_symlinks +from .pathlib import LOCK_TIMEOUT +from .pathlib import make_numbered_dir +from .pathlib import make_numbered_dir_with_cleanup +from .pathlib import rm_rf +from _pytest.compat import get_user_id +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.monkeypatch import MonkeyPatch +from _pytest.nodes import Item +from _pytest.reports import TestReport +from _pytest.stash import StashKey + + +tmppath_result_key = StashKey[Dict[str, bool]]() +RetentionType = Literal["all", "failed", "none"] + + +@final +@dataclasses.dataclass +class TempPathFactory: + """Factory for temporary directories under the common base temp directory. + + The base directory can be configured using the ``--basetemp`` option. + """ + + _given_basetemp: Optional[Path] + # pluggy TagTracerSub, not currently exposed, so Any. + _trace: Any + _basetemp: Optional[Path] + _retention_count: int + _retention_policy: RetentionType + + def __init__( + self, + given_basetemp: Optional[Path], + retention_count: int, + retention_policy: RetentionType, + trace, + basetemp: Optional[Path] = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + if given_basetemp is None: + self._given_basetemp = None + else: + # Use os.path.abspath() to get absolute path instead of resolve() as it + # does not work the same in all platforms (see #4427). + # Path.absolute() exists, but it is not public (see https://bugs.python.org/issue25012). + self._given_basetemp = Path(os.path.abspath(str(given_basetemp))) + self._trace = trace + self._retention_count = retention_count + self._retention_policy = retention_policy + self._basetemp = basetemp + + @classmethod + def from_config( + cls, + config: Config, + *, + _ispytest: bool = False, + ) -> "TempPathFactory": + """Create a factory according to pytest configuration. + + :meta private: + """ + check_ispytest(_ispytest) + count = int(config.getini("tmp_path_retention_count")) + if count < 0: + raise ValueError( + f"tmp_path_retention_count must be >= 0. Current input: {count}." 
+ ) + + policy = config.getini("tmp_path_retention_policy") + if policy not in ("all", "failed", "none"): + raise ValueError( + f"tmp_path_retention_policy must be either all, failed, none. Current input: {policy}." + ) + + return cls( + given_basetemp=config.option.basetemp, + trace=config.trace.get("tmpdir"), + retention_count=count, + retention_policy=policy, + _ispytest=True, + ) + + def _ensure_relative_to_basetemp(self, basename: str) -> str: + basename = os.path.normpath(basename) + if (self.getbasetemp() / basename).resolve().parent != self.getbasetemp(): + raise ValueError(f"{basename} is not a normalized and relative path") + return basename + + def mktemp(self, basename: str, numbered: bool = True) -> Path: + """Create a new temporary directory managed by the factory. + + :param basename: + Directory base name, must be a relative path. + + :param numbered: + If ``True``, ensure the directory is unique by adding a numbered + suffix greater than any existing one: ``basename="foo-"`` and ``numbered=True`` + means that this function will create directories named ``"foo-0"``, + ``"foo-1"``, ``"foo-2"`` and so on. + + :returns: + The path to the new directory. + """ + basename = self._ensure_relative_to_basetemp(basename) + if not numbered: + p = self.getbasetemp().joinpath(basename) + p.mkdir(mode=0o700) + else: + p = make_numbered_dir(root=self.getbasetemp(), prefix=basename, mode=0o700) + self._trace("mktemp", p) + return p + + def getbasetemp(self) -> Path: + """Return the base temporary directory, creating it if needed. + + :returns: + The base temporary directory. + """ + if self._basetemp is not None: + return self._basetemp + + if self._given_basetemp is not None: + basetemp = self._given_basetemp + if basetemp.exists(): + rm_rf(basetemp) + basetemp.mkdir(mode=0o700) + basetemp = basetemp.resolve() + else: + from_env = os.environ.get("PYTEST_DEBUG_TEMPROOT") + temproot = Path(from_env or tempfile.gettempdir()).resolve() + user = get_user() or "unknown" + # use a sub-directory in the temproot to speed-up + # make_numbered_dir() call + rootdir = temproot.joinpath(f"pytest-of-{user}") + try: + rootdir.mkdir(mode=0o700, exist_ok=True) + except OSError: + # getuser() likely returned illegal characters for the platform, use unknown back off mechanism + rootdir = temproot.joinpath("pytest-of-unknown") + rootdir.mkdir(mode=0o700, exist_ok=True) + # Because we use exist_ok=True with a predictable name, make sure + # we are the owners, to prevent any funny business (on unix, where + # temproot is usually shared). + # Also, to keep things private, fixup any world-readable temp + # rootdir's permissions. Historically 0o755 was used, so we can't + # just error out on this, at least for a while. + uid = get_user_id() + if uid is not None: + rootdir_stat = rootdir.stat() + if rootdir_stat.st_uid != uid: + raise OSError( + f"The temporary directory {rootdir} is not owned by the current user. " + "Fix this and try again." 
+ ) + if (rootdir_stat.st_mode & 0o077) != 0: + os.chmod(rootdir, rootdir_stat.st_mode & ~0o077) + keep = self._retention_count + if self._retention_policy == "none": + keep = 0 + basetemp = make_numbered_dir_with_cleanup( + prefix="pytest-", + root=rootdir, + keep=keep, + lock_timeout=LOCK_TIMEOUT, + mode=0o700, + ) + assert basetemp is not None, basetemp + self._basetemp = basetemp + self._trace("new basetemp", basetemp) + return basetemp + + +def get_user() -> Optional[str]: + """Return the current user name, or None if getuser() does not work + in the current environment (see #1010).""" + try: + # In some exotic environments, getpass may not be importable. + import getpass + + return getpass.getuser() + except (ImportError, OSError, KeyError): + return None + + +def pytest_configure(config: Config) -> None: + """Create a TempPathFactory and attach it to the config object. + + This is to comply with existing plugins which expect the handler to be + available at pytest_configure time, but ideally should be moved entirely + to the tmp_path_factory session fixture. + """ + mp = MonkeyPatch() + config.add_cleanup(mp.undo) + _tmp_path_factory = TempPathFactory.from_config(config, _ispytest=True) + mp.setattr(config, "_tmp_path_factory", _tmp_path_factory, raising=False) + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "tmp_path_retention_count", + help="How many sessions should we keep the `tmp_path` directories, according to `tmp_path_retention_policy`.", + default=3, + ) + + parser.addini( + "tmp_path_retention_policy", + help="Controls which directories created by the `tmp_path` fixture are kept around, based on test outcome. " + "(all/failed/none)", + default="all", + ) + + +@fixture(scope="session") +def tmp_path_factory(request: FixtureRequest) -> TempPathFactory: + """Return a :class:`pytest.TempPathFactory` instance for the test session.""" + # Set dynamically by pytest_configure() above. + return request.config._tmp_path_factory # type: ignore + + +def _mk_tmp(request: FixtureRequest, factory: TempPathFactory) -> Path: + name = request.node.name + name = re.sub(r"[\W]", "_", name) + MAXVAL = 30 + name = name[:MAXVAL] + return factory.mktemp(name, numbered=True) + + +@fixture +def tmp_path( + request: FixtureRequest, tmp_path_factory: TempPathFactory +) -> Generator[Path, None, None]: + """Return a temporary directory path object which is unique to each test + function invocation, created as a sub directory of the base temporary + directory. + + By default, a new base temporary directory is created each test session, + and old bases are removed after 3 sessions, to aid in debugging. + This behavior can be configured with :confval:`tmp_path_retention_count` and + :confval:`tmp_path_retention_policy`. + If ``--basetemp`` is used then it is cleared each session. See + :ref:`temporary directory location and retention`. + + The returned object is a :class:`pathlib.Path` object. + """ + path = _mk_tmp(request, tmp_path_factory) + yield path + + # Remove the tmpdir if the policy is "failed" and the test passed. + tmp_path_factory: TempPathFactory = request.session.config._tmp_path_factory # type: ignore + policy = tmp_path_factory._retention_policy + result_dict = request.node.stash[tmppath_result_key] + + if policy == "failed" and result_dict.get("call", True): + # We do a "best effort" to remove files, but it might not be possible due to some leaked resource, + # permissions, etc, in which case we ignore it. 
+ rmtree(path, ignore_errors=True) + + del request.node.stash[tmppath_result_key] + + +def pytest_sessionfinish(session, exitstatus: Union[int, ExitCode]): + """After each session, remove base directory if all the tests passed, + the policy is "failed", and the basetemp is not specified by a user. + """ + tmp_path_factory: TempPathFactory = session.config._tmp_path_factory + basetemp = tmp_path_factory._basetemp + if basetemp is None: + return + + policy = tmp_path_factory._retention_policy + if ( + exitstatus == 0 + and policy == "failed" + and tmp_path_factory._given_basetemp is None + ): + if basetemp.is_dir(): + # We do a "best effort" to remove files, but it might not be possible due to some leaked resource, + # permissions, etc, in which case we ignore it. + rmtree(basetemp, ignore_errors=True) + + # Remove dead symlinks. + if basetemp.is_dir(): + cleanup_dead_symlinks(basetemp) + + +@hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_makereport( + item: Item, call +) -> Generator[None, TestReport, TestReport]: + rep = yield + assert rep.when is not None + empty: Dict[str, bool] = {} + item.stash.setdefault(tmppath_result_key, empty)[rep.when] = rep.passed + return rep diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/unittest.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/unittest.py new file mode 100644 index 000000000..8f1791bf7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/unittest.py @@ -0,0 +1,427 @@ +# mypy: allow-untyped-defs +"""Discover and run std-library "unittest" style tests.""" + +import sys +import traceback +import types +from typing import Any +from typing import Callable +from typing import Generator +from typing import Iterable +from typing import List +from typing import Optional +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import Union + +import _pytest._code +from _pytest.compat import is_async_function +from _pytest.config import hookimpl +from _pytest.fixtures import FixtureRequest +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import exit +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.outcomes import xfail +from _pytest.python import Class +from _pytest.python import Function +from _pytest.python import Module +from _pytest.runner import CallInfo +import pytest + + +if sys.version_info[:2] < (3, 11): + from exceptiongroup import ExceptionGroup + +if TYPE_CHECKING: + import unittest + + import twisted.trial.unittest + + _SysExcInfoType = Union[ + Tuple[Type[BaseException], BaseException, types.TracebackType], + Tuple[None, None, None], + ] + + +def pytest_pycollect_makeitem( + collector: Union[Module, Class], name: str, obj: object +) -> Optional["UnitTestCase"]: + # Has unittest been imported and is obj a subclass of its TestCase? + try: + ut = sys.modules["unittest"] + # Type ignored because `ut` is an opaque module. + if not issubclass(obj, ut.TestCase): # type: ignore + return None + except Exception: + return None + # Yes, so let's collect it. + return UnitTestCase.from_parent(collector, name=name, obj=obj) + + +class UnitTestCase(Class): + # Marker for fixturemanger.getfixtureinfo() + # to declare that our children do not support funcargs. + nofuncargs = True + + def newinstance(self): + # TestCase __init__ takes the method (test) name. 
The TestCase + # constructor treats the name "runTest" as a special no-op, so it can be + # used when a dummy instance is needed. While unittest.TestCase has a + # default, some subclasses omit the default (#9610), so always supply + # it. + return self.obj("runTest") + + def collect(self) -> Iterable[Union[Item, Collector]]: + from unittest import TestLoader + + cls = self.obj + if not getattr(cls, "__test__", True): + return + + skipped = _is_skipped(cls) + if not skipped: + self._register_unittest_setup_method_fixture(cls) + self._register_unittest_setup_class_fixture(cls) + self._register_setup_class_fixture() + + self.session._fixturemanager.parsefactories(self.newinstance(), self.nodeid) + + loader = TestLoader() + foundsomething = False + for name in loader.getTestCaseNames(self.obj): + x = getattr(self.obj, name) + if not getattr(x, "__test__", True): + continue + yield TestCaseFunction.from_parent(self, name=name) + foundsomething = True + + if not foundsomething: + runtest = getattr(self.obj, "runTest", None) + if runtest is not None: + ut = sys.modules.get("twisted.trial.unittest", None) + if ut is None or runtest != ut.TestCase.runTest: + yield TestCaseFunction.from_parent(self, name="runTest") + + def _register_unittest_setup_class_fixture(self, cls: type) -> None: + """Register an auto-use fixture to invoke setUpClass and + tearDownClass (#517).""" + setup = getattr(cls, "setUpClass", None) + teardown = getattr(cls, "tearDownClass", None) + if setup is None and teardown is None: + return None + cleanup = getattr(cls, "doClassCleanups", lambda: None) + + def process_teardown_exceptions() -> None: + # tearDown_exceptions is a list set in the class containing exc_infos for errors during + # teardown for the class. + exc_infos = getattr(cls, "tearDown_exceptions", None) + if not exc_infos: + return + exceptions = [exc for (_, exc, _) in exc_infos] + # If a single exception, raise it directly as this provides a more readable + # error (hopefully this will improve in #12255). + if len(exceptions) == 1: + raise exceptions[0] + else: + raise ExceptionGroup("Unittest class cleanup errors", exceptions) + + def unittest_setup_class_fixture( + request: FixtureRequest, + ) -> Generator[None, None, None]: + cls = request.cls + if _is_skipped(cls): + reason = cls.__unittest_skip_why__ + raise pytest.skip.Exception(reason, _use_item_location=True) + if setup is not None: + try: + setup() + # unittest does not call the cleanup function for every BaseException, so we + # follow this here. + except Exception: + cleanup() + process_teardown_exceptions() + raise + yield + try: + if teardown is not None: + teardown() + finally: + cleanup() + process_teardown_exceptions() + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. 
+ name=f"_unittest_setUpClass_fixture_{cls.__qualname__}", + func=unittest_setup_class_fixture, + nodeid=self.nodeid, + scope="class", + autouse=True, + ) + + def _register_unittest_setup_method_fixture(self, cls: type) -> None: + """Register an auto-use fixture to invoke setup_method and + teardown_method (#517).""" + setup = getattr(cls, "setup_method", None) + teardown = getattr(cls, "teardown_method", None) + if setup is None and teardown is None: + return None + + def unittest_setup_method_fixture( + request: FixtureRequest, + ) -> Generator[None, None, None]: + self = request.instance + if _is_skipped(self): + reason = self.__unittest_skip_why__ + raise pytest.skip.Exception(reason, _use_item_location=True) + if setup is not None: + setup(self, request.function) + yield + if teardown is not None: + teardown(self, request.function) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_unittest_setup_method_fixture_{cls.__qualname__}", + func=unittest_setup_method_fixture, + nodeid=self.nodeid, + scope="function", + autouse=True, + ) + + +class TestCaseFunction(Function): + nofuncargs = True + _excinfo: Optional[List[_pytest._code.ExceptionInfo[BaseException]]] = None + + def _getinstance(self): + assert isinstance(self.parent, UnitTestCase) + return self.parent.obj(self.name) + + # Backward compat for pytest-django; can be removed after pytest-django + # updates + some slack. + @property + def _testcase(self): + return self.instance + + def setup(self) -> None: + # A bound method to be called during teardown() if set (see 'runtest()'). + self._explicit_tearDown: Optional[Callable[[], None]] = None + super().setup() + + def teardown(self) -> None: + super().teardown() + if self._explicit_tearDown is not None: + self._explicit_tearDown() + self._explicit_tearDown = None + self._obj = None + + def startTest(self, testcase: "unittest.TestCase") -> None: + pass + + def _addexcinfo(self, rawexcinfo: "_SysExcInfoType") -> None: + # Unwrap potential exception info (see twisted trial support below). + rawexcinfo = getattr(rawexcinfo, "_rawexcinfo", rawexcinfo) + try: + excinfo = _pytest._code.ExceptionInfo[BaseException].from_exc_info( + rawexcinfo # type: ignore[arg-type] + ) + # Invoke the attributes to trigger storing the traceback + # trial causes some issue there. 
+ _ = excinfo.value + _ = excinfo.traceback + except TypeError: + try: + try: + values = traceback.format_exception(*rawexcinfo) + values.insert( + 0, + "NOTE: Incompatible Exception Representation, " + "displaying natively:\n\n", + ) + fail("".join(values), pytrace=False) + except (fail.Exception, KeyboardInterrupt): + raise + except BaseException: + fail( + "ERROR: Unknown Incompatible Exception " + f"representation:\n{rawexcinfo!r}", + pytrace=False, + ) + except KeyboardInterrupt: + raise + except fail.Exception: + excinfo = _pytest._code.ExceptionInfo.from_current() + self.__dict__.setdefault("_excinfo", []).append(excinfo) + + def addError( + self, testcase: "unittest.TestCase", rawexcinfo: "_SysExcInfoType" + ) -> None: + try: + if isinstance(rawexcinfo[1], exit.Exception): + exit(rawexcinfo[1].msg) + except TypeError: + pass + self._addexcinfo(rawexcinfo) + + def addFailure( + self, testcase: "unittest.TestCase", rawexcinfo: "_SysExcInfoType" + ) -> None: + self._addexcinfo(rawexcinfo) + + def addSkip(self, testcase: "unittest.TestCase", reason: str) -> None: + try: + raise pytest.skip.Exception(reason, _use_item_location=True) + except skip.Exception: + self._addexcinfo(sys.exc_info()) + + def addExpectedFailure( + self, + testcase: "unittest.TestCase", + rawexcinfo: "_SysExcInfoType", + reason: str = "", + ) -> None: + try: + xfail(str(reason)) + except xfail.Exception: + self._addexcinfo(sys.exc_info()) + + def addUnexpectedSuccess( + self, + testcase: "unittest.TestCase", + reason: Optional["twisted.trial.unittest.Todo"] = None, + ) -> None: + msg = "Unexpected success" + if reason: + msg += f": {reason.reason}" + # Preserve unittest behaviour - fail the test. Explicitly not an XPASS. + try: + fail(msg, pytrace=False) + except fail.Exception: + self._addexcinfo(sys.exc_info()) + + def addSuccess(self, testcase: "unittest.TestCase") -> None: + pass + + def stopTest(self, testcase: "unittest.TestCase") -> None: + pass + + def addDuration(self, testcase: "unittest.TestCase", elapsed: float) -> None: + pass + + def runtest(self) -> None: + from _pytest.debugging import maybe_wrap_pytest_function_for_tracing + + testcase = self.instance + assert testcase is not None + + maybe_wrap_pytest_function_for_tracing(self) + + # Let the unittest framework handle async functions. + if is_async_function(self.obj): + testcase(result=self) + else: + # When --pdb is given, we want to postpone calling tearDown() otherwise + # when entering the pdb prompt, tearDown() would have probably cleaned up + # instance variables, which makes it difficult to debug. + # Arguably we could always postpone tearDown(), but this changes the moment where the + # TestCase instance interacts with the results object, so better to only do it + # when absolutely needed. + # We need to consider if the test itself is skipped, or the whole class. + assert isinstance(self.parent, UnitTestCase) + skipped = _is_skipped(self.obj) or _is_skipped(self.parent.obj) + if self.config.getoption("usepdb") and not skipped: + self._explicit_tearDown = testcase.tearDown + setattr(testcase, "tearDown", lambda *args: None) + + # We need to update the actual bound method with self.obj, because + # wrap_pytest_function_for_tracing replaces self.obj by a wrapper. 
+ setattr(testcase, self.name, self.obj) + try: + testcase(result=self) + finally: + delattr(testcase, self.name) + + def _traceback_filter( + self, excinfo: _pytest._code.ExceptionInfo[BaseException] + ) -> _pytest._code.Traceback: + traceback = super()._traceback_filter(excinfo) + ntraceback = traceback.filter( + lambda x: not x.frame.f_globals.get("__unittest"), + ) + if not ntraceback: + ntraceback = traceback + return ntraceback + + +@hookimpl(tryfirst=True) +def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> None: + if isinstance(item, TestCaseFunction): + if item._excinfo: + call.excinfo = item._excinfo.pop(0) + try: + del call.result + except AttributeError: + pass + + # Convert unittest.SkipTest to pytest.skip. + # This is actually only needed for nose, which reuses unittest.SkipTest for + # its own nose.SkipTest. For unittest TestCases, SkipTest is already + # handled internally, and doesn't reach here. + unittest = sys.modules.get("unittest") + if unittest and call.excinfo and isinstance(call.excinfo.value, unittest.SkipTest): + excinfo = call.excinfo + call2 = CallInfo[None].from_call( + lambda: pytest.skip(str(excinfo.value)), call.when + ) + call.excinfo = call2.excinfo + + +# Twisted trial support. +classImplements_has_run = False + + +@hookimpl(wrapper=True) +def pytest_runtest_protocol(item: Item) -> Generator[None, object, object]: + if isinstance(item, TestCaseFunction) and "twisted.trial.unittest" in sys.modules: + ut: Any = sys.modules["twisted.python.failure"] + global classImplements_has_run + Failure__init__ = ut.Failure.__init__ + if not classImplements_has_run: + from twisted.trial.itrial import IReporter + from zope.interface import classImplements + + classImplements(TestCaseFunction, IReporter) + classImplements_has_run = True + + def excstore( + self, exc_value=None, exc_type=None, exc_tb=None, captureVars=None + ): + if exc_value is None: + self._rawexcinfo = sys.exc_info() + else: + if exc_type is None: + exc_type = type(exc_value) + self._rawexcinfo = (exc_type, exc_value, exc_tb) + try: + Failure__init__( + self, exc_value, exc_type, exc_tb, captureVars=captureVars + ) + except TypeError: + Failure__init__(self, exc_value, exc_type, exc_tb) + + ut.Failure.__init__ = excstore + try: + res = yield + finally: + ut.Failure.__init__ = Failure__init__ + else: + res = yield + return res + + +def _is_skipped(obj) -> bool: + """Return True if the given object has been marked with @unittest.skip.""" + return bool(getattr(obj, "__unittest_skip__", False)) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/unraisableexception.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/unraisableexception.py new file mode 100644 index 000000000..f649267ab --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/unraisableexception.py @@ -0,0 +1,95 @@ +import sys +import traceback +from types import TracebackType +from typing import Any +from typing import Callable +from typing import Generator +from typing import Optional +from typing import Type +import warnings + +import pytest + + +# Copied from cpython/Lib/test/support/__init__.py, with modifications. +class catch_unraisable_exception: + """Context manager catching unraisable exception using sys.unraisablehook. + + Storing the exception value (cm.unraisable.exc_value) creates a reference + cycle. The reference cycle is broken explicitly when the context manager + exits. 
+ + Storing the object (cm.unraisable.object) can resurrect it if it is set to + an object which is being finalized. Exiting the context manager clears the + stored object. + + Usage: + with catch_unraisable_exception() as cm: + # code creating an "unraisable exception" + ... + # check the unraisable exception: use cm.unraisable + ... + # cm.unraisable attribute no longer exists at this point + # (to break a reference cycle) + """ + + def __init__(self) -> None: + self.unraisable: Optional["sys.UnraisableHookArgs"] = None + self._old_hook: Optional[Callable[["sys.UnraisableHookArgs"], Any]] = None + + def _hook(self, unraisable: "sys.UnraisableHookArgs") -> None: + # Storing unraisable.object can resurrect an object which is being + # finalized. Storing unraisable.exc_value creates a reference cycle. + self.unraisable = unraisable + + def __enter__(self) -> "catch_unraisable_exception": + self._old_hook = sys.unraisablehook + sys.unraisablehook = self._hook + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + assert self._old_hook is not None + sys.unraisablehook = self._old_hook + self._old_hook = None + del self.unraisable + + +def unraisable_exception_runtest_hook() -> Generator[None, None, None]: + with catch_unraisable_exception() as cm: + try: + yield + finally: + if cm.unraisable: + if cm.unraisable.err_msg is not None: + err_msg = cm.unraisable.err_msg + else: + err_msg = "Exception ignored in" + msg = f"{err_msg}: {cm.unraisable.object!r}\n\n" + msg += "".join( + traceback.format_exception( + cm.unraisable.exc_type, + cm.unraisable.exc_value, + cm.unraisable.exc_traceback, + ) + ) + warnings.warn(pytest.PytestUnraisableExceptionWarning(msg)) + + +@pytest.hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_setup() -> Generator[None, None, None]: + yield from unraisable_exception_runtest_hook() + + +@pytest.hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_call() -> Generator[None, None, None]: + yield from unraisable_exception_runtest_hook() + + +@pytest.hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_teardown() -> Generator[None, None, None]: + yield from unraisable_exception_runtest_hook() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/warning_types.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/warning_types.py new file mode 100644 index 000000000..a5884f295 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/warning_types.py @@ -0,0 +1,165 @@ +import dataclasses +import inspect +from types import FunctionType +from typing import Any +from typing import final +from typing import Generic +from typing import Type +from typing import TypeVar +import warnings + + +class PytestWarning(UserWarning): + """Base class for all warnings emitted by pytest.""" + + __module__ = "pytest" + + +@final +class PytestAssertRewriteWarning(PytestWarning): + """Warning emitted by the pytest assert rewrite module.""" + + __module__ = "pytest" + + +@final +class PytestCacheWarning(PytestWarning): + """Warning emitted by the cache plugin in various situations.""" + + __module__ = "pytest" + + +@final +class PytestConfigWarning(PytestWarning): + """Warning emitted for configuration issues.""" + + __module__ = "pytest" + + +@final +class PytestCollectionWarning(PytestWarning): + """Warning emitted when pytest is not able to collect a file or symbol in a module.""" + + __module__ = "pytest" + + +class 
PytestDeprecationWarning(PytestWarning, DeprecationWarning): + """Warning class for features that will be removed in a future version.""" + + __module__ = "pytest" + + +class PytestRemovedIn9Warning(PytestDeprecationWarning): + """Warning class for features that will be removed in pytest 9.""" + + __module__ = "pytest" + + +class PytestReturnNotNoneWarning(PytestWarning): + """Warning emitted when a test function is returning value other than None.""" + + __module__ = "pytest" + + +@final +class PytestExperimentalApiWarning(PytestWarning, FutureWarning): + """Warning category used to denote experiments in pytest. + + Use sparingly as the API might change or even be removed completely in a + future version. + """ + + __module__ = "pytest" + + @classmethod + def simple(cls, apiname: str) -> "PytestExperimentalApiWarning": + return cls(f"{apiname} is an experimental api that may change over time") + + +@final +class PytestUnhandledCoroutineWarning(PytestReturnNotNoneWarning): + """Warning emitted for an unhandled coroutine. + + A coroutine was encountered when collecting test functions, but was not + handled by any async-aware plugin. + Coroutine test functions are not natively supported. + """ + + __module__ = "pytest" + + +@final +class PytestUnknownMarkWarning(PytestWarning): + """Warning emitted on use of unknown markers. + + See :ref:`mark` for details. + """ + + __module__ = "pytest" + + +@final +class PytestUnraisableExceptionWarning(PytestWarning): + """An unraisable exception was reported. + + Unraisable exceptions are exceptions raised in :meth:`__del__ ` + implementations and similar situations when the exception cannot be raised + as normal. + """ + + __module__ = "pytest" + + +@final +class PytestUnhandledThreadExceptionWarning(PytestWarning): + """An unhandled exception occurred in a :class:`~threading.Thread`. + + Such exceptions don't propagate normally. + """ + + __module__ = "pytest" + + +_W = TypeVar("_W", bound=PytestWarning) + + +@final +@dataclasses.dataclass +class UnformattedWarning(Generic[_W]): + """A warning meant to be formatted during runtime. + + This is used to hold warnings that need to format their message at runtime, + as opposed to a direct message. + """ + + category: Type["_W"] + template: str + + def format(self, **kwargs: Any) -> _W: + """Return an instance of the warning category, formatted with given kwargs.""" + return self.category(self.template.format(**kwargs)) + + +def warn_explicit_for(method: FunctionType, message: PytestWarning) -> None: + """ + Issue the warning :param:`message` for the definition of the given :param:`method` + + this helps to log warnings for functions defined prior to finding an issue with them + (like hook wrappers being marked in a legacy mechanism) + """ + lineno = method.__code__.co_firstlineno + filename = inspect.getfile(method) + module = method.__module__ + mod_globals = method.__globals__ + try: + warnings.warn_explicit( + message, + type(message), + filename=filename, + module=module, + registry=mod_globals.setdefault("__warningregistry__", {}), + lineno=lineno, + ) + except Warning as w: + # If warnings are errors (e.g. -Werror), location information gets lost, so we add it to the message. 
+ raise type(w)(f"{w}\n at {filename}:{lineno}") from None diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/warnings.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/warnings.py new file mode 100644 index 000000000..22590892f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_pytest/warnings.py @@ -0,0 +1,150 @@ +# mypy: allow-untyped-defs +from contextlib import contextmanager +import sys +from typing import Generator +from typing import Literal +from typing import Optional +import warnings + +from _pytest.config import apply_warning_filters +from _pytest.config import Config +from _pytest.config import parse_warning_filter +from _pytest.main import Session +from _pytest.nodes import Item +from _pytest.terminal import TerminalReporter +import pytest + + +def pytest_configure(config: Config) -> None: + config.addinivalue_line( + "markers", + "filterwarnings(warning): add a warning filter to the given test. " + "see https://docs.pytest.org/en/stable/how-to/capture-warnings.html#pytest-mark-filterwarnings ", + ) + + +@contextmanager +def catch_warnings_for_item( + config: Config, + ihook, + when: Literal["config", "collect", "runtest"], + item: Optional[Item], +) -> Generator[None, None, None]: + """Context manager that catches warnings generated in the contained execution block. + + ``item`` can be None if we are not in the context of an item execution. + + Each warning captured triggers the ``pytest_warning_recorded`` hook. + """ + config_filters = config.getini("filterwarnings") + cmdline_filters = config.known_args_namespace.pythonwarnings or [] + with warnings.catch_warnings(record=True) as log: + # mypy can't infer that record=True means log is not None; help it. + assert log is not None + + if not sys.warnoptions: + # If user is not explicitly configuring warning filters, show deprecation warnings by default (#2908). + warnings.filterwarnings("always", category=DeprecationWarning) + warnings.filterwarnings("always", category=PendingDeprecationWarning) + + # To be enabled in pytest 9.0.0. + # warnings.filterwarnings("error", category=pytest.PytestRemovedIn9Warning) + + apply_warning_filters(config_filters, cmdline_filters) + + # apply filters from "filterwarnings" marks + nodeid = "" if item is None else item.nodeid + if item is not None: + for mark in item.iter_markers(name="filterwarnings"): + for arg in mark.args: + warnings.filterwarnings(*parse_warning_filter(arg, escape=False)) + + try: + yield + finally: + for warning_message in log: + ihook.pytest_warning_recorded.call_historic( + kwargs=dict( + warning_message=warning_message, + nodeid=nodeid, + when=when, + location=None, + ) + ) + + +def warning_record_to_str(warning_message: warnings.WarningMessage) -> str: + """Convert a warnings.WarningMessage to a string.""" + warn_msg = warning_message.message + msg = warnings.formatwarning( + str(warn_msg), + warning_message.category, + warning_message.filename, + warning_message.lineno, + warning_message.line, + ) + if warning_message.source is not None: + try: + import tracemalloc + except ImportError: + pass + else: + tb = tracemalloc.get_object_traceback(warning_message.source) + if tb is not None: + formatted_tb = "\n".join(tb.format()) + # Use a leading new line to better separate the (large) output + # from the traceback to the previous warning text. + msg += f"\nObject allocated at:\n{formatted_tb}" + else: + # No need for a leading new line. 
+ url = "https://docs.pytest.org/en/stable/how-to/capture-warnings.html#resource-warnings" + msg += "Enable tracemalloc to get traceback where the object was allocated.\n" + msg += f"See {url} for more info." + return msg + + +@pytest.hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_protocol(item: Item) -> Generator[None, object, object]: + with catch_warnings_for_item( + config=item.config, ihook=item.ihook, when="runtest", item=item + ): + return (yield) + + +@pytest.hookimpl(wrapper=True, tryfirst=True) +def pytest_collection(session: Session) -> Generator[None, object, object]: + config = session.config + with catch_warnings_for_item( + config=config, ihook=config.hook, when="collect", item=None + ): + return (yield) + + +@pytest.hookimpl(wrapper=True) +def pytest_terminal_summary( + terminalreporter: TerminalReporter, +) -> Generator[None, None, None]: + config = terminalreporter.config + with catch_warnings_for_item( + config=config, ihook=config.hook, when="config", item=None + ): + return (yield) + + +@pytest.hookimpl(wrapper=True) +def pytest_sessionfinish(session: Session) -> Generator[None, None, None]: + config = session.config + with catch_warnings_for_item( + config=config, ihook=config.hook, when="config", item=None + ): + return (yield) + + +@pytest.hookimpl(wrapper=True) +def pytest_load_initial_conftests( + early_config: "Config", +) -> Generator[None, None, None]: + with catch_warnings_for_item( + config=early_config, ihook=early_config.hook, when="config", item=None + ): + return (yield) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/_yaml/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/_yaml/__init__.py new file mode 100644 index 000000000..7baa8c4b6 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/_yaml/__init__.py @@ -0,0 +1,33 @@ +# This is a stub package designed to roughly emulate the _yaml +# extension module, which previously existed as a standalone module +# and has been moved into the `yaml` package namespace. +# It does not perfectly mimic its old counterpart, but should get +# close enough for anyone who's relying on it even when they shouldn't. +import yaml + +# in some circumstances, the yaml module we imoprted may be from a different version, so we need +# to tread carefully when poking at it here (it may not have the attributes we expect) +if not getattr(yaml, '__with_libyaml__', False): + from sys import version_info + + exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError + raise exc("No module named '_yaml'") +else: + from yaml._yaml import * + import warnings + warnings.warn( + 'The _yaml extension module is now located at yaml._yaml' + ' and its location is subject to change. To use the' + ' LibYAML-based parser and emitter, import from `yaml`:' + ' `from yaml import CLoader as Loader, CDumper as Dumper`.', + DeprecationWarning + ) + del warnings + # Don't `del yaml` here because yaml is actually an existing + # namespace member of _yaml. + +__name__ = '_yaml' +# If the module is top-level (i.e. not a part of any specific package) +# then the attribute should be set to ''. 
+# https://docs.python.org/3.8/library/types.html +__package__ = '' diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs-2.4.0.dist-info/INSTALLER b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs-2.4.0.dist-info/INSTALLER new file mode 100644 index 000000000..ce3133dc1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs-2.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +Poetry 2.2.1 \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs-2.4.0.dist-info/LICENSE b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs-2.4.0.dist-info/LICENSE new file mode 100644 index 000000000..f26bcf4d2 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs-2.4.0.dist-info/LICENSE @@ -0,0 +1,279 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see https://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see https://opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +Python software and documentation are licensed under the +Python Software Foundation License Version 2. 
+ +Starting with Python 3.8.6, examples, recipes, and other code in +the documentation are dual licensed under the PSF License Version 2 +and the Zero-Clause BSD license. + +Some software incorporated into Python is under different licenses. +The licenses are listed with code falling under that license. + + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. 
Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. 
This +Agreement may also be obtained from a proxy server on the internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. 
+ +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION +---------------------------------------------------------------------- + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs-2.4.0.dist-info/METADATA b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs-2.4.0.dist-info/METADATA new file mode 100644 index 000000000..f4ba195f9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs-2.4.0.dist-info/METADATA @@ -0,0 +1,122 @@ +Metadata-Version: 2.1 +Name: aiohappyeyeballs +Version: 2.4.0 +Summary: Happy Eyeballs for asyncio +Home-page: https://github.com/aio-libs/aiohappyeyeballs +License: Python-2.0.1 +Author: J. Nick Koston +Author-email: nick@koston.org +Requires-Python: >=3.8 +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: License :: Other/Proprietary License +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Topic :: Software Development :: Libraries +Project-URL: Bug Tracker, https://github.com/aio-libs/aiohappyeyeballs/issues +Project-URL: Changelog, https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md +Project-URL: Documentation, https://aiohappyeyeballs.readthedocs.io +Project-URL: Repository, https://github.com/aio-libs/aiohappyeyeballs +Description-Content-Type: text/markdown + +# aiohappyeyeballs + +

+ [README badges: CI Status, Documentation Status, Test coverage percentage, Poetry, black, pre-commit, PyPI Version, Supported Python versions, License]
+ +--- + +**Documentation**: https://aiohappyeyeballs.readthedocs.io + +**Source Code**: https://github.com/aio-libs/aiohappyeyeballs + +--- + +[Happy Eyeballs](https://en.wikipedia.org/wiki/Happy_Eyeballs) +([RFC 8305](https://www.rfc-editor.org/rfc/rfc8305.html)) + +## Use case + +This library exists to allow connecting with +[Happy Eyeballs](https://en.wikipedia.org/wiki/Happy_Eyeballs) +([RFC 8305](https://www.rfc-editor.org/rfc/rfc8305.html)) +when you +already have a list of addrinfo and not a DNS name. + +The stdlib version of `loop.create_connection()` +will only work when you pass in an unresolved name which +is not a good fit when using DNS caching or resolving +names via another method such as `zeroconf`. + +## Installation + +Install this via pip (or your favourite package manager): + +`pip install aiohappyeyeballs` + +## Example usage + +```python + +addr_infos = await loop.getaddrinfo("example.org", 80) + +socket = await start_connection(addr_infos) +socket = await start_connection(addr_infos, local_addr_infos=local_addr_infos, happy_eyeballs_delay=0.2) + +transport, protocol = await loop.create_connection( + MyProtocol, sock=socket, ...) + +# Remove the first address for each family from addr_info +pop_addr_infos_interleave(addr_info, 1) + +# Remove all matching address from addr_info +remove_addr_infos(addr_info, "dead::beef::") + +# Convert a local_addr to local_addr_infos +local_addr_infos = addr_to_addr_infos(("127.0.0.1",0)) +``` + +## Credits + +This package contains code from cpython and is licensed under the same terms as cpython itself. + +This package was created with +[Copier](https://copier.readthedocs.io/) and the +[browniebroke/pypackage-template](https://github.com/browniebroke/pypackage-template) +project template. + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs-2.4.0.dist-info/RECORD b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs-2.4.0.dist-info/RECORD new file mode 100644 index 000000000..964f5c21b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs-2.4.0.dist-info/RECORD @@ -0,0 +1,10 @@ +aiohappyeyeballs/__init__.py,sha256=OF-GJ4MGsl3QqO_2efe6UL11FAxRmzTDyDiwE6hgGc0,317 +aiohappyeyeballs/impl.py,sha256=dcYq05g3lGkkoxxd1KFnyXj4FKUaLfeplnJ93dLnoQQ,7289 +aiohappyeyeballs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +aiohappyeyeballs/types.py,sha256=iYPiBTl5J7YEjnIqEOVUTRPzz2DwqSHBRhvbAlM0zv0,234 +aiohappyeyeballs/utils.py,sha256=W_Oaf1iP8wYRHo6B95eYx-ZxbjpxyWwYgTdkhWqGF5c,3026 +aiohappyeyeballs-2.4.0.dist-info/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936 +aiohappyeyeballs-2.4.0.dist-info/METADATA,sha256=kBvy0zd3Thv-cT_rqGcgRCxvVqzXGRh8ghTC3zZfiLY,5901 +aiohappyeyeballs-2.4.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88 +aiohappyeyeballs-2.4.0.dist-info/INSTALLER,sha256=sz0Tnp99msIbbLB51q7U9nA6AvRKcsOeUKhrHH0Ulg4,12 +aiohappyeyeballs-2.4.0.dist-info/RECORD,, diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs-2.4.0.dist-info/WHEEL b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs-2.4.0.dist-info/WHEEL new file mode 100644 index 000000000..d73ccaae8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs-2.4.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: poetry-core 1.9.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs/__init__.py 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs/__init__.py new file mode 100644 index 000000000..9c98be9b1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs/__init__.py @@ -0,0 +1,13 @@ +__version__ = "2.4.0" + +from .impl import start_connection +from .types import AddrInfoType +from .utils import addr_to_addr_infos, pop_addr_infos_interleave, remove_addr_infos + +__all__ = ( + "start_connection", + "AddrInfoType", + "remove_addr_infos", + "pop_addr_infos_interleave", + "addr_to_addr_infos", +) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs/impl.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs/impl.py new file mode 100644 index 000000000..64aff8cf9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs/impl.py @@ -0,0 +1,205 @@ +"""Base implementation.""" + +import asyncio +import collections +import functools +import itertools +import socket +import sys +from asyncio import staggered +from typing import List, Optional, Sequence + +from .types import AddrInfoType + +if sys.version_info < (3, 8, 2): # noqa: UP036 + # asyncio.staggered is broken in Python 3.8.0 and 3.8.1 + # so it must be patched: + # https://github.com/aio-libs/aiohttp/issues/8556 + # https://bugs.python.org/issue39129 + # https://github.com/python/cpython/pull/17693 + import asyncio.futures + + asyncio.futures.TimeoutError = asyncio.TimeoutError # type: ignore[attr-defined] + + +async def start_connection( + addr_infos: Sequence[AddrInfoType], + *, + local_addr_infos: Optional[Sequence[AddrInfoType]] = None, + happy_eyeballs_delay: Optional[float] = None, + interleave: Optional[int] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, +) -> socket.socket: + """ + Connect to a TCP server. + + Create a socket connection to a specified destination. The + destination is specified as a list of AddrInfoType tuples as + returned from getaddrinfo(). + + The arguments are, in order: + + * ``family``: the address family, e.g. ``socket.AF_INET`` or + ``socket.AF_INET6``. + * ``type``: the socket type, e.g. ``socket.SOCK_STREAM`` or + ``socket.SOCK_DGRAM``. + * ``proto``: the protocol, e.g. ``socket.IPPROTO_TCP`` or + ``socket.IPPROTO_UDP``. + * ``canonname``: the canonical name of the address, e.g. + ``"www.python.org"``. + * ``sockaddr``: the socket address + + This method is a coroutine which will try to establish the connection + in the background. When successful, the coroutine returns a + socket. + + The expected use case is to use this method in conjunction with + loop.create_connection() to establish a connection to a server:: + + socket = await start_connection(addr_infos) + transport, protocol = await loop.create_connection( + MyProtocol, sock=socket, ...) 
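    An illustrative variant (the 250 ms value follows the RFC 8305
    recommendation and is an assumption here, not a default of this
    function): giving ``happy_eyeballs_delay`` makes the attempts run
    staggered rather than strictly sequential, with the address list
    interleaved by family first::

        socket = await start_connection(
            addr_infos, happy_eyeballs_delay=0.25)
        transport, protocol = await loop.create_connection(
            MyProtocol, sock=socket, ...)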
+ """ + if not (current_loop := loop): + current_loop = asyncio.get_running_loop() + + single_addr_info = len(addr_infos) == 1 + + if happy_eyeballs_delay is not None and interleave is None: + # If using happy eyeballs, default to interleave addresses by family + interleave = 1 + + if interleave and not single_addr_info: + addr_infos = _interleave_addrinfos(addr_infos, interleave) + + sock: Optional[socket.socket] = None + exceptions: List[List[OSError]] = [] + if happy_eyeballs_delay is None or single_addr_info: + # not using happy eyeballs + for addrinfo in addr_infos: + try: + sock = await _connect_sock( + current_loop, exceptions, addrinfo, local_addr_infos + ) + break + except OSError: + continue + else: # using happy eyeballs + sock, _, _ = await staggered.staggered_race( + ( + functools.partial( + _connect_sock, current_loop, exceptions, addrinfo, local_addr_infos + ) + for addrinfo in addr_infos + ), + happy_eyeballs_delay, + loop=current_loop, + ) + + if sock is None: + all_exceptions = [exc for sub in exceptions for exc in sub] + try: + first_exception = all_exceptions[0] + if len(all_exceptions) == 1: + raise first_exception + else: + # If they all have the same str(), raise one. + model = str(first_exception) + if all(str(exc) == model for exc in all_exceptions): + raise first_exception + # Raise a combined exception so the user can see all + # the various error messages. + msg = "Multiple exceptions: {}".format( + ", ".join(str(exc) for exc in all_exceptions) + ) + # If the errno is the same for all exceptions, raise + # an OSError with that errno. + first_errno = first_exception.errno + if all( + isinstance(exc, OSError) and exc.errno == first_errno + for exc in all_exceptions + ): + raise OSError(first_errno, msg) + raise OSError(msg) + finally: + all_exceptions = None # type: ignore[assignment] + exceptions = None # type: ignore[assignment] + + return sock + + +async def _connect_sock( + loop: asyncio.AbstractEventLoop, + exceptions: List[List[OSError]], + addr_info: AddrInfoType, + local_addr_infos: Optional[Sequence[AddrInfoType]] = None, +) -> socket.socket: + """Create, bind and connect one socket.""" + my_exceptions: list[OSError] = [] + exceptions.append(my_exceptions) + family, type_, proto, _, address = addr_info + sock = None + try: + sock = socket.socket(family=family, type=type_, proto=proto) + sock.setblocking(False) + if local_addr_infos is not None: + for lfamily, _, _, _, laddr in local_addr_infos: + # skip local addresses of different family + if lfamily != family: + continue + try: + sock.bind(laddr) + break + except OSError as exc: + msg = ( + f"error while attempting to bind on " + f"address {laddr!r}: " + f"{exc.strerror.lower()}" + ) + exc = OSError(exc.errno, msg) + my_exceptions.append(exc) + else: # all bind attempts failed + if my_exceptions: + raise my_exceptions.pop() + else: + raise OSError(f"no matching local address with {family=} found") + await loop.sock_connect(sock, address) + return sock + except OSError as exc: + my_exceptions.append(exc) + if sock is not None: + sock.close() + raise + except: + if sock is not None: + sock.close() + raise + finally: + exceptions = my_exceptions = None # type: ignore[assignment] + + +def _interleave_addrinfos( + addrinfos: Sequence[AddrInfoType], first_address_family_count: int = 1 +) -> List[AddrInfoType]: + """Interleave list of addrinfo tuples by family.""" + # Group addresses by family + addrinfos_by_family: collections.OrderedDict[int, List[AddrInfoType]] = ( + collections.OrderedDict() + ) + for addr 
in addrinfos: + family = addr[0] + if family not in addrinfos_by_family: + addrinfos_by_family[family] = [] + addrinfos_by_family[family].append(addr) + addrinfos_lists = list(addrinfos_by_family.values()) + + reordered: List[AddrInfoType] = [] + if first_address_family_count > 1: + reordered.extend(addrinfos_lists[0][: first_address_family_count - 1]) + del addrinfos_lists[0][: first_address_family_count - 1] + reordered.extend( + a + for a in itertools.chain.from_iterable(itertools.zip_longest(*addrinfos_lists)) + if a is not None + ) + return reordered diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs/py.typed b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs/types.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs/types.py new file mode 100644 index 000000000..01d79a28e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs/types.py @@ -0,0 +1,12 @@ +"""Types for aiohappyeyeballs.""" + +import socket +from typing import Tuple, Union + +AddrInfoType = Tuple[ + Union[int, socket.AddressFamily], + Union[int, socket.SocketKind], + int, + str, + Tuple, # type: ignore[type-arg] +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs/utils.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs/utils.py new file mode 100644 index 000000000..b5745ae7f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohappyeyeballs/utils.py @@ -0,0 +1,97 @@ +"""Utility functions for aiohappyeyeballs.""" + +import ipaddress +import socket +from typing import Dict, List, Optional, Tuple, Union + +from .types import AddrInfoType + + +def addr_to_addr_infos( + addr: Optional[ + Union[Tuple[str, int, int, int], Tuple[str, int, int], Tuple[str, int]] + ] +) -> Optional[List[AddrInfoType]]: + """Convert an address tuple to a list of addr_info tuples.""" + if addr is None: + return None + host = addr[0] + port = addr[1] + is_ipv6 = ":" in host + if is_ipv6: + flowinfo = 0 + scopeid = 0 + addr_len = len(addr) + if addr_len >= 4: + scopeid = addr[3] # type: ignore[misc] + if addr_len >= 3: + flowinfo = addr[2] # type: ignore[misc] + addr = (host, port, flowinfo, scopeid) + family = socket.AF_INET6 + else: + addr = (host, port) + family = socket.AF_INET + return [(family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", addr)] + + +def pop_addr_infos_interleave( + addr_infos: List[AddrInfoType], interleave: Optional[int] = None +) -> None: + """ + Pop addr_info from the list of addr_infos by family up to interleave times. + + The interleave parameter is used to know how many addr_infos for + each family should be popped of the top of the list. 
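    Illustrative behaviour, assuming ``addr_infos`` holds two IPv6
    entries followed by two IPv4 entries (not an example from the
    upstream docs)::

        pop_addr_infos_interleave(addr_infos, 1)
        # in-place: drops the first IPv6 entry and the first IPv4 entry,
        # leaving the remaining two entries in their original order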
+ """ + seen: Dict[int, int] = {} + if interleave is None: + interleave = 1 + to_remove: List[AddrInfoType] = [] + for addr_info in addr_infos: + family = addr_info[0] + if family not in seen: + seen[family] = 0 + if seen[family] < interleave: + to_remove.append(addr_info) + seen[family] += 1 + for addr_info in to_remove: + addr_infos.remove(addr_info) + + +def _addr_tuple_to_ip_address( + addr: Union[Tuple[str, int], Tuple[str, int, int, int]] +) -> Union[ + Tuple[ipaddress.IPv4Address, int], Tuple[ipaddress.IPv6Address, int, int, int] +]: + """Convert an address tuple to an IPv4Address.""" + return (ipaddress.ip_address(addr[0]), *addr[1:]) + + +def remove_addr_infos( + addr_infos: List[AddrInfoType], + addr: Union[Tuple[str, int], Tuple[str, int, int, int]], +) -> None: + """ + Remove an address from the list of addr_infos. + + The addr value is typically the return value of + sock.getpeername(). + """ + bad_addrs_infos: List[AddrInfoType] = [] + for addr_info in addr_infos: + if addr_info[-1] == addr: + bad_addrs_infos.append(addr_info) + if bad_addrs_infos: + for bad_addr_info in bad_addrs_infos: + addr_infos.remove(bad_addr_info) + return + # Slow path in case addr is formatted differently + match_addr = _addr_tuple_to_ip_address(addr) + for addr_info in addr_infos: + if match_addr == _addr_tuple_to_ip_address(addr_info[-1]): + bad_addrs_infos.append(addr_info) + if bad_addrs_infos: + for bad_addr_info in bad_addrs_infos: + addr_infos.remove(bad_addr_info) + return + raise ValueError(f"Address {addr} not found in addr_infos") diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/INSTALLER b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/INSTALLER new file mode 100644 index 000000000..ce3133dc1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/INSTALLER @@ -0,0 +1 @@ +Poetry 2.2.1 \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/LICENSE.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/LICENSE.txt new file mode 100644 index 000000000..e497a322f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/LICENSE.txt @@ -0,0 +1,13 @@ + Copyright aio-libs contributors. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
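Pulling the `aiohappyeyeballs` pieces above together, a minimal sketch of the pruning pattern the `utils` docstrings hint at: the address that won the connection race is remembered via `getpeername()`, and if that peer later fails it is removed from the cached `addr_infos` so a retry skips it. The `fetch_head` helper, the HTTP request it sends, and the 250 ms delay are illustrative assumptions, not code from the package.

```python
import asyncio
import socket

from aiohappyeyeballs import remove_addr_infos, start_connection


async def fetch_head(host: str, port: int = 80) -> bytes:
    """Resolve once, connect with Happy Eyeballs, prune a bad peer on failure."""
    loop = asyncio.get_running_loop()
    addr_infos = await loop.getaddrinfo(host, port, type=socket.SOCK_STREAM)
    sock = await start_connection(addr_infos, happy_eyeballs_delay=0.25)
    peer = sock.getpeername()  # the address that actually won the race
    try:
        request = b"HEAD / HTTP/1.0\r\nHost: %s\r\n\r\n" % host.encode()
        await loop.sock_sendall(sock, request)
        return await loop.sock_recv(sock, 1024)
    except OSError:
        # The peer accepted the TCP connection but failed afterwards:
        # drop it from the cached addr_infos so a retry skips it.
        remove_addr_infos(addr_infos, peer)
        raise
    finally:
        sock.close()
```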
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/METADATA b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/METADATA new file mode 100644 index 000000000..29d03690f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/METADATA @@ -0,0 +1,246 @@ +Metadata-Version: 2.1 +Name: aiohttp +Version: 3.10.2 +Summary: Async http client/server framework (asyncio) +Home-page: https://github.com/aio-libs/aiohttp +Maintainer: aiohttp team +Maintainer-email: team@aiohttp.org +License: Apache 2 +Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org +Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org +Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI +Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp +Project-URL: Docs: Changelog, https://docs.aiohttp.org/en/stable/changes.html +Project-URL: Docs: RTD, https://docs.aiohttp.org +Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues +Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp +Classifier: Development Status :: 5 - Production/Stable +Classifier: Framework :: AsyncIO +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: POSIX +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-File: LICENSE.txt +Requires-Dist: aiohappyeyeballs >=2.3.0 +Requires-Dist: aiosignal >=1.1.2 +Requires-Dist: attrs >=17.3.0 +Requires-Dist: frozenlist >=1.1.1 +Requires-Dist: multidict <7.0,>=4.5 +Requires-Dist: yarl <2.0,>=1.0 +Requires-Dist: async-timeout <5.0,>=4.0 ; python_version < "3.11" +Provides-Extra: speedups +Requires-Dist: brotlicffi ; (platform_python_implementation != "CPython") and extra == 'speedups' +Requires-Dist: Brotli ; (platform_python_implementation == "CPython") and extra == 'speedups' +Requires-Dist: aiodns >=3.2.0 ; (sys_platform == "linux" or sys_platform == "darwin") and extra == 'speedups' + +================================== +Async http client/server framework +================================== + +.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg + :height: 64px + :width: 64px + :alt: aiohttp logo + +| + +.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg + :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI + :alt: GitHub Actions status for master branch + +.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg + :target: https://codecov.io/gh/aio-libs/aiohttp + :alt: codecov.io status for master branch + +.. image:: https://badge.fury.io/py/aiohttp.svg + :target: https://pypi.org/project/aiohttp + :alt: Latest PyPI package version + +.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest + :target: https://docs.aiohttp.org/ + :alt: Latest Read The Docs + +.. 
image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat + :target: https://matrix.to/#/%23aio-libs:matrix.org + :alt: Matrix Room — #aio-libs:matrix.org + +.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat + :target: https://matrix.to/#/%23aio-libs-space:matrix.org + :alt: Matrix Space — #aio-libs-space:matrix.org + + +Key Features +============ + +- Supports both client and server side of HTTP protocol. +- Supports both client and server Web-Sockets out-of-the-box and avoids + Callback Hell. +- Provides Web-server with middleware and pluggable routing. + + +Getting started +=============== + +Client +------ + +To get something from the web: + +.. code-block:: python + + import aiohttp + import asyncio + + async def main(): + + async with aiohttp.ClientSession() as session: + async with session.get('http://python.org') as response: + + print("Status:", response.status) + print("Content-type:", response.headers['content-type']) + + html = await response.text() + print("Body:", html[:15], "...") + + asyncio.run(main()) + +This prints: + +.. code-block:: + + Status: 200 + Content-type: text/html; charset=utf-8 + Body: ... + +Coming from `requests `_ ? Read `why we need so many lines `_. + +Server +------ + +An example using a simple server: + +.. code-block:: python + + # examples/server_simple.py + from aiohttp import web + + async def handle(request): + name = request.match_info.get('name', "Anonymous") + text = "Hello, " + name + return web.Response(text=text) + + async def wshandle(request): + ws = web.WebSocketResponse() + await ws.prepare(request) + + async for msg in ws: + if msg.type == web.WSMsgType.text: + await ws.send_str("Hello, {}".format(msg.data)) + elif msg.type == web.WSMsgType.binary: + await ws.send_bytes(msg.data) + elif msg.type == web.WSMsgType.close: + break + + return ws + + + app = web.Application() + app.add_routes([web.get('/', handle), + web.get('/echo', wshandle), + web.get('/{name}', handle)]) + + if __name__ == '__main__': + web.run_app(app) + + +Documentation +============= + +https://aiohttp.readthedocs.io/ + + +Demos +===== + +https://github.com/aio-libs/aiohttp-demos + + +External links +============== + +* `Third party libraries + `_ +* `Built with aiohttp + `_ +* `Powered by aiohttp + `_ + +Feel free to make a Pull Request for adding your link to these pages! + + +Communication channels +====================== + +*aio-libs Discussions*: https://github.com/aio-libs/aiohttp/discussions + +*Matrix*: `#aio-libs:matrix.org `_ + +We support `Stack Overflow +`_. +Please add *aiohttp* tag to your question there. + +Requirements +============ + +- async-timeout_ +- attrs_ +- multidict_ +- yarl_ +- frozenlist_ + +Optionally you may install the aiodns_ library (highly recommended for sake of speed). + +.. _aiodns: https://pypi.python.org/pypi/aiodns +.. _attrs: https://github.com/python-attrs/attrs +.. _multidict: https://pypi.python.org/pypi/multidict +.. _frozenlist: https://pypi.org/project/frozenlist/ +.. _yarl: https://pypi.python.org/pypi/yarl +.. _async-timeout: https://pypi.python.org/pypi/async_timeout + +License +======= + +``aiohttp`` is offered under the Apache 2 license. 
+ + +Keepsafe +======== + +The aiohttp community would like to thank Keepsafe +(https://www.getkeepsafe.com) for its support in the early days of +the project. + + +Source code +=========== + +The latest developer version is available in a GitHub repository: +https://github.com/aio-libs/aiohttp + +Benchmarks +========== + +If you are interested in efficiency, the AsyncIO community maintains a +list of benchmarks on the official wiki: +https://github.com/python/asyncio/wiki/Benchmarks diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/RECORD b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/RECORD new file mode 100644 index 000000000..f8bd357f5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/RECORD @@ -0,0 +1,73 @@ +aiohttp/_headers.pxi,sha256=n701k28dVPjwRnx5j6LpJhLTfj7dqu2vJt7f0O60Oyg,2007 +aiohttp/_http_writer.pyx,sha256=aIHAp8g4ZV5kbGRdmZce-vXjELw2M6fGKyJuOdgYQqw,4575 +aiohttp/connector.py,sha256=ManOc795qXH7EGuQfRzSmopuA_GP6QV28zqItwI4JgU,57268 +aiohttp/web_request.py,sha256=GOllhrVqEUqNuAgnPluCnjzWVeUL1NH295JQ8O_Q-CI,29544 +aiohttp/_http_parser.pyx,sha256=4tli5RoYO24nI8HLl7nxHHlb7ccJOuHrA4pwQN2PTXA,28395 +aiohttp/web_server.py,sha256=jx3sQSQhyEP3V4loJZpqIiPXctXk7sTVFEkXcrxnjTw,2764 +aiohttp/log.py,sha256=BbNKx9e3VMIm0xYjZI0IcBBoS7wjdeIeSaiJE7-qK2g,325 +aiohttp/web_protocol.py,sha256=teHn1D7weKDI2-gXayhUqauRKeukqRnrji94zmC1NkQ,23530 +aiohttp/tcp_helpers.py,sha256=BSadqVWaBpMFDRWnhaaR941N9MiDZ7bdTrxgCb0CW-M,961 +aiohttp/_helpers.pyx,sha256=XeLbNft5X_4ifi8QB8i6TyrRuayijMSO3IDHeSA89uM,1049 +aiohttp/http_writer.py,sha256=fxpyRj_S3WcBl9fxxF05t8YYAUA-0jW5b_PjVSluT3Y,5933 +aiohttp/helpers.py,sha256=d08TmGpzks_a96Bns1gEVsudPUePRrF1Ccikhlkdwmc,30276 +aiohttp/http_websocket.py,sha256=9Kfp5e4TU1JJfEvJ7l1Kt6Cr2HZX3z_RIojN8BAriPI,26732 +aiohttp/_http_parser.cpython-38-x86_64-linux-gnu.so,sha256=CEpsFEXxz74xccG37MEcZwELVhlVWFt3yxvTwT2dPTw,2771872 +aiohttp/_helpers.pyi,sha256=ZoKiJSS51PxELhI2cmIr5737YjjZcJt7FbIRO3ym1Ss,202 +aiohttp/client_reqrep.py,sha256=ldCfLMCIdVZaTU-BUK-cTMxrARWoaAIduytb5cqAbj0,40897 +aiohttp/client_ws.py,sha256=71eMCVvVHB3obJeHJgdqpc4rgJlWeDbyBcJ8TMZZ12Q,13214 +aiohttp/_cparser.pxd,sha256=8jGIg-VJ9p3llwCakUYDsPGxA4HiZe9dmK9Jmtlz-5g,4318 +aiohttp/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 +aiohttp/_websocket.cpython-38-x86_64-linux-gnu.so,sha256=dx0ZqwR0h-IyAFm13fu1RXuhmX0oD1SMUONTH2qWfIs,237608 +aiohttp/formdata.py,sha256=WjHA1mieKlWwI5O3hi3-siqN0dWz_X04oXNNZje2z7Q,6521 +aiohttp/http_exceptions.py,sha256=rw6EER4AresvBZw0_V6eifa_3jrNZUnysMT2GLEWzSE,2715 +aiohttp/web_ws.py,sha256=sZONnwu5bWKgLHNxWaz3atUjpT25D0NTVJTOKYVlprE,20774 +aiohttp/web_routedef.py,sha256=6PpuqK4SOFFdbYx_jNx-EvauiXO8K_qQeh4oruVFW7A,6116 +aiohttp/multipart.py,sha256=xBmudauxLHuXajWQMVl647sNS9IzzMYeEMKay4C0qVw,34937 +aiohttp/pytest_plugin.py,sha256=ZdWZnXl-z5VPvhuR1HjA7GyOgNLGTguqu3JWQJXVf6c,11921 +aiohttp/client_proto.py,sha256=AJdFbmWOfNuQH9AA16N3Z5C_rmdhxSLRc7i5Qr3DSXc,10141 +aiohttp/_websocket.pyx,sha256=1XuOSNDCbyDrzF5uMA2isqausSs8l2jWTLDlNDLM9Io,1561 +aiohttp/abc.py,sha256=NaAMNASsemeZ-42PcN8w3O50u5h2e2qKX6ITk9C5ub4,6097 +aiohttp/client_exceptions.py,sha256=LtQgU4YQ2K43cua4MnCiYAYhGHFleOtmsTrmbA2uFnE,10729 +aiohttp/payload_streamer.py,sha256=eAS8S-UWfLkEMavRjP2Uu9amC3PnbV79wHTNDoRmYn8,2087 +aiohttp/typedefs.py,sha256=SWnfMWhKri_t8y3UVf3XqNsemfCZvYKXYlIg_wFafWw,1624 +aiohttp/__init__.py,sha256=GrY2mHVh7uoE9uQcnJeRCi7_aIbaTzsg-iIHSaTFeh0,7527 
+aiohttp/http.py,sha256=8o8j8xH70OWjnfTWA9V44NR785QPxEPrUtzMXiAVpwc,1842 +aiohttp/compression_utils.py,sha256=Yp6TfmFKrKwDcQqSY9mIPoeXvRGA1fWSm6FdHJ1jEnY,5055 +aiohttp/web.py,sha256=2nGTxJDXC8qpbueI_0NP22DqajRMouMqCBGLIEBex0Y,18051 +aiohttp/_helpers.cpython-38-x86_64-linux-gnu.so,sha256=PTv7XltQHzHO1Ug6-wDQ7E2GixBa-cVCq7itH4JU_NQ,517480 +aiohttp/web_runner.py,sha256=pZLhqfAAOWo0oRW0_OyUKUj4pNoWS3UjR_vW03AagVI,11687 +aiohttp/payload.py,sha256=22jQdD6IbTAo8tHB_6cPY-Iqswrl1Zz_5xK0YoRenrA,13565 +aiohttp/streams.py,sha256=LWlr0gE44cjKzBU9I15vWwlorPW8ZAU-M2Sgz_UdjWM,21128 +aiohttp/web_middlewares.py,sha256=q6i0GGiVvUlpGtsbZmp88-zFIKQHwYtDd5SpBvKFdEY,4032 +aiohttp/web_urldispatcher.py,sha256=c4hPeI6LF_GgZis49fy_tZU4M4oKjareHNCAeInecWQ,43771 +aiohttp/base_protocol.py,sha256=HJ5SxzbzYewj-sjoKMbD6i5rDYEv9Zo7Q_cyV3Wvn6o,2876 +aiohttp/locks.py,sha256=wRYFo1U82LwBBdqwU24JEPaoTAlKaaJd2FtfDKhkTb4,1136 +aiohttp/hdrs.py,sha256=uzn5agn_jXid2h-ky6Y0ZAQ8BrPeTGLDGr-weiMctso,4613 +aiohttp/_http_writer.cpython-38-x86_64-linux-gnu.so,sha256=dXSOoKtnu9S8yjz4aGp5d7qvSXGEjQXFOD1rA-okBMQ,462928 +aiohttp/http_parser.py,sha256=wiVJ_f8fKMGKSyJyiupip5gfFG3d-8vL-ZZ0hC0zL6s,35805 +aiohttp/web_exceptions.py,sha256=7nIuiwhZ39vJJ9KrWqArA5QcWbUdqkz2CLwEpJapeN8,10360 +aiohttp/web_response.py,sha256=U9lUi7wPVHHWtPojGsBIrfZ7nJi8ccxeNPmxGNE-45U,27908 +aiohttp/_find_header.pxd,sha256=0GfwFCPN2zxEKTO1_MA5sYq2UfzsG8kcV3aTqvwlz3g,68 +aiohttp/web_log.py,sha256=DOfOxGyh2U7K5K_w6O7ILdfGcs4qOdzHxOwj2-k3c6c,7801 +aiohttp/test_utils.py,sha256=D0JW3TLsM36mvd2jKmPSeKiOle9OA-03Rm9eLDJkSLk,21883 +aiohttp/web_fileresponse.py,sha256=-XLeNNle6rOoUo413Z9BLjo4i-Mz972VC-eRzuKxgUc,13695 +aiohttp/cookiejar.py,sha256=wVKNqlDq5HsihOhk93WCb0IqNALBfHgZASQ61trN_No,14560 +aiohttp/resolver.py,sha256=c876iBUokf860Aip98fjtUvhfFMiX8p5h9mtsN4xqpk,6397 +aiohttp/web_app.py,sha256=9crC7Wdk4h0ql0W6QD2DzJ1s6hpOTBvmBB1CdLUigBI,18331 +aiohttp/tracing.py,sha256=66XQwtdR5DHv8p953eeNL0l8o6iHDaNwH9bBaybHXD4,15137 +aiohttp/client.py,sha256=0AQQqNHumM7qhYNJlgciVC7QLPZLRB_yZh4Pgdgexcw,52658 +aiohttp/worker.py,sha256=bkozEd2rAzQS0qs4knnnplOmaZ4TNdYtqWXSXx9djEc,7965 +aiohttp/.hash/_http_writer.pyx.hash,sha256=3Qg3T3D-Ud73elzPHBufK0yEu9tP5jsu6g-aPKQY9gE,125 +aiohttp/.hash/_cparser.pxd.hash,sha256=hYa9Vje-oMs2eh_7MfCPOh2QW_1x1yCjcZuc7AmwLd0,121 +aiohttp/.hash/hdrs.py.hash,sha256=2oEszMWjYFTHoF2w4OcFCoM7osv4vY9KLLJCu9HP0xI,116 +aiohttp/.hash/_websocket.pyx.hash,sha256=M97f-Yti-4vnE4GNTD1s_DzKs-fG_ww3jle6EUvixnE,123 +aiohttp/.hash/_helpers.pyx.hash,sha256=5JQ6BlMBE4HnRaCGdkK9_wpL3ZSWpU1gyLYva0Wwx2c,121 +aiohttp/.hash/_helpers.pyi.hash,sha256=Ew4BZDc2LqFwszgZZUHHrJvw5P8HBhJ700n1Ntg52hE,121 +aiohttp/.hash/_http_parser.pyx.hash,sha256=ZBiVMEMMqlfeIhQReFEMF0iOYI3CaKsvyJTRTbo0qXM,125 +aiohttp/.hash/_find_header.pxd.hash,sha256=_mbpD6vM-CVCKq3ulUvsOAz5Wdo88wrDzfpOsMQaMNA,125 +aiohttp-3.10.2.dist-info/WHEEL,sha256=0LUMruz1qQG2-EekVp4I-4jhlItGIjSta7Y1ot6jCXk,147 +aiohttp-3.10.2.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8 +aiohttp-3.10.2.dist-info/LICENSE.txt,sha256=n4DQ2311WpQdtFchcsJw7L2PCCuiFd3QlZhZQu2Uqes,588 +aiohttp-3.10.2.dist-info/METADATA,sha256=9h1goMW9HAdCejvTSFxEfd37nEbj0yP1XCiD7rppnxM,7538 +aiohttp-3.10.2.dist-info/INSTALLER,sha256=sz0Tnp99msIbbLB51q7U9nA6AvRKcsOeUKhrHH0Ulg4,12 +aiohttp-3.10.2.dist-info/RECORD,, diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/WHEEL b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/WHEEL new file mode 100644 index 000000000..3f495f617 --- /dev/null 
+++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (72.1.0) +Root-Is-Purelib: false +Tag: cp38-cp38-manylinux_2_17_x86_64 +Tag: cp38-cp38-manylinux2014_x86_64 + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/top_level.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/top_level.txt new file mode 100644 index 000000000..ee4ba4f3d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp-3.10.2.dist-info/top_level.txt @@ -0,0 +1 @@ +aiohttp diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_cparser.pxd.hash b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_cparser.pxd.hash new file mode 100644 index 000000000..65e3d4ba4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_cparser.pxd.hash @@ -0,0 +1 @@ +f2318883e549f69de597009a914603b0f1b10381e265ef5d98af499ad973fb98 /home/runner/work/aiohttp/aiohttp/aiohttp/_cparser.pxd diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_find_header.pxd.hash b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_find_header.pxd.hash new file mode 100644 index 000000000..f006c2de5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_find_header.pxd.hash @@ -0,0 +1 @@ +d067f01423cddb3c442933b5fcc039b18ab651fcec1bc91c577693aafc25cf78 /home/runner/work/aiohttp/aiohttp/aiohttp/_find_header.pxd diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_helpers.pyi.hash b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_helpers.pyi.hash new file mode 100644 index 000000000..6a30d6325 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_helpers.pyi.hash @@ -0,0 +1 @@ +6682a22524b9d4fc442e123672622be7bdfb6238d9709b7b15b2113b7ca6d52b /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyi diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_helpers.pyx.hash b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_helpers.pyx.hash new file mode 100644 index 000000000..8f38727d7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_helpers.pyx.hash @@ -0,0 +1 @@ +5de2db35fb795ffe227e2f1007c8ba4f2ad1b9aca28cc48edc80c779203cf6e3 /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyx diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_http_parser.pyx.hash b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_http_parser.pyx.hash new file mode 100644 index 000000000..bfd0edea0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_http_parser.pyx.hash @@ -0,0 +1 @@ +e2d962e51a183b6e2723c1cb97b9f11c795bedc7093ae1eb038a7040dd8f4d70 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_parser.pyx diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_http_writer.pyx.hash b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_http_writer.pyx.hash new file mode 100644 index 000000000..8e1aaab04 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_http_writer.pyx.hash @@ -0,0 +1 @@ +6881c0a7c838655e646c645d99971efaf5e310bc3633a7c62b226e39d81842ac /home/runner/work/aiohttp/aiohttp/aiohttp/_http_writer.pyx diff --git 
a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_websocket.pyx.hash b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_websocket.pyx.hash new file mode 100644 index 000000000..ddbb4c7a6 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/_websocket.pyx.hash @@ -0,0 +1 @@ +d57b8e48d0c26f20ebcc5e6e300da2b2a6aeb12b3c9768d64cb0e53432ccf48a /home/runner/work/aiohttp/aiohttp/aiohttp/_websocket.pyx diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/hdrs.py.hash b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/hdrs.py.hash new file mode 100644 index 000000000..e0b81d767 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/.hash/hdrs.py.hash @@ -0,0 +1 @@ +bb39f96a09ff8d789dda1fa4cba63464043c06b3de4c62c31abfb07a231cb6ca /home/runner/work/aiohttp/aiohttp/aiohttp/hdrs.py diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/__init__.py new file mode 100644 index 000000000..f050229f0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/__init__.py @@ -0,0 +1,254 @@ +__version__ = "3.10.2" + +from typing import TYPE_CHECKING, Tuple + +from . import hdrs as hdrs +from .client import ( + BaseConnector, + ClientConnectionError, + ClientConnectorCertificateError, + ClientConnectorError, + ClientConnectorSSLError, + ClientError, + ClientHttpProxyError, + ClientOSError, + ClientPayloadError, + ClientProxyConnectionError, + ClientRequest, + ClientResponse, + ClientResponseError, + ClientSession, + ClientSSLError, + ClientTimeout, + ClientWebSocketResponse, + ConnectionTimeoutError, + ContentTypeError, + Fingerprint, + InvalidURL, + InvalidUrlClientError, + InvalidUrlRedirectClientError, + NamedPipeConnector, + NonHttpUrlClientError, + NonHttpUrlRedirectClientError, + RedirectClientError, + RequestInfo, + ServerConnectionError, + ServerDisconnectedError, + ServerFingerprintMismatch, + ServerTimeoutError, + SocketTimeoutError, + TCPConnector, + TooManyRedirects, + UnixConnector, + WSServerHandshakeError, + request, +) +from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar +from .formdata import FormData as FormData +from .helpers import BasicAuth, ChainMapProxy, ETag +from .http import ( + HttpVersion as HttpVersion, + HttpVersion10 as HttpVersion10, + HttpVersion11 as HttpVersion11, + WebSocketError as WebSocketError, + WSCloseCode as WSCloseCode, + WSMessage as WSMessage, + WSMsgType as WSMsgType, +) +from .multipart import ( + BadContentDispositionHeader as BadContentDispositionHeader, + BadContentDispositionParam as BadContentDispositionParam, + BodyPartReader as BodyPartReader, + MultipartReader as MultipartReader, + MultipartWriter as MultipartWriter, + content_disposition_filename as content_disposition_filename, + parse_content_disposition as parse_content_disposition, +) +from .payload import ( + PAYLOAD_REGISTRY as PAYLOAD_REGISTRY, + AsyncIterablePayload as AsyncIterablePayload, + BufferedReaderPayload as BufferedReaderPayload, + BytesIOPayload as BytesIOPayload, + BytesPayload as BytesPayload, + IOBasePayload as IOBasePayload, + JsonPayload as JsonPayload, + Payload as Payload, + StringIOPayload as StringIOPayload, + StringPayload as StringPayload, + TextIOPayload as TextIOPayload, + get_payload as get_payload, + payload_type as payload_type, +) +from .payload_streamer import streamer as streamer +from .resolver import ( + 
AsyncResolver as AsyncResolver, + DefaultResolver as DefaultResolver, + ThreadedResolver as ThreadedResolver, +) +from .streams import ( + EMPTY_PAYLOAD as EMPTY_PAYLOAD, + DataQueue as DataQueue, + EofStream as EofStream, + FlowControlDataQueue as FlowControlDataQueue, + StreamReader as StreamReader, +) +from .tracing import ( + TraceConfig as TraceConfig, + TraceConnectionCreateEndParams as TraceConnectionCreateEndParams, + TraceConnectionCreateStartParams as TraceConnectionCreateStartParams, + TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams, + TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams, + TraceConnectionReuseconnParams as TraceConnectionReuseconnParams, + TraceDnsCacheHitParams as TraceDnsCacheHitParams, + TraceDnsCacheMissParams as TraceDnsCacheMissParams, + TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams, + TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams, + TraceRequestChunkSentParams as TraceRequestChunkSentParams, + TraceRequestEndParams as TraceRequestEndParams, + TraceRequestExceptionParams as TraceRequestExceptionParams, + TraceRequestRedirectParams as TraceRequestRedirectParams, + TraceRequestStartParams as TraceRequestStartParams, + TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams, +) + +if TYPE_CHECKING: + # At runtime these are lazy-loaded at the bottom of the file. + from .worker import ( + GunicornUVLoopWebWorker as GunicornUVLoopWebWorker, + GunicornWebWorker as GunicornWebWorker, + ) + +__all__: Tuple[str, ...] = ( + "hdrs", + # client + "BaseConnector", + "ClientConnectionError", + "ClientConnectorCertificateError", + "ClientConnectorError", + "ClientConnectorSSLError", + "ClientError", + "ClientHttpProxyError", + "ClientOSError", + "ClientPayloadError", + "ClientProxyConnectionError", + "ClientResponse", + "ClientRequest", + "ClientResponseError", + "ClientSSLError", + "ClientSession", + "ClientTimeout", + "ClientWebSocketResponse", + "ConnectionTimeoutError", + "ContentTypeError", + "Fingerprint", + "InvalidURL", + "InvalidUrlClientError", + "InvalidUrlRedirectClientError", + "NonHttpUrlClientError", + "NonHttpUrlRedirectClientError", + "RedirectClientError", + "RequestInfo", + "ServerConnectionError", + "ServerDisconnectedError", + "ServerFingerprintMismatch", + "ServerTimeoutError", + "SocketTimeoutError", + "TCPConnector", + "TooManyRedirects", + "UnixConnector", + "NamedPipeConnector", + "WSServerHandshakeError", + "request", + # cookiejar + "CookieJar", + "DummyCookieJar", + # formdata + "FormData", + # helpers + "BasicAuth", + "ChainMapProxy", + "ETag", + # http + "HttpVersion", + "HttpVersion10", + "HttpVersion11", + "WSMsgType", + "WSCloseCode", + "WSMessage", + "WebSocketError", + # multipart + "BadContentDispositionHeader", + "BadContentDispositionParam", + "BodyPartReader", + "MultipartReader", + "MultipartWriter", + "content_disposition_filename", + "parse_content_disposition", + # payload + "AsyncIterablePayload", + "BufferedReaderPayload", + "BytesIOPayload", + "BytesPayload", + "IOBasePayload", + "JsonPayload", + "PAYLOAD_REGISTRY", + "Payload", + "StringIOPayload", + "StringPayload", + "TextIOPayload", + "get_payload", + "payload_type", + # payload_streamer + "streamer", + # resolver + "AsyncResolver", + "DefaultResolver", + "ThreadedResolver", + # streams + "DataQueue", + "EMPTY_PAYLOAD", + "EofStream", + "FlowControlDataQueue", + "StreamReader", + # tracing + "TraceConfig", + "TraceConnectionCreateEndParams", + "TraceConnectionCreateStartParams", + 
"TraceConnectionQueuedEndParams", + "TraceConnectionQueuedStartParams", + "TraceConnectionReuseconnParams", + "TraceDnsCacheHitParams", + "TraceDnsCacheMissParams", + "TraceDnsResolveHostEndParams", + "TraceDnsResolveHostStartParams", + "TraceRequestChunkSentParams", + "TraceRequestEndParams", + "TraceRequestExceptionParams", + "TraceRequestRedirectParams", + "TraceRequestStartParams", + "TraceResponseChunkReceivedParams", + # workers (imported lazily with __getattr__) + "GunicornUVLoopWebWorker", + "GunicornWebWorker", +) + + +def __dir__() -> Tuple[str, ...]: + return __all__ + ("__author__", "__doc__") + + +def __getattr__(name: str) -> object: + global GunicornUVLoopWebWorker, GunicornWebWorker + + # Importing gunicorn takes a long time (>100ms), so only import if actually needed. + if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"): + try: + from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw + except ImportError: + return None + + GunicornUVLoopWebWorker = guv # type: ignore[misc] + GunicornWebWorker = gw # type: ignore[misc] + return guv if name == "GunicornUVLoopWebWorker" else gw + + raise AttributeError(f"module {__name__} has no attribute {name}") diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_cparser.pxd b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_cparser.pxd new file mode 100644 index 000000000..c2cd5a92f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_cparser.pxd @@ -0,0 +1,158 @@ +from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t + + +cdef extern from "../vendor/llhttp/build/llhttp.h": + + struct llhttp__internal_s: + int32_t _index + void* _span_pos0 + void* _span_cb0 + int32_t error + const char* reason + const char* error_pos + void* data + void* _current + uint64_t content_length + uint8_t type + uint8_t method + uint8_t http_major + uint8_t http_minor + uint8_t header_state + uint8_t lenient_flags + uint8_t upgrade + uint8_t finish + uint16_t flags + uint16_t status_code + void* settings + + ctypedef llhttp__internal_s llhttp__internal_t + ctypedef llhttp__internal_t llhttp_t + + ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1 + ctypedef int (*llhttp_cb)(llhttp_t*) except -1 + + struct llhttp_settings_s: + llhttp_cb on_message_begin + llhttp_data_cb on_url + llhttp_data_cb on_status + llhttp_data_cb on_header_field + llhttp_data_cb on_header_value + llhttp_cb on_headers_complete + llhttp_data_cb on_body + llhttp_cb on_message_complete + llhttp_cb on_chunk_header + llhttp_cb on_chunk_complete + + llhttp_cb on_url_complete + llhttp_cb on_status_complete + llhttp_cb on_header_field_complete + llhttp_cb on_header_value_complete + + ctypedef llhttp_settings_s llhttp_settings_t + + enum llhttp_errno: + HPE_OK, + HPE_INTERNAL, + HPE_STRICT, + HPE_LF_EXPECTED, + HPE_UNEXPECTED_CONTENT_LENGTH, + HPE_CLOSED_CONNECTION, + HPE_INVALID_METHOD, + HPE_INVALID_URL, + HPE_INVALID_CONSTANT, + HPE_INVALID_VERSION, + HPE_INVALID_HEADER_TOKEN, + HPE_INVALID_CONTENT_LENGTH, + HPE_INVALID_CHUNK_SIZE, + HPE_INVALID_STATUS, + HPE_INVALID_EOF_STATE, + HPE_INVALID_TRANSFER_ENCODING, + HPE_CB_MESSAGE_BEGIN, + HPE_CB_HEADERS_COMPLETE, + HPE_CB_MESSAGE_COMPLETE, + HPE_CB_CHUNK_HEADER, + HPE_CB_CHUNK_COMPLETE, + HPE_PAUSED, + HPE_PAUSED_UPGRADE, + HPE_USER + + ctypedef llhttp_errno llhttp_errno_t + + enum llhttp_flags: + F_CHUNKED, + F_CONTENT_LENGTH + + enum llhttp_type: + HTTP_REQUEST, + HTTP_RESPONSE, + HTTP_BOTH + + enum llhttp_method: + 
HTTP_DELETE, + HTTP_GET, + HTTP_HEAD, + HTTP_POST, + HTTP_PUT, + HTTP_CONNECT, + HTTP_OPTIONS, + HTTP_TRACE, + HTTP_COPY, + HTTP_LOCK, + HTTP_MKCOL, + HTTP_MOVE, + HTTP_PROPFIND, + HTTP_PROPPATCH, + HTTP_SEARCH, + HTTP_UNLOCK, + HTTP_BIND, + HTTP_REBIND, + HTTP_UNBIND, + HTTP_ACL, + HTTP_REPORT, + HTTP_MKACTIVITY, + HTTP_CHECKOUT, + HTTP_MERGE, + HTTP_MSEARCH, + HTTP_NOTIFY, + HTTP_SUBSCRIBE, + HTTP_UNSUBSCRIBE, + HTTP_PATCH, + HTTP_PURGE, + HTTP_MKCALENDAR, + HTTP_LINK, + HTTP_UNLINK, + HTTP_SOURCE, + HTTP_PRI, + HTTP_DESCRIBE, + HTTP_ANNOUNCE, + HTTP_SETUP, + HTTP_PLAY, + HTTP_PAUSE, + HTTP_TEARDOWN, + HTTP_GET_PARAMETER, + HTTP_SET_PARAMETER, + HTTP_REDIRECT, + HTTP_RECORD, + HTTP_FLUSH + + ctypedef llhttp_method llhttp_method_t; + + void llhttp_settings_init(llhttp_settings_t* settings) + void llhttp_init(llhttp_t* parser, llhttp_type type, + const llhttp_settings_t* settings) + + llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len) + + int llhttp_should_keep_alive(const llhttp_t* parser) + + void llhttp_resume_after_upgrade(llhttp_t* parser) + + llhttp_errno_t llhttp_get_errno(const llhttp_t* parser) + const char* llhttp_get_error_reason(const llhttp_t* parser) + const char* llhttp_get_error_pos(const llhttp_t* parser) + + const char* llhttp_method_name(llhttp_method_t method) + + void llhttp_set_lenient_headers(llhttp_t* parser, int enabled) + void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled) + void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_find_header.pxd b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_find_header.pxd new file mode 100644 index 000000000..37a6c3726 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_find_header.pxd @@ -0,0 +1,2 @@ +cdef extern from "_find_header.h": + int find_header(char *, int) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_headers.pxi b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_headers.pxi new file mode 100644 index 000000000..3744721d4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_headers.pxi @@ -0,0 +1,83 @@ +# The file is autogenerated from aiohttp/hdrs.py +# Run ./tools/gen.py to update it after the origin changing. + +from . 
import hdrs +cdef tuple headers = ( + hdrs.ACCEPT, + hdrs.ACCEPT_CHARSET, + hdrs.ACCEPT_ENCODING, + hdrs.ACCEPT_LANGUAGE, + hdrs.ACCEPT_RANGES, + hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS, + hdrs.ACCESS_CONTROL_ALLOW_HEADERS, + hdrs.ACCESS_CONTROL_ALLOW_METHODS, + hdrs.ACCESS_CONTROL_ALLOW_ORIGIN, + hdrs.ACCESS_CONTROL_EXPOSE_HEADERS, + hdrs.ACCESS_CONTROL_MAX_AGE, + hdrs.ACCESS_CONTROL_REQUEST_HEADERS, + hdrs.ACCESS_CONTROL_REQUEST_METHOD, + hdrs.AGE, + hdrs.ALLOW, + hdrs.AUTHORIZATION, + hdrs.CACHE_CONTROL, + hdrs.CONNECTION, + hdrs.CONTENT_DISPOSITION, + hdrs.CONTENT_ENCODING, + hdrs.CONTENT_LANGUAGE, + hdrs.CONTENT_LENGTH, + hdrs.CONTENT_LOCATION, + hdrs.CONTENT_MD5, + hdrs.CONTENT_RANGE, + hdrs.CONTENT_TRANSFER_ENCODING, + hdrs.CONTENT_TYPE, + hdrs.COOKIE, + hdrs.DATE, + hdrs.DESTINATION, + hdrs.DIGEST, + hdrs.ETAG, + hdrs.EXPECT, + hdrs.EXPIRES, + hdrs.FORWARDED, + hdrs.FROM, + hdrs.HOST, + hdrs.IF_MATCH, + hdrs.IF_MODIFIED_SINCE, + hdrs.IF_NONE_MATCH, + hdrs.IF_RANGE, + hdrs.IF_UNMODIFIED_SINCE, + hdrs.KEEP_ALIVE, + hdrs.LAST_EVENT_ID, + hdrs.LAST_MODIFIED, + hdrs.LINK, + hdrs.LOCATION, + hdrs.MAX_FORWARDS, + hdrs.ORIGIN, + hdrs.PRAGMA, + hdrs.PROXY_AUTHENTICATE, + hdrs.PROXY_AUTHORIZATION, + hdrs.RANGE, + hdrs.REFERER, + hdrs.RETRY_AFTER, + hdrs.SEC_WEBSOCKET_ACCEPT, + hdrs.SEC_WEBSOCKET_EXTENSIONS, + hdrs.SEC_WEBSOCKET_KEY, + hdrs.SEC_WEBSOCKET_KEY1, + hdrs.SEC_WEBSOCKET_PROTOCOL, + hdrs.SEC_WEBSOCKET_VERSION, + hdrs.SERVER, + hdrs.SET_COOKIE, + hdrs.TE, + hdrs.TRAILER, + hdrs.TRANSFER_ENCODING, + hdrs.URI, + hdrs.UPGRADE, + hdrs.USER_AGENT, + hdrs.VARY, + hdrs.VIA, + hdrs.WWW_AUTHENTICATE, + hdrs.WANT_DIGEST, + hdrs.WARNING, + hdrs.X_FORWARDED_FOR, + hdrs.X_FORWARDED_HOST, + hdrs.X_FORWARDED_PROTO, +) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_helpers.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_helpers.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..b2ae1d4cb Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_helpers.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_helpers.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_helpers.pyi new file mode 100644 index 000000000..1e3589370 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_helpers.pyi @@ -0,0 +1,6 @@ +from typing import Any + +class reify: + def __init__(self, wrapped: Any) -> None: ... + def __get__(self, inst: Any, owner: Any) -> Any: ... + def __set__(self, inst: Any, value: Any) -> None: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_helpers.pyx b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_helpers.pyx new file mode 100644 index 000000000..665f367c5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_helpers.pyx @@ -0,0 +1,35 @@ +cdef class reify: + """Use as a class method decorator. It operates almost exactly like + the Python `@property` decorator, but it puts the result of the + method it decorates into the instance dict after the first call, + effectively replacing the function it decorates with an instance + variable. It is, in Python parlance, a data descriptor. 
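    A hedged usage sketch (the class below is illustrative; the real
    aiohttp classes that use ``reify`` provide the ``_cache`` dict the
    descriptor stores into)::

        class Request:
            def __init__(self):
                self._cache = {}          # reify keeps computed values here

            @reify
            def token(self):
                print("computed once")
                return object()

        r = Request()
        assert r.token is r.token         # second access is served from _cache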
+ + """ + + cdef object wrapped + cdef object name + + def __init__(self, wrapped): + self.wrapped = wrapped + self.name = wrapped.__name__ + + @property + def __doc__(self): + return self.wrapped.__doc__ + + def __get__(self, inst, owner): + try: + try: + return inst._cache[self.name] + except KeyError: + val = self.wrapped(inst) + inst._cache[self.name] = val + return val + except AttributeError: + if inst is None: + return self + raise + + def __set__(self, inst, value): + raise AttributeError("reified property is read-only") diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_http_parser.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_http_parser.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..c5906c02f Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_http_parser.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_http_parser.pyx b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_http_parser.pyx new file mode 100644 index 000000000..dd317edaf --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_http_parser.pyx @@ -0,0 +1,841 @@ +#cython: language_level=3 +# +# Based on https://github.com/MagicStack/httptools +# + +from cpython cimport ( + Py_buffer, + PyBUF_SIMPLE, + PyBuffer_Release, + PyBytes_AsString, + PyBytes_AsStringAndSize, + PyObject_GetBuffer, +) +from cpython.mem cimport PyMem_Free, PyMem_Malloc +from libc.limits cimport ULLONG_MAX +from libc.string cimport memcpy + +from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy +from yarl import URL as _URL + +from aiohttp import hdrs +from aiohttp.helpers import DEBUG, set_exception + +from .http_exceptions import ( + BadHttpMessage, + BadStatusLine, + ContentLengthError, + InvalidHeader, + InvalidURLError, + LineTooLong, + PayloadEncodingError, + TransferEncodingError, +) +from .http_parser import DeflateBuffer as _DeflateBuffer +from .http_writer import ( + HttpVersion as _HttpVersion, + HttpVersion10 as _HttpVersion10, + HttpVersion11 as _HttpVersion11, +) +from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader + +cimport cython + +from aiohttp cimport _cparser as cparser + +include "_headers.pxi" + +from aiohttp cimport _find_header + +ALLOWED_UPGRADES = frozenset({"websocket"}) +DEF DEFAULT_FREELIST_SIZE = 250 + +cdef extern from "Python.h": + int PyByteArray_Resize(object, Py_ssize_t) except -1 + Py_ssize_t PyByteArray_Size(object) except -1 + char* PyByteArray_AsString(object) + +__all__ = ('HttpRequestParser', 'HttpResponseParser', + 'RawRequestMessage', 'RawResponseMessage') + +cdef object URL = _URL +cdef object URL_build = URL.build +cdef object CIMultiDict = _CIMultiDict +cdef object CIMultiDictProxy = _CIMultiDictProxy +cdef object HttpVersion = _HttpVersion +cdef object HttpVersion10 = _HttpVersion10 +cdef object HttpVersion11 = _HttpVersion11 +cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 +cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING +cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD +cdef object StreamReader = _StreamReader +cdef object DeflateBuffer = _DeflateBuffer + + +cdef inline object extend(object buf, const char* at, size_t length): + cdef Py_ssize_t s + cdef char* ptr + s = PyByteArray_Size(buf) + PyByteArray_Resize(buf, s + length) + ptr = PyByteArray_AsString(buf) + memcpy(ptr + s, at, length) + + +DEF METHODS_COUNT = 
46; + +cdef list _http_method = [] + +for i in range(METHODS_COUNT): + _http_method.append( + cparser.llhttp_method_name( i).decode('ascii')) + + +cdef inline str http_method_str(int i): + if i < METHODS_COUNT: + return _http_method[i] + else: + return "" + +cdef inline object find_header(bytes raw_header): + cdef Py_ssize_t size + cdef char *buf + cdef int idx + PyBytes_AsStringAndSize(raw_header, &buf, &size) + idx = _find_header.find_header(buf, size) + if idx == -1: + return raw_header.decode('utf-8', 'surrogateescape') + return headers[idx] + + +@cython.freelist(DEFAULT_FREELIST_SIZE) +cdef class RawRequestMessage: + cdef readonly str method + cdef readonly str path + cdef readonly object version # HttpVersion + cdef readonly object headers # CIMultiDict + cdef readonly object raw_headers # tuple + cdef readonly object should_close + cdef readonly object compression + cdef readonly object upgrade + cdef readonly object chunked + cdef readonly object url # yarl.URL + + def __init__(self, method, path, version, headers, raw_headers, + should_close, compression, upgrade, chunked, url): + self.method = method + self.path = path + self.version = version + self.headers = headers + self.raw_headers = raw_headers + self.should_close = should_close + self.compression = compression + self.upgrade = upgrade + self.chunked = chunked + self.url = url + + def __repr__(self): + info = [] + info.append(("method", self.method)) + info.append(("path", self.path)) + info.append(("version", self.version)) + info.append(("headers", self.headers)) + info.append(("raw_headers", self.raw_headers)) + info.append(("should_close", self.should_close)) + info.append(("compression", self.compression)) + info.append(("upgrade", self.upgrade)) + info.append(("chunked", self.chunked)) + info.append(("url", self.url)) + sinfo = ', '.join(name + '=' + repr(val) for name, val in info) + return '' + + def _replace(self, **dct): + cdef RawRequestMessage ret + ret = _new_request_message(self.method, + self.path, + self.version, + self.headers, + self.raw_headers, + self.should_close, + self.compression, + self.upgrade, + self.chunked, + self.url) + if "method" in dct: + ret.method = dct["method"] + if "path" in dct: + ret.path = dct["path"] + if "version" in dct: + ret.version = dct["version"] + if "headers" in dct: + ret.headers = dct["headers"] + if "raw_headers" in dct: + ret.raw_headers = dct["raw_headers"] + if "should_close" in dct: + ret.should_close = dct["should_close"] + if "compression" in dct: + ret.compression = dct["compression"] + if "upgrade" in dct: + ret.upgrade = dct["upgrade"] + if "chunked" in dct: + ret.chunked = dct["chunked"] + if "url" in dct: + ret.url = dct["url"] + return ret + +cdef _new_request_message(str method, + str path, + object version, + object headers, + object raw_headers, + bint should_close, + object compression, + bint upgrade, + bint chunked, + object url): + cdef RawRequestMessage ret + ret = RawRequestMessage.__new__(RawRequestMessage) + ret.method = method + ret.path = path + ret.version = version + ret.headers = headers + ret.raw_headers = raw_headers + ret.should_close = should_close + ret.compression = compression + ret.upgrade = upgrade + ret.chunked = chunked + ret.url = url + return ret + + +@cython.freelist(DEFAULT_FREELIST_SIZE) +cdef class RawResponseMessage: + cdef readonly object version # HttpVersion + cdef readonly int code + cdef readonly str reason + cdef readonly object headers # CIMultiDict + cdef readonly object raw_headers # tuple + cdef readonly object 
should_close + cdef readonly object compression + cdef readonly object upgrade + cdef readonly object chunked + + def __init__(self, version, code, reason, headers, raw_headers, + should_close, compression, upgrade, chunked): + self.version = version + self.code = code + self.reason = reason + self.headers = headers + self.raw_headers = raw_headers + self.should_close = should_close + self.compression = compression + self.upgrade = upgrade + self.chunked = chunked + + def __repr__(self): + info = [] + info.append(("version", self.version)) + info.append(("code", self.code)) + info.append(("reason", self.reason)) + info.append(("headers", self.headers)) + info.append(("raw_headers", self.raw_headers)) + info.append(("should_close", self.should_close)) + info.append(("compression", self.compression)) + info.append(("upgrade", self.upgrade)) + info.append(("chunked", self.chunked)) + sinfo = ', '.join(name + '=' + repr(val) for name, val in info) + return '' + + +cdef _new_response_message(object version, + int code, + str reason, + object headers, + object raw_headers, + bint should_close, + object compression, + bint upgrade, + bint chunked): + cdef RawResponseMessage ret + ret = RawResponseMessage.__new__(RawResponseMessage) + ret.version = version + ret.code = code + ret.reason = reason + ret.headers = headers + ret.raw_headers = raw_headers + ret.should_close = should_close + ret.compression = compression + ret.upgrade = upgrade + ret.chunked = chunked + return ret + + +@cython.internal +cdef class HttpParser: + + cdef: + cparser.llhttp_t* _cparser + cparser.llhttp_settings_t* _csettings + + bytearray _raw_name + bytearray _raw_value + bint _has_value + + object _protocol + object _loop + object _timer + + size_t _max_line_size + size_t _max_field_size + size_t _max_headers + bint _response_with_body + bint _read_until_eof + + bint _started + object _url + bytearray _buf + str _path + str _reason + object _headers + list _raw_headers + bint _upgraded + list _messages + object _payload + bint _payload_error + object _payload_exception + object _last_error + bint _auto_decompress + int _limit + + str _content_encoding + + Py_buffer py_buf + + def __cinit__(self): + self._cparser = \ + PyMem_Malloc(sizeof(cparser.llhttp_t)) + if self._cparser is NULL: + raise MemoryError() + + self._csettings = \ + PyMem_Malloc(sizeof(cparser.llhttp_settings_t)) + if self._csettings is NULL: + raise MemoryError() + + def __dealloc__(self): + PyMem_Free(self._cparser) + PyMem_Free(self._csettings) + + cdef _init( + self, cparser.llhttp_type mode, + object protocol, object loop, int limit, + object timer=None, + size_t max_line_size=8190, size_t max_headers=32768, + size_t max_field_size=8190, payload_exception=None, + bint response_with_body=True, bint read_until_eof=False, + bint auto_decompress=True, + ): + cparser.llhttp_settings_init(self._csettings) + cparser.llhttp_init(self._cparser, mode, self._csettings) + self._cparser.data = self + self._cparser.content_length = 0 + + self._protocol = protocol + self._loop = loop + self._timer = timer + + self._buf = bytearray() + self._payload = None + self._payload_error = 0 + self._payload_exception = payload_exception + self._messages = [] + + self._raw_name = bytearray() + self._raw_value = bytearray() + self._has_value = False + + self._max_line_size = max_line_size + self._max_headers = max_headers + self._max_field_size = max_field_size + self._response_with_body = response_with_body + self._read_until_eof = read_until_eof + self._upgraded = False + 
self._auto_decompress = auto_decompress + self._content_encoding = None + + self._csettings.on_url = cb_on_url + self._csettings.on_status = cb_on_status + self._csettings.on_header_field = cb_on_header_field + self._csettings.on_header_value = cb_on_header_value + self._csettings.on_headers_complete = cb_on_headers_complete + self._csettings.on_body = cb_on_body + self._csettings.on_message_begin = cb_on_message_begin + self._csettings.on_message_complete = cb_on_message_complete + self._csettings.on_chunk_header = cb_on_chunk_header + self._csettings.on_chunk_complete = cb_on_chunk_complete + + self._last_error = None + self._limit = limit + + cdef _process_header(self): + if self._raw_name: + raw_name = bytes(self._raw_name) + raw_value = bytes(self._raw_value) + + name = find_header(raw_name) + value = raw_value.decode('utf-8', 'surrogateescape') + + self._headers.add(name, value) + + if name is CONTENT_ENCODING: + self._content_encoding = value + + PyByteArray_Resize(self._raw_name, 0) + PyByteArray_Resize(self._raw_value, 0) + self._has_value = False + self._raw_headers.append((raw_name, raw_value)) + + cdef _on_header_field(self, char* at, size_t length): + cdef Py_ssize_t size + cdef char *buf + if self._has_value: + self._process_header() + + size = PyByteArray_Size(self._raw_name) + PyByteArray_Resize(self._raw_name, size + length) + buf = PyByteArray_AsString(self._raw_name) + memcpy(buf + size, at, length) + + cdef _on_header_value(self, char* at, size_t length): + cdef Py_ssize_t size + cdef char *buf + + size = PyByteArray_Size(self._raw_value) + PyByteArray_Resize(self._raw_value, size + length) + buf = PyByteArray_AsString(self._raw_value) + memcpy(buf + size, at, length) + self._has_value = True + + cdef _on_headers_complete(self): + self._process_header() + + should_close = not cparser.llhttp_should_keep_alive(self._cparser) + upgrade = self._cparser.upgrade + chunked = self._cparser.flags & cparser.F_CHUNKED + + raw_headers = tuple(self._raw_headers) + headers = CIMultiDictProxy(self._headers) + + if self._cparser.type == cparser.HTTP_REQUEST: + allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES + if allowed or self._cparser.method == cparser.HTTP_CONNECT: + self._upgraded = True + else: + if upgrade and self._cparser.status_code == 101: + self._upgraded = True + + # do not support old websocket spec + if SEC_WEBSOCKET_KEY1 in headers: + raise InvalidHeader(SEC_WEBSOCKET_KEY1) + + encoding = None + enc = self._content_encoding + if enc is not None: + self._content_encoding = None + enc = enc.lower() + if enc in ('gzip', 'deflate', 'br'): + encoding = enc + + if self._cparser.type == cparser.HTTP_REQUEST: + method = http_method_str(self._cparser.method) + msg = _new_request_message( + method, self._path, + self.http_version(), headers, raw_headers, + should_close, encoding, upgrade, chunked, self._url) + else: + msg = _new_response_message( + self.http_version(), self._cparser.status_code, self._reason, + headers, raw_headers, should_close, encoding, + upgrade, chunked) + + if ( + ULLONG_MAX > self._cparser.content_length > 0 or chunked or + self._cparser.method == cparser.HTTP_CONNECT or + (self._cparser.status_code >= 199 and + self._cparser.content_length == 0 and + self._read_until_eof) + ): + payload = StreamReader( + self._protocol, timer=self._timer, loop=self._loop, + limit=self._limit) + else: + payload = EMPTY_PAYLOAD + + self._payload = payload + if encoding is not None and self._auto_decompress: + self._payload = 
DeflateBuffer(payload, encoding) + + if not self._response_with_body: + payload = EMPTY_PAYLOAD + + self._messages.append((msg, payload)) + + cdef _on_message_complete(self): + self._payload.feed_eof() + self._payload = None + + cdef _on_chunk_header(self): + self._payload.begin_http_chunk_receiving() + + cdef _on_chunk_complete(self): + self._payload.end_http_chunk_receiving() + + cdef object _on_status_complete(self): + pass + + cdef inline http_version(self): + cdef cparser.llhttp_t* parser = self._cparser + + if parser.http_major == 1: + if parser.http_minor == 0: + return HttpVersion10 + elif parser.http_minor == 1: + return HttpVersion11 + + return HttpVersion(parser.http_major, parser.http_minor) + + ### Public API ### + + def feed_eof(self): + cdef bytes desc + + if self._payload is not None: + if self._cparser.flags & cparser.F_CHUNKED: + raise TransferEncodingError( + "Not enough data for satisfy transfer length header.") + elif self._cparser.flags & cparser.F_CONTENT_LENGTH: + raise ContentLengthError( + "Not enough data for satisfy content length header.") + elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK: + desc = cparser.llhttp_get_error_reason(self._cparser) + raise PayloadEncodingError(desc.decode('latin-1')) + else: + self._payload.feed_eof() + elif self._started: + self._on_headers_complete() + if self._messages: + return self._messages[-1][0] + + def feed_data(self, data): + cdef: + size_t data_len + size_t nb + cdef cparser.llhttp_errno_t errno + + PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) + data_len = self.py_buf.len + + errno = cparser.llhttp_execute( + self._cparser, + self.py_buf.buf, + data_len) + + if errno is cparser.HPE_PAUSED_UPGRADE: + cparser.llhttp_resume_after_upgrade(self._cparser) + + nb = cparser.llhttp_get_error_pos(self._cparser) - self.py_buf.buf + + PyBuffer_Release(&self.py_buf) + + if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE): + if self._payload_error == 0: + if self._last_error is not None: + ex = self._last_error + self._last_error = None + else: + after = cparser.llhttp_get_error_pos(self._cparser) + before = data[:after - self.py_buf.buf] + after_b = after.split(b"\r\n", 1)[0] + before = before.rsplit(b"\r\n", 1)[-1] + data = before + after_b + pointer = " " * (len(repr(before))-1) + "^" + ex = parser_error_from_errno(self._cparser, data, pointer) + self._payload = None + raise ex + + if self._messages: + messages = self._messages + self._messages = [] + else: + messages = () + + if self._upgraded: + return messages, True, data[nb:] + else: + return messages, False, b"" + + def set_upgraded(self, val): + self._upgraded = val + + +cdef class HttpRequestParser(HttpParser): + + def __init__( + self, protocol, loop, int limit, timer=None, + size_t max_line_size=8190, size_t max_headers=32768, + size_t max_field_size=8190, payload_exception=None, + bint response_with_body=True, bint read_until_eof=False, + bint auto_decompress=True, + ): + self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer, + max_line_size, max_headers, max_field_size, + payload_exception, response_with_body, read_until_eof, + auto_decompress) + + cdef object _on_status_complete(self): + cdef int idx1, idx2 + if not self._buf: + return + self._path = self._buf.decode('utf-8', 'surrogateescape') + try: + idx3 = len(self._path) + if self._cparser.method == cparser.HTTP_CONNECT: + # authority-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3 + self._url = URL.build(authority=self._path, encoded=True) + elif idx3 > 
1 and self._path[0] == '/': + # origin-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1 + idx1 = self._path.find("?") + if idx1 == -1: + query = "" + idx2 = self._path.find("#") + if idx2 == -1: + path = self._path + fragment = "" + else: + path = self._path[0: idx2] + fragment = self._path[idx2+1:] + + else: + path = self._path[0:idx1] + idx1 += 1 + idx2 = self._path.find("#", idx1+1) + if idx2 == -1: + query = self._path[idx1:] + fragment = "" + else: + query = self._path[idx1: idx2] + fragment = self._path[idx2+1:] + + self._url = URL.build( + path=path, + query_string=query, + fragment=fragment, + encoded=True, + ) + else: + # absolute-form for proxy maybe, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2 + self._url = URL(self._path, encoded=True) + finally: + PyByteArray_Resize(self._buf, 0) + + +cdef class HttpResponseParser(HttpParser): + + def __init__( + self, protocol, loop, int limit, timer=None, + size_t max_line_size=8190, size_t max_headers=32768, + size_t max_field_size=8190, payload_exception=None, + bint response_with_body=True, bint read_until_eof=False, + bint auto_decompress=True + ): + self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer, + max_line_size, max_headers, max_field_size, + payload_exception, response_with_body, read_until_eof, + auto_decompress) + # Use strict parsing on dev mode, so users are warned about broken servers. + if not DEBUG: + cparser.llhttp_set_lenient_headers(self._cparser, 1) + cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1) + cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1) + + cdef object _on_status_complete(self): + if self._buf: + self._reason = self._buf.decode('utf-8', 'surrogateescape') + PyByteArray_Resize(self._buf, 0) + else: + self._reason = self._reason or '' + +cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + + pyparser._started = True + pyparser._headers = CIMultiDict() + pyparser._raw_headers = [] + PyByteArray_Resize(pyparser._buf, 0) + pyparser._path = None + pyparser._reason = None + return 0 + + +cdef int cb_on_url(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + try: + if length > pyparser._max_line_size: + raise LineTooLong( + 'Status line is too long', pyparser._max_line_size, length) + extend(pyparser._buf, at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_status(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef str reason + try: + if length > pyparser._max_line_size: + raise LineTooLong( + 'Status line is too long', pyparser._max_line_size, length) + extend(pyparser._buf, at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_header_field(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef Py_ssize_t size + try: + pyparser._on_status_complete() + size = len(pyparser._raw_name) + length + if size > pyparser._max_field_size: + raise LineTooLong( + 'Header name is too long', pyparser._max_field_size, size) + pyparser._on_header_field(at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_header_value(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef 
HttpParser pyparser = parser.data + cdef Py_ssize_t size + try: + size = len(pyparser._raw_value) + length + if size > pyparser._max_field_size: + raise LineTooLong( + 'Header value is too long', pyparser._max_field_size, size) + pyparser._on_header_value(at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._on_status_complete() + pyparser._on_headers_complete() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + if pyparser._upgraded or pyparser._cparser.method == cparser.HTTP_CONNECT: + return 2 + else: + return 0 + + +cdef int cb_on_body(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef bytes body = at[:length] + try: + pyparser._payload.feed_data(body, length) + except BaseException as underlying_exc: + reraised_exc = underlying_exc + if pyparser._payload_exception is not None: + reraised_exc = pyparser._payload_exception(str(underlying_exc)) + + set_exception(pyparser._payload, reraised_exc, underlying_exc) + + pyparser._payload_error = 1 + return -1 + else: + return 0 + + +cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._started = False + pyparser._on_message_complete() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + return 0 + + +cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._on_chunk_header() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + return 0 + + +cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._on_chunk_complete() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + return 0 + + +cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer): + cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser) + cdef bytes desc = cparser.llhttp_get_error_reason(parser) + + err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer) + + if errno in {cparser.HPE_CB_MESSAGE_BEGIN, + cparser.HPE_CB_HEADERS_COMPLETE, + cparser.HPE_CB_MESSAGE_COMPLETE, + cparser.HPE_CB_CHUNK_HEADER, + cparser.HPE_CB_CHUNK_COMPLETE, + cparser.HPE_INVALID_CONSTANT, + cparser.HPE_INVALID_HEADER_TOKEN, + cparser.HPE_INVALID_CONTENT_LENGTH, + cparser.HPE_INVALID_CHUNK_SIZE, + cparser.HPE_INVALID_EOF_STATE, + cparser.HPE_INVALID_TRANSFER_ENCODING}: + return BadHttpMessage(err_msg) + elif errno in {cparser.HPE_INVALID_STATUS, + cparser.HPE_INVALID_METHOD, + cparser.HPE_INVALID_VERSION}: + return BadStatusLine(error=err_msg) + elif errno == cparser.HPE_INVALID_URL: + return InvalidURLError(err_msg) + + return BadHttpMessage(err_msg) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_http_writer.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_http_writer.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..7834f3d82 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_http_writer.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_http_writer.pyx 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_http_writer.pyx new file mode 100644 index 000000000..eff852195 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_http_writer.pyx @@ -0,0 +1,163 @@ +from cpython.bytes cimport PyBytes_FromStringAndSize +from cpython.exc cimport PyErr_NoMemory +from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc +from cpython.object cimport PyObject_Str +from libc.stdint cimport uint8_t, uint64_t +from libc.string cimport memcpy + +from multidict import istr + +DEF BUF_SIZE = 16 * 1024 # 16KiB +cdef char BUFFER[BUF_SIZE] + +cdef object _istr = istr + + +# ----------------- writer --------------------------- + +cdef struct Writer: + char *buf + Py_ssize_t size + Py_ssize_t pos + + +cdef inline void _init_writer(Writer* writer): + writer.buf = &BUFFER[0] + writer.size = BUF_SIZE + writer.pos = 0 + + +cdef inline void _release_writer(Writer* writer): + if writer.buf != BUFFER: + PyMem_Free(writer.buf) + + +cdef inline int _write_byte(Writer* writer, uint8_t ch): + cdef char * buf + cdef Py_ssize_t size + + if writer.pos == writer.size: + # reallocate + size = writer.size + BUF_SIZE + if writer.buf == BUFFER: + buf = PyMem_Malloc(size) + if buf == NULL: + PyErr_NoMemory() + return -1 + memcpy(buf, writer.buf, writer.size) + else: + buf = PyMem_Realloc(writer.buf, size) + if buf == NULL: + PyErr_NoMemory() + return -1 + writer.buf = buf + writer.size = size + writer.buf[writer.pos] = ch + writer.pos += 1 + return 0 + + +cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol): + cdef uint64_t utf = symbol + + if utf < 0x80: + return _write_byte(writer, utf) + elif utf < 0x800: + if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: + return -1 + return _write_byte(writer, (0x80 | (utf & 0x3f))) + elif 0xD800 <= utf <= 0xDFFF: + # surogate pair, ignored + return 0 + elif utf < 0x10000: + if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: + return -1 + if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: + return -1 + return _write_byte(writer, (0x80 | (utf & 0x3f))) + elif utf > 0x10FFFF: + # symbol is too large + return 0 + else: + if _write_byte(writer, (0xf0 | (utf >> 18))) < 0: + return -1 + if _write_byte(writer, + (0x80 | ((utf >> 12) & 0x3f))) < 0: + return -1 + if _write_byte(writer, + (0x80 | ((utf >> 6) & 0x3f))) < 0: + return -1 + return _write_byte(writer, (0x80 | (utf & 0x3f))) + + +cdef inline int _write_str(Writer* writer, str s): + cdef Py_UCS4 ch + for ch in s: + if _write_utf8(writer, ch) < 0: + return -1 + + +# --------------- _serialize_headers ---------------------- + +cdef str to_str(object s): + typ = type(s) + if typ is str: + return s + elif typ is _istr: + return PyObject_Str(s) + elif not isinstance(s, str): + raise TypeError("Cannot serialize non-str key {!r}".format(s)) + else: + return str(s) + + +cdef void _safe_header(str string) except *: + if "\r" in string or "\n" in string: + raise ValueError( + "Newline or carriage return character detected in HTTP status message or " + "header. This is a potential security issue." 
+ ) + + +def _serialize_headers(str status_line, headers): + cdef Writer writer + cdef object key + cdef object val + cdef bytes ret + + _init_writer(&writer) + + for key, val in headers.items(): + _safe_header(to_str(key)) + _safe_header(to_str(val)) + + try: + if _write_str(&writer, status_line) < 0: + raise + if _write_byte(&writer, b'\r') < 0: + raise + if _write_byte(&writer, b'\n') < 0: + raise + + for key, val in headers.items(): + if _write_str(&writer, to_str(key)) < 0: + raise + if _write_byte(&writer, b':') < 0: + raise + if _write_byte(&writer, b' ') < 0: + raise + if _write_str(&writer, to_str(val)) < 0: + raise + if _write_byte(&writer, b'\r') < 0: + raise + if _write_byte(&writer, b'\n') < 0: + raise + + if _write_byte(&writer, b'\r') < 0: + raise + if _write_byte(&writer, b'\n') < 0: + raise + + return PyBytes_FromStringAndSize(writer.buf, writer.pos) + finally: + _release_writer(&writer) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_websocket.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_websocket.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..3a0afa730 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_websocket.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_websocket.pyx b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_websocket.pyx new file mode 100644 index 000000000..94318d2b1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/_websocket.pyx @@ -0,0 +1,56 @@ +from cpython cimport PyBytes_AsString + + +#from cpython cimport PyByteArray_AsString # cython still not exports that +cdef extern from "Python.h": + char* PyByteArray_AsString(bytearray ba) except NULL + +from libc.stdint cimport uint32_t, uint64_t, uintmax_t + + +def _websocket_mask_cython(object mask, object data): + """Note, this function mutates its `data` argument + """ + cdef: + Py_ssize_t data_len, i + # bit operations on signed integers are implementation-specific + unsigned char * in_buf + const unsigned char * mask_buf + uint32_t uint32_msk + uint64_t uint64_msk + + assert len(mask) == 4 + + if not isinstance(mask, bytes): + mask = bytes(mask) + + if isinstance(data, bytearray): + data = data + else: + data = bytearray(data) + + data_len = len(data) + in_buf = PyByteArray_AsString(data) + mask_buf = PyBytes_AsString(mask) + uint32_msk = (mask_buf)[0] + + # TODO: align in_data ptr to achieve even faster speeds + # does it need in python ?! 
malloc() always aligns to sizeof(long) bytes + + if sizeof(size_t) >= 8: + uint64_msk = uint32_msk + uint64_msk = (uint64_msk << 32) | uint32_msk + + while data_len >= 8: + (in_buf)[0] ^= uint64_msk + in_buf += 8 + data_len -= 8 + + + while data_len >= 4: + (in_buf)[0] ^= uint32_msk + in_buf += 4 + data_len -= 4 + + for i in range(0, data_len): + in_buf[i] ^= mask_buf[i] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/abc.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/abc.py new file mode 100644 index 000000000..3fb024048 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/abc.py @@ -0,0 +1,234 @@ +import asyncio +import logging +import socket +from abc import ABC, abstractmethod +from collections.abc import Sized +from http.cookies import BaseCookie, Morsel +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Dict, + Generator, + Iterable, + List, + Optional, + Tuple, + TypedDict, +) + +from multidict import CIMultiDict +from yarl import URL + +from .typedefs import LooseCookies + +if TYPE_CHECKING: + from .web_app import Application + from .web_exceptions import HTTPException + from .web_request import BaseRequest, Request + from .web_response import StreamResponse +else: + BaseRequest = Request = Application = StreamResponse = None + HTTPException = None + + +class AbstractRouter(ABC): + def __init__(self) -> None: + self._frozen = False + + def post_init(self, app: Application) -> None: + """Post init stage. + + Not an abstract method for sake of backward compatibility, + but if the router wants to be aware of the application + it can override this. + """ + + @property + def frozen(self) -> bool: + return self._frozen + + def freeze(self) -> None: + """Freeze router.""" + self._frozen = True + + @abstractmethod + async def resolve(self, request: Request) -> "AbstractMatchInfo": + """Return MATCH_INFO for given request""" + + +class AbstractMatchInfo(ABC): + @property # pragma: no branch + @abstractmethod + def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]: + """Execute matched request handler""" + + @property + @abstractmethod + def expect_handler( + self, + ) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]: + """Expect handler for 100-continue processing""" + + @property # pragma: no branch + @abstractmethod + def http_exception(self) -> Optional[HTTPException]: + """HTTPException instance raised on router's resolving, or None""" + + @abstractmethod # pragma: no branch + def get_info(self) -> Dict[str, Any]: + """Return a dict with additional info useful for introspection""" + + @property # pragma: no branch + @abstractmethod + def apps(self) -> Tuple[Application, ...]: + """Stack of nested applications. + + Top level application is left-most element. + + """ + + @abstractmethod + def add_app(self, app: Application) -> None: + """Add application to the nested apps stack.""" + + @abstractmethod + def freeze(self) -> None: + """Freeze the match info. + + The method is called after route resolution. + + After the call .add_app() is forbidden. + + """ + + +class AbstractView(ABC): + """Abstract class based view.""" + + def __init__(self, request: Request) -> None: + self._request = request + + @property + def request(self) -> Request: + """Request instance.""" + return self._request + + @abstractmethod + def __await__(self) -> Generator[Any, None, StreamResponse]: + """Execute the view handler.""" + + +class ResolveResult(TypedDict): + """Resolve result. 
+ + This is the result returned from an AbstractResolver's + resolve method. + + :param hostname: The hostname that was provided. + :param host: The IP address that was resolved. + :param port: The port that was resolved. + :param family: The address family that was resolved. + :param proto: The protocol that was resolved. + :param flags: The flags that were resolved. + """ + + hostname: str + host: str + port: int + family: int + proto: int + flags: int + + +class AbstractResolver(ABC): + """Abstract DNS resolver.""" + + @abstractmethod + async def resolve( + self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET + ) -> List[ResolveResult]: + """Return IP address for given hostname""" + + @abstractmethod + async def close(self) -> None: + """Release resolver""" + + +if TYPE_CHECKING: + IterableBase = Iterable[Morsel[str]] +else: + IterableBase = Iterable + + +ClearCookiePredicate = Callable[["Morsel[str]"], bool] + + +class AbstractCookieJar(Sized, IterableBase): + """Abstract Cookie Jar.""" + + def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + self._loop = loop or asyncio.get_running_loop() + + @abstractmethod + def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + """Clear all cookies if no predicate is passed.""" + + @abstractmethod + def clear_domain(self, domain: str) -> None: + """Clear all cookies for domain and all subdomains.""" + + @abstractmethod + def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: + """Update cookies.""" + + @abstractmethod + def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": + """Return the jar's cookies filtered by their attributes.""" + + +class AbstractStreamWriter(ABC): + """Abstract stream writer.""" + + buffer_size = 0 + output_size = 0 + length: Optional[int] = 0 + + @abstractmethod + async def write(self, chunk: bytes) -> None: + """Write chunk into stream.""" + + @abstractmethod + async def write_eof(self, chunk: bytes = b"") -> None: + """Write last chunk.""" + + @abstractmethod + async def drain(self) -> None: + """Flush the write buffer.""" + + @abstractmethod + def enable_compression(self, encoding: str = "deflate") -> None: + """Enable HTTP body compression""" + + @abstractmethod + def enable_chunking(self) -> None: + """Enable HTTP chunked mode""" + + @abstractmethod + async def write_headers( + self, status_line: str, headers: "CIMultiDict[str]" + ) -> None: + """Write HTTP headers""" + + +class AbstractAccessLogger(ABC): + """Abstract writer to access log.""" + + def __init__(self, logger: logging.Logger, log_format: str) -> None: + self.logger = logger + self.log_format = log_format + + @abstractmethod + def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: + """Emit log to logger.""" diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/base_protocol.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/base_protocol.py new file mode 100644 index 000000000..dc1f24f99 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/base_protocol.py @@ -0,0 +1,95 @@ +import asyncio +from typing import Optional, cast + +from .helpers import set_exception +from .tcp_helpers import tcp_nodelay + + +class BaseProtocol(asyncio.Protocol): + __slots__ = ( + "_loop", + "_paused", + "_drain_waiter", + "_connection_lost", + "_reading_paused", + "transport", + ) + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop: 
asyncio.AbstractEventLoop = loop + self._paused = False + self._drain_waiter: Optional[asyncio.Future[None]] = None + self._reading_paused = False + + self.transport: Optional[asyncio.Transport] = None + + @property + def connected(self) -> bool: + """Return True if the connection is open.""" + return self.transport is not None + + def pause_writing(self) -> None: + assert not self._paused + self._paused = True + + def resume_writing(self) -> None: + assert self._paused + self._paused = False + + waiter = self._drain_waiter + if waiter is not None: + self._drain_waiter = None + if not waiter.done(): + waiter.set_result(None) + + def pause_reading(self) -> None: + if not self._reading_paused and self.transport is not None: + try: + self.transport.pause_reading() + except (AttributeError, NotImplementedError, RuntimeError): + pass + self._reading_paused = True + + def resume_reading(self) -> None: + if self._reading_paused and self.transport is not None: + try: + self.transport.resume_reading() + except (AttributeError, NotImplementedError, RuntimeError): + pass + self._reading_paused = False + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + tr = cast(asyncio.Transport, transport) + tcp_nodelay(tr, True) + self.transport = tr + + def connection_lost(self, exc: Optional[BaseException]) -> None: + # Wake up the writer if currently paused. + self.transport = None + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + return + self._drain_waiter = None + if waiter.done(): + return + if exc is None: + waiter.set_result(None) + else: + set_exception( + waiter, + ConnectionError("Connection lost"), + exc, + ) + + async def _drain_helper(self) -> None: + if not self.connected: + raise ConnectionResetError("Connection lost") + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + waiter = self._loop.create_future() + self._drain_waiter = waiter + await asyncio.shield(waiter) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/client.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/client.py new file mode 100644 index 000000000..3d1045f35 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/client.py @@ -0,0 +1,1522 @@ +"""HTTP Client for asyncio.""" + +import asyncio +import base64 +import hashlib +import json +import os +import sys +import traceback +import warnings +from contextlib import suppress +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Coroutine, + Final, + FrozenSet, + Generator, + Generic, + Iterable, + List, + Mapping, + Optional, + Set, + Tuple, + Type, + TypedDict, + TypeVar, + Union, +) + +import attr +from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr +from yarl import URL + +from . 
import hdrs, http, payload +from .abc import AbstractCookieJar +from .client_exceptions import ( + ClientConnectionError, + ClientConnectorCertificateError, + ClientConnectorError, + ClientConnectorSSLError, + ClientError, + ClientHttpProxyError, + ClientOSError, + ClientPayloadError, + ClientProxyConnectionError, + ClientResponseError, + ClientSSLError, + ConnectionTimeoutError, + ContentTypeError, + InvalidURL, + InvalidUrlClientError, + InvalidUrlRedirectClientError, + NonHttpUrlClientError, + NonHttpUrlRedirectClientError, + RedirectClientError, + ServerConnectionError, + ServerDisconnectedError, + ServerFingerprintMismatch, + ServerTimeoutError, + SocketTimeoutError, + TooManyRedirects, + WSServerHandshakeError, +) +from .client_reqrep import ( + ClientRequest as ClientRequest, + ClientResponse as ClientResponse, + Fingerprint as Fingerprint, + RequestInfo as RequestInfo, + _merge_ssl_params, +) +from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse +from .connector import ( + HTTP_AND_EMPTY_SCHEMA_SET, + BaseConnector as BaseConnector, + NamedPipeConnector as NamedPipeConnector, + TCPConnector as TCPConnector, + UnixConnector as UnixConnector, +) +from .cookiejar import CookieJar +from .helpers import ( + _SENTINEL, + DEBUG, + BasicAuth, + TimeoutHandle, + ceil_timeout, + get_env_proxy_for_url, + method_must_be_empty_body, + sentinel, + strip_auth_from_url, +) +from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter +from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse +from .streams import FlowControlDataQueue +from .tracing import Trace, TraceConfig +from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL + +__all__ = ( + # client_exceptions + "ClientConnectionError", + "ClientConnectorCertificateError", + "ClientConnectorError", + "ClientConnectorSSLError", + "ClientError", + "ClientHttpProxyError", + "ClientOSError", + "ClientPayloadError", + "ClientProxyConnectionError", + "ClientResponseError", + "ClientSSLError", + "ConnectionTimeoutError", + "ContentTypeError", + "InvalidURL", + "InvalidUrlClientError", + "RedirectClientError", + "NonHttpUrlClientError", + "InvalidUrlRedirectClientError", + "NonHttpUrlRedirectClientError", + "ServerConnectionError", + "ServerDisconnectedError", + "ServerFingerprintMismatch", + "ServerTimeoutError", + "SocketTimeoutError", + "TooManyRedirects", + "WSServerHandshakeError", + # client_reqrep + "ClientRequest", + "ClientResponse", + "Fingerprint", + "RequestInfo", + # connector + "BaseConnector", + "TCPConnector", + "UnixConnector", + "NamedPipeConnector", + # client_ws + "ClientWebSocketResponse", + # client + "ClientSession", + "ClientTimeout", + "request", +) + + +if TYPE_CHECKING: + from ssl import SSLContext +else: + SSLContext = None + +if sys.version_info >= (3, 11) and TYPE_CHECKING: + from typing import Unpack + + +class _RequestOptions(TypedDict, total=False): + params: Union[Mapping[str, Union[str, int]], str, None] + data: Any + json: Any + cookies: Union[LooseCookies, None] + headers: Union[LooseHeaders, None] + skip_auto_headers: Union[Iterable[str], None] + auth: Union[BasicAuth, None] + allow_redirects: bool + max_redirects: int + compress: Union[str, None] + chunked: Union[bool, None] + expect100: bool + raise_for_status: Union[None, bool, Callable[[ClientResponse], Awaitable[None]]] + read_until_eof: bool + proxy: Union[StrOrURL, None] + proxy_auth: Union[BasicAuth, None] + timeout: "Union[ClientTimeout, _SENTINEL, None]" + ssl: Union[SSLContext, 
bool, Fingerprint] + server_hostname: Union[str, None] + proxy_headers: Union[LooseHeaders, None] + trace_request_ctx: Union[Mapping[str, str], None] + read_bufsize: Union[int, None] + auto_decompress: Union[bool, None] + max_line_size: Union[int, None] + max_field_size: Union[int, None] + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ClientTimeout: + total: Optional[float] = None + connect: Optional[float] = None + sock_read: Optional[float] = None + sock_connect: Optional[float] = None + ceil_threshold: float = 5 + + # pool_queue_timeout: Optional[float] = None + # dns_resolution_timeout: Optional[float] = None + # socket_connect_timeout: Optional[float] = None + # connection_acquiring_timeout: Optional[float] = None + # new_connection_timeout: Optional[float] = None + # http_header_timeout: Optional[float] = None + # response_body_timeout: Optional[float] = None + + # to create a timeout specific for a single request, either + # - create a completely new one to overwrite the default + # - or use http://www.attrs.org/en/stable/api.html#attr.evolve + # to overwrite the defaults + + +# 5 Minute default read timeout +DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60) + +# https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2 +IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"}) + +_RetType = TypeVar("_RetType") +_CharsetResolver = Callable[[ClientResponse, bytes], str] + + +class ClientSession: + """First-class interface for making HTTP requests.""" + + ATTRS = frozenset( + [ + "_base_url", + "_source_traceback", + "_connector", + "requote_redirect_url", + "_loop", + "_cookie_jar", + "_connector_owner", + "_default_auth", + "_version", + "_json_serialize", + "_requote_redirect_url", + "_timeout", + "_raise_for_status", + "_auto_decompress", + "_trust_env", + "_default_headers", + "_skip_auto_headers", + "_request_class", + "_response_class", + "_ws_response_class", + "_trace_configs", + "_read_bufsize", + "_max_line_size", + "_max_field_size", + "_resolve_charset", + ] + ) + + _source_traceback: Optional[traceback.StackSummary] = None + _connector: Optional[BaseConnector] = None + + def __init__( + self, + base_url: Optional[StrOrURL] = None, + *, + connector: Optional[BaseConnector] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + cookies: Optional[LooseCookies] = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + auth: Optional[BasicAuth] = None, + json_serialize: JSONEncoder = json.dumps, + request_class: Type[ClientRequest] = ClientRequest, + response_class: Type[ClientResponse] = ClientResponse, + ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse, + version: HttpVersion = http.HttpVersion11, + cookie_jar: Optional[AbstractCookieJar] = None, + connector_owner: bool = True, + raise_for_status: Union[ + bool, Callable[[ClientResponse], Awaitable[None]] + ] = False, + read_timeout: Union[float, _SENTINEL] = sentinel, + conn_timeout: Optional[float] = None, + timeout: Union[object, ClientTimeout] = sentinel, + auto_decompress: bool = True, + trust_env: bool = False, + requote_redirect_url: bool = True, + trace_configs: Optional[List[TraceConfig]] = None, + read_bufsize: int = 2**16, + max_line_size: int = 8190, + max_field_size: int = 8190, + fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8", + ) -> None: + # We initialise _connector to None immediately, as it's referenced in __del__() + # and could cause issues if an 
exception occurs during initialisation. + self._connector: Optional[BaseConnector] = None + + if loop is None: + if connector is not None: + loop = connector._loop + + loop = loop or asyncio.get_running_loop() + + if base_url is None or isinstance(base_url, URL): + self._base_url: Optional[URL] = base_url + else: + self._base_url = URL(base_url) + assert ( + self._base_url.origin() == self._base_url + ), "Only absolute URLs without path part are supported" + + if timeout is sentinel or timeout is None: + self._timeout = DEFAULT_TIMEOUT + if read_timeout is not sentinel: + warnings.warn( + "read_timeout is deprecated, " "use timeout argument instead", + DeprecationWarning, + stacklevel=2, + ) + self._timeout = attr.evolve(self._timeout, total=read_timeout) + if conn_timeout is not None: + self._timeout = attr.evolve(self._timeout, connect=conn_timeout) + warnings.warn( + "conn_timeout is deprecated, " "use timeout argument instead", + DeprecationWarning, + stacklevel=2, + ) + else: + if not isinstance(timeout, ClientTimeout): + raise ValueError( + f"timeout parameter cannot be of {type(timeout)} type, " + "please use 'timeout=ClientTimeout(...)'", + ) + self._timeout = timeout + if read_timeout is not sentinel: + raise ValueError( + "read_timeout and timeout parameters " + "conflict, please setup " + "timeout.read" + ) + if conn_timeout is not None: + raise ValueError( + "conn_timeout and timeout parameters " + "conflict, please setup " + "timeout.connect" + ) + + if connector is None: + connector = TCPConnector(loop=loop) + + if connector._loop is not loop: + raise RuntimeError("Session and connector has to use same event loop") + + self._loop = loop + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + if cookie_jar is None: + cookie_jar = CookieJar(loop=loop) + self._cookie_jar = cookie_jar + + if cookies is not None: + self._cookie_jar.update_cookies(cookies) + + self._connector = connector + self._connector_owner = connector_owner + self._default_auth = auth + self._version = version + self._json_serialize = json_serialize + self._raise_for_status = raise_for_status + self._auto_decompress = auto_decompress + self._trust_env = trust_env + self._requote_redirect_url = requote_redirect_url + self._read_bufsize = read_bufsize + self._max_line_size = max_line_size + self._max_field_size = max_field_size + + # Convert to list of tuples + if headers: + real_headers: CIMultiDict[str] = CIMultiDict(headers) + else: + real_headers = CIMultiDict() + self._default_headers: CIMultiDict[str] = real_headers + if skip_auto_headers is not None: + self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers) + else: + self._skip_auto_headers = frozenset() + + self._request_class = request_class + self._response_class = response_class + self._ws_response_class = ws_response_class + + self._trace_configs = trace_configs or [] + for trace_config in self._trace_configs: + trace_config.freeze() + + self._resolve_charset = fallback_charset_resolver + + def __init_subclass__(cls: Type["ClientSession"]) -> None: + warnings.warn( + "Inheritance class {} from ClientSession " + "is discouraged".format(cls.__name__), + DeprecationWarning, + stacklevel=2, + ) + + if DEBUG: + + def __setattr__(self, name: str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn( + "Setting custom ClientSession.{} attribute " + "is discouraged".format(name), + DeprecationWarning, + stacklevel=2, + ) + super().__setattr__(name, val) + + def __del__(self, _warnings: Any 
= warnings) -> None: + if not self.closed: + kwargs = {"source": self} + _warnings.warn( + f"Unclosed client session {self!r}", ResourceWarning, **kwargs + ) + context = {"client_session": self, "message": "Unclosed client session"} + if self._source_traceback is not None: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + if sys.version_info >= (3, 11) and TYPE_CHECKING: + + def request( + self, + method: str, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + else: + + def request( + self, method: str, url: StrOrURL, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP request.""" + return _RequestContextManager(self._request(method, url, **kwargs)) + + def _build_url(self, str_or_url: StrOrURL) -> URL: + url = URL(str_or_url) + if self._base_url is None: + return url + else: + assert not url.is_absolute() and url.path.startswith("/") + return self._base_url.join(url) + + async def _request( + self, + method: str, + str_or_url: StrOrURL, + *, + params: Optional[Mapping[str, str]] = None, + data: Any = None, + json: Any = None, + cookies: Optional[LooseCookies] = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + auth: Optional[BasicAuth] = None, + allow_redirects: bool = True, + max_redirects: int = 10, + compress: Optional[str] = None, + chunked: Optional[bool] = None, + expect100: bool = False, + raise_for_status: Union[ + None, bool, Callable[[ClientResponse], Awaitable[None]] + ] = None, + read_until_eof: bool = True, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + timeout: Union[ClientTimeout, _SENTINEL] = sentinel, + verify_ssl: Optional[bool] = None, + fingerprint: Optional[bytes] = None, + ssl_context: Optional[SSLContext] = None, + ssl: Union[SSLContext, bool, Fingerprint] = True, + server_hostname: Optional[str] = None, + proxy_headers: Optional[LooseHeaders] = None, + trace_request_ctx: Optional[Mapping[str, str]] = None, + read_bufsize: Optional[int] = None, + auto_decompress: Optional[bool] = None, + max_line_size: Optional[int] = None, + max_field_size: Optional[int] = None, + ) -> ClientResponse: + + # NOTE: timeout clamps existing connect and read timeouts. We cannot + # set the default to None because we need to detect if the user wants + # to use the existing timeouts by setting timeout to None. 
+ + if self.closed: + raise RuntimeError("Session is closed") + + ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + + if data is not None and json is not None: + raise ValueError( + "data and json parameters can not be used at the same time" + ) + elif json is not None: + data = payload.JsonPayload(json, dumps=self._json_serialize) + + if not isinstance(chunked, bool) and chunked is not None: + warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) + + redirects = 0 + history = [] + version = self._version + params = params or {} + + # Merge with default headers and transform to CIMultiDict + headers = self._prepare_headers(headers) + proxy_headers = self._prepare_headers(proxy_headers) + + try: + url = self._build_url(str_or_url) + except ValueError as e: + raise InvalidUrlClientError(str_or_url) from e + + assert self._connector is not None + if url.scheme not in self._connector.allowed_protocol_schema_set: + raise NonHttpUrlClientError(url) + + skip_headers = set(self._skip_auto_headers) + if skip_auto_headers is not None: + for i in skip_auto_headers: + skip_headers.add(istr(i)) + + if proxy is not None: + try: + proxy = URL(proxy) + except ValueError as e: + raise InvalidURL(proxy) from e + + if timeout is sentinel: + real_timeout: ClientTimeout = self._timeout + else: + if not isinstance(timeout, ClientTimeout): + real_timeout = ClientTimeout(total=timeout) + else: + real_timeout = timeout + # timeout is cumulative for all request operations + # (request, redirects, responses, data consuming) + tm = TimeoutHandle( + self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold + ) + handle = tm.start() + + if read_bufsize is None: + read_bufsize = self._read_bufsize + + if auto_decompress is None: + auto_decompress = self._auto_decompress + + if max_line_size is None: + max_line_size = self._max_line_size + + if max_field_size is None: + max_field_size = self._max_field_size + + traces = [ + Trace( + self, + trace_config, + trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx), + ) + for trace_config in self._trace_configs + ] + + for trace in traces: + await trace.send_request_start(method, url.update_query(params), headers) + + timer = tm.timer() + try: + with timer: + # https://www.rfc-editor.org/rfc/rfc9112.html#name-retrying-requests + retry_persistent_connection = method in IDEMPOTENT_METHODS + while True: + url, auth_from_url = strip_auth_from_url(url) + if not url.raw_host: + # NOTE: Bail early, otherwise, causes `InvalidURL` through + # NOTE: `self._request_class()` below. 
+ err_exc_cls = ( + InvalidUrlRedirectClientError + if redirects + else InvalidUrlClientError + ) + raise err_exc_cls(url) + if auth and auth_from_url: + raise ValueError( + "Cannot combine AUTH argument with " + "credentials encoded in URL" + ) + + if auth is None: + auth = auth_from_url + if auth is None: + auth = self._default_auth + # It would be confusing if we support explicit + # Authorization header with auth argument + if ( + headers is not None + and auth is not None + and hdrs.AUTHORIZATION in headers + ): + raise ValueError( + "Cannot combine AUTHORIZATION header " + "with AUTH argument or credentials " + "encoded in URL" + ) + + all_cookies = self._cookie_jar.filter_cookies(url) + + if cookies is not None: + tmp_cookie_jar = CookieJar() + tmp_cookie_jar.update_cookies(cookies) + req_cookies = tmp_cookie_jar.filter_cookies(url) + if req_cookies: + all_cookies.load(req_cookies) + + if proxy is not None: + proxy = URL(proxy) + elif self._trust_env: + with suppress(LookupError): + proxy, proxy_auth = get_env_proxy_for_url(url) + + req = self._request_class( + method, + url, + params=params, + headers=headers, + skip_auto_headers=skip_headers, + data=data, + cookies=all_cookies, + auth=auth, + version=version, + compress=compress, + chunked=chunked, + expect100=expect100, + loop=self._loop, + response_class=self._response_class, + proxy=proxy, + proxy_auth=proxy_auth, + timer=timer, + session=self, + ssl=ssl if ssl is not None else True, + server_hostname=server_hostname, + proxy_headers=proxy_headers, + traces=traces, + trust_env=self.trust_env, + ) + + # connection timeout + try: + async with ceil_timeout( + real_timeout.connect, + ceil_threshold=real_timeout.ceil_threshold, + ): + conn = await self._connector.connect( + req, traces=traces, timeout=real_timeout + ) + except asyncio.TimeoutError as exc: + raise ConnectionTimeoutError( + f"Connection timeout to host {url}" + ) from exc + + assert conn.transport is not None + + assert conn.protocol is not None + conn.protocol.set_response_params( + timer=timer, + skip_payload=method_must_be_empty_body(method), + read_until_eof=read_until_eof, + auto_decompress=auto_decompress, + read_timeout=real_timeout.sock_read, + read_bufsize=read_bufsize, + timeout_ceil_threshold=self._connector._timeout_ceil_threshold, + max_line_size=max_line_size, + max_field_size=max_field_size, + ) + + try: + try: + resp = await req.send(conn) + try: + await resp.start(conn) + except BaseException: + resp.close() + raise + except BaseException: + conn.close() + raise + except (ClientOSError, ServerDisconnectedError): + if retry_persistent_connection: + retry_persistent_connection = False + continue + raise + except ClientError: + raise + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise ClientOSError(*exc.args) from exc + + self._cookie_jar.update_cookies(resp.cookies, resp.url) + + # redirects + if resp.status in (301, 302, 303, 307, 308) and allow_redirects: + + for trace in traces: + await trace.send_request_redirect( + method, url.update_query(params), headers, resp + ) + + redirects += 1 + history.append(resp) + if max_redirects and redirects >= max_redirects: + resp.close() + raise TooManyRedirects( + history[0].request_info, tuple(history) + ) + + # For 301 and 302, mimic IE, now changed in RFC + # https://github.com/kennethreitz/requests/pull/269 + if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or ( + resp.status in (301, 302) and resp.method == hdrs.METH_POST + ): + method = 
hdrs.METH_GET + data = None + if headers.get(hdrs.CONTENT_LENGTH): + headers.pop(hdrs.CONTENT_LENGTH) + + r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get( + hdrs.URI + ) + if r_url is None: + # see github.com/aio-libs/aiohttp/issues/2022 + break + else: + # reading from correct redirection + # response is forbidden + resp.release() + + try: + parsed_redirect_url = URL( + r_url, encoded=not self._requote_redirect_url + ) + except ValueError as e: + raise InvalidUrlRedirectClientError( + r_url, + "Server attempted redirecting to a location that does not look like a URL", + ) from e + + scheme = parsed_redirect_url.scheme + if scheme not in HTTP_AND_EMPTY_SCHEMA_SET: + resp.close() + raise NonHttpUrlRedirectClientError(r_url) + elif not scheme: + parsed_redirect_url = url.join(parsed_redirect_url) + + try: + redirect_origin = parsed_redirect_url.origin() + except ValueError as origin_val_err: + raise InvalidUrlRedirectClientError( + parsed_redirect_url, + "Invalid redirect URL origin", + ) from origin_val_err + + if url.origin() != redirect_origin: + auth = None + headers.pop(hdrs.AUTHORIZATION, None) + + url = parsed_redirect_url + params = {} + resp.release() + continue + + break + + # check response status + if raise_for_status is None: + raise_for_status = self._raise_for_status + + if raise_for_status is None: + pass + elif callable(raise_for_status): + await raise_for_status(resp) + elif raise_for_status: + resp.raise_for_status() + + # register connection + if handle is not None: + if resp.connection is not None: + resp.connection.add_callback(handle.cancel) + else: + handle.cancel() + + resp._history = tuple(history) + + for trace in traces: + await trace.send_request_end( + method, url.update_query(params), headers, resp + ) + return resp + + except BaseException as e: + # cleanup timer + tm.close() + if handle: + handle.cancel() + handle = None + + for trace in traces: + await trace.send_request_exception( + method, url.update_query(params), headers, e + ) + raise + + def ws_connect( + self, + url: StrOrURL, + *, + method: str = hdrs.METH_GET, + protocols: Iterable[str] = (), + timeout: float = 10.0, + receive_timeout: Optional[float] = None, + autoclose: bool = True, + autoping: bool = True, + heartbeat: Optional[float] = None, + auth: Optional[BasicAuth] = None, + origin: Optional[str] = None, + params: Optional[Mapping[str, str]] = None, + headers: Optional[LooseHeaders] = None, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + ssl: Union[SSLContext, bool, Fingerprint] = True, + verify_ssl: Optional[bool] = None, + fingerprint: Optional[bytes] = None, + ssl_context: Optional[SSLContext] = None, + proxy_headers: Optional[LooseHeaders] = None, + compress: int = 0, + max_msg_size: int = 4 * 1024 * 1024, + ) -> "_WSRequestContextManager": + """Initiate websocket connection.""" + return _WSRequestContextManager( + self._ws_connect( + url, + method=method, + protocols=protocols, + timeout=timeout, + receive_timeout=receive_timeout, + autoclose=autoclose, + autoping=autoping, + heartbeat=heartbeat, + auth=auth, + origin=origin, + params=params, + headers=headers, + proxy=proxy, + proxy_auth=proxy_auth, + ssl=ssl, + verify_ssl=verify_ssl, + fingerprint=fingerprint, + ssl_context=ssl_context, + proxy_headers=proxy_headers, + compress=compress, + max_msg_size=max_msg_size, + ) + ) + + async def _ws_connect( + self, + url: StrOrURL, + *, + method: str = hdrs.METH_GET, + protocols: Iterable[str] = (), + timeout: float = 10.0, + receive_timeout: 
Optional[float] = None, + autoclose: bool = True, + autoping: bool = True, + heartbeat: Optional[float] = None, + auth: Optional[BasicAuth] = None, + origin: Optional[str] = None, + params: Optional[Mapping[str, str]] = None, + headers: Optional[LooseHeaders] = None, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + ssl: Union[SSLContext, bool, Fingerprint] = True, + verify_ssl: Optional[bool] = None, + fingerprint: Optional[bytes] = None, + ssl_context: Optional[SSLContext] = None, + proxy_headers: Optional[LooseHeaders] = None, + compress: int = 0, + max_msg_size: int = 4 * 1024 * 1024, + ) -> ClientWebSocketResponse: + + if headers is None: + real_headers: CIMultiDict[str] = CIMultiDict() + else: + real_headers = CIMultiDict(headers) + + default_headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "Upgrade", + hdrs.SEC_WEBSOCKET_VERSION: "13", + } + + for key, value in default_headers.items(): + real_headers.setdefault(key, value) + + sec_key = base64.b64encode(os.urandom(16)) + real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode() + + if protocols: + real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols) + if origin is not None: + real_headers[hdrs.ORIGIN] = origin + if compress: + extstr = ws_ext_gen(compress=compress) + real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr + + # For the sake of backward compatibility, if user passes in None, convert it to True + if ssl is None: + warnings.warn( + "ssl=None is deprecated, please use ssl=True", + DeprecationWarning, + stacklevel=2, + ) + ssl = True + ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + + # send request + resp = await self.request( + method, + url, + params=params, + headers=real_headers, + read_until_eof=False, + auth=auth, + proxy=proxy, + proxy_auth=proxy_auth, + ssl=ssl, + proxy_headers=proxy_headers, + ) + + try: + # check handshake + if resp.status != 101: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid response status", + status=resp.status, + headers=resp.headers, + ) + + if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket": + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid upgrade header", + status=resp.status, + headers=resp.headers, + ) + + if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade": + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid connection header", + status=resp.status, + headers=resp.headers, + ) + + # key calculation + r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "") + match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode() + if r_key != match: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid challenge response", + status=resp.status, + headers=resp.headers, + ) + + # websocket protocol + protocol = None + if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers: + resp_protocols = [ + proto.strip() + for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") + ] + + for proto in resp_protocols: + if proto in protocols: + protocol = proto + break + + # websocket compress + notakeover = False + if compress: + compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) + if compress_hdrs: + try: + compress, notakeover = ws_ext_parse(compress_hdrs) + except WSHandshakeError as exc: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message=exc.args[0], + status=resp.status, + headers=resp.headers, + ) from 
exc + else: + compress = 0 + notakeover = False + + conn = resp.connection + assert conn is not None + conn_proto = conn.protocol + assert conn_proto is not None + + # For WS connection the read_timeout must be either receive_timeout or greater + # None == no timeout, i.e. infinite timeout, so None is the max timeout possible + if receive_timeout is None: + # Reset regardless + conn_proto.read_timeout = receive_timeout + elif conn_proto.read_timeout is not None: + # If read_timeout was set check which wins + conn_proto.read_timeout = max(receive_timeout, conn_proto.read_timeout) + + transport = conn.transport + assert transport is not None + reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue( + conn_proto, 2**16, loop=self._loop + ) + conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader) + writer = WebSocketWriter( + conn_proto, + transport, + use_mask=True, + compress=compress, + notakeover=notakeover, + ) + except BaseException: + resp.close() + raise + else: + return self._ws_response_class( + reader, + writer, + protocol, + resp, + timeout, + autoclose, + autoping, + self._loop, + receive_timeout=receive_timeout, + heartbeat=heartbeat, + compress=compress, + client_notakeover=notakeover, + ) + + def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]": + """Add default headers and transform it to CIMultiDict""" + # Convert headers to MultiDict + result = CIMultiDict(self._default_headers) + if headers: + if not isinstance(headers, (MultiDictProxy, MultiDict)): + headers = CIMultiDict(headers) + added_names: Set[str] = set() + for key, value in headers.items(): + if key in added_names: + result.add(key, value) + else: + result[key] = value + added_names.add(key) + return result + + if sys.version_info >= (3, 11) and TYPE_CHECKING: + + def get( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def options( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def head( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def post( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def put( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def patch( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def delete( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... 
+ + else: + + def get( + self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP GET request.""" + return _RequestContextManager( + self._request( + hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs + ) + ) + + def options( + self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP OPTIONS request.""" + return _RequestContextManager( + self._request( + hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs + ) + ) + + def head( + self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP HEAD request.""" + return _RequestContextManager( + self._request( + hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs + ) + ) + + def post( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP POST request.""" + return _RequestContextManager( + self._request(hdrs.METH_POST, url, data=data, **kwargs) + ) + + def put( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP PUT request.""" + return _RequestContextManager( + self._request(hdrs.METH_PUT, url, data=data, **kwargs) + ) + + def patch( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_PATCH, url, data=data, **kwargs) + ) + + def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager": + """Perform HTTP DELETE request.""" + return _RequestContextManager( + self._request(hdrs.METH_DELETE, url, **kwargs) + ) + + async def close(self) -> None: + """Close underlying connector. + + Release all acquired resources. + """ + if not self.closed: + if self._connector is not None and self._connector_owner: + await self._connector.close() + self._connector = None + + @property + def closed(self) -> bool: + """Is client session closed. + + A readonly property. 
+ """ + return self._connector is None or self._connector.closed + + @property + def connector(self) -> Optional[BaseConnector]: + """Connector instance used for the session.""" + return self._connector + + @property + def cookie_jar(self) -> AbstractCookieJar: + """The session cookies.""" + return self._cookie_jar + + @property + def version(self) -> Tuple[int, int]: + """The session HTTP protocol version.""" + return self._version + + @property + def requote_redirect_url(self) -> bool: + """Do URL requoting on redirection handling.""" + return self._requote_redirect_url + + @requote_redirect_url.setter + def requote_redirect_url(self, val: bool) -> None: + """Do URL requoting on redirection handling.""" + warnings.warn( + "session.requote_redirect_url modification " "is deprecated #2778", + DeprecationWarning, + stacklevel=2, + ) + self._requote_redirect_url = val + + @property + def loop(self) -> asyncio.AbstractEventLoop: + """Session's loop.""" + warnings.warn( + "client.loop property is deprecated", DeprecationWarning, stacklevel=2 + ) + return self._loop + + @property + def timeout(self) -> ClientTimeout: + """Timeout for the session.""" + return self._timeout + + @property + def headers(self) -> "CIMultiDict[str]": + """The default headers of the client session.""" + return self._default_headers + + @property + def skip_auto_headers(self) -> FrozenSet[istr]: + """Headers for which autogeneration should be skipped""" + return self._skip_auto_headers + + @property + def auth(self) -> Optional[BasicAuth]: + """An object that represents HTTP Basic Authorization""" + return self._default_auth + + @property + def json_serialize(self) -> JSONEncoder: + """Json serializer callable""" + return self._json_serialize + + @property + def connector_owner(self) -> bool: + """Should connector be closed on session closing""" + return self._connector_owner + + @property + def raise_for_status( + self, + ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]: + """Should `ClientResponse.raise_for_status()` be called for each response.""" + return self._raise_for_status + + @property + def auto_decompress(self) -> bool: + """Should the body response be automatically decompressed.""" + return self._auto_decompress + + @property + def trust_env(self) -> bool: + """ + Should proxies information from environment or netrc be trusted. + + Information is from HTTP_PROXY / HTTPS_PROXY environment variables + or ~/.netrc file if present. + """ + return self._trust_env + + @property + def trace_configs(self) -> List[TraceConfig]: + """A list of TraceConfig instances used for client tracing""" + return self._trace_configs + + def detach(self) -> None: + """Detach connector from session without closing the former. + + Session is switched to closed state anyway. 
+ """ + self._connector = None + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> "ClientSession": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + await self.close() + + +class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]): + + __slots__ = ("_coro", "_resp") + + def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None: + self._coro = coro + + def send(self, arg: None) -> "asyncio.Future[Any]": + return self._coro.send(arg) + + def throw(self, *args: Any, **kwargs: Any) -> "asyncio.Future[Any]": + return self._coro.throw(*args, **kwargs) + + def close(self) -> None: + return self._coro.close() + + def __await__(self) -> Generator[Any, None, _RetType]: + ret = self._coro.__await__() + return ret + + def __iter__(self) -> Generator[Any, None, _RetType]: + return self.__await__() + + async def __aenter__(self) -> _RetType: + self._resp = await self._coro + return self._resp + + +class _RequestContextManager(_BaseRequestContextManager[ClientResponse]): + __slots__ = () + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + # We're basing behavior on the exception as it can be caused by + # user code unrelated to the status of the connection. If you + # would like to close a connection you must do that + # explicitly. Otherwise connection error handling should kick in + # and close/recycle the connection as required. 
+ self._resp.release() + await self._resp.wait_for_close() + + +class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]): + __slots__ = () + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + await self._resp.close() + + +class _SessionRequestContextManager: + + __slots__ = ("_coro", "_resp", "_session") + + def __init__( + self, + coro: Coroutine["asyncio.Future[Any]", None, ClientResponse], + session: ClientSession, + ) -> None: + self._coro = coro + self._resp: Optional[ClientResponse] = None + self._session = session + + async def __aenter__(self) -> ClientResponse: + try: + self._resp = await self._coro + except BaseException: + await self._session.close() + raise + else: + return self._resp + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + assert self._resp is not None + self._resp.close() + await self._session.close() + + +def request( + method: str, + url: StrOrURL, + *, + params: Optional[Mapping[str, str]] = None, + data: Any = None, + json: Any = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + auth: Optional[BasicAuth] = None, + allow_redirects: bool = True, + max_redirects: int = 10, + compress: Optional[str] = None, + chunked: Optional[bool] = None, + expect100: bool = False, + raise_for_status: Optional[bool] = None, + read_until_eof: bool = True, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + timeout: Union[ClientTimeout, object] = sentinel, + cookies: Optional[LooseCookies] = None, + version: HttpVersion = http.HttpVersion11, + connector: Optional[BaseConnector] = None, + read_bufsize: Optional[int] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + max_line_size: int = 8190, + max_field_size: int = 8190, +) -> _SessionRequestContextManager: + """Constructs and sends a request. + + Returns response object. + method - HTTP method + url - request url + params - (optional) Dictionary or bytes to be sent in the query + string of the new request + data - (optional) Dictionary, bytes, or file-like object to + send in the body of the request + json - (optional) Any json compatible python object + headers - (optional) Dictionary of HTTP Headers to send with + the request + cookies - (optional) Dict object to send with the request + auth - (optional) BasicAuth named tuple represent HTTP Basic Auth + auth - aiohttp.helpers.BasicAuth + allow_redirects - (optional) If set to False, do not follow + redirects + version - Request HTTP version. + compress - Set to True if request has to be compressed + with deflate encoding. + chunked - Set to chunk size for chunked transfer encoding. + expect100 - Expect 100-continue response from server. + connector - BaseConnector sub-class instance to support + connection pooling. + read_until_eof - Read response until eof if response + does not have Content-Length header. + loop - Optional event loop. + timeout - Optional ClientTimeout settings structure, 5min + total timeout by default. 
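+    proxy - (optional) Proxy URL.
+    proxy_auth - (optional) aiohttp.BasicAuth to use for proxy
+      authorization.
+    max_redirects - (optional) Maximum number of redirects to follow,
+      10 by default.
+    raise_for_status - (optional) If set to True, automatically call
+      ClientResponse.raise_for_status() on each response.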
+ Usage:: + >>> import aiohttp + >>> resp = await aiohttp.request('GET', 'http://python.org/') + >>> resp + + >>> data = await resp.read() + """ + connector_owner = False + if connector is None: + connector_owner = True + connector = TCPConnector(loop=loop, force_close=True) + + session = ClientSession( + loop=loop, + cookies=cookies, + version=version, + timeout=timeout, + connector=connector, + connector_owner=connector_owner, + ) + + return _SessionRequestContextManager( + session._request( + method, + url, + params=params, + data=data, + json=json, + headers=headers, + skip_auto_headers=skip_auto_headers, + auth=auth, + allow_redirects=allow_redirects, + max_redirects=max_redirects, + compress=compress, + chunked=chunked, + expect100=expect100, + raise_for_status=raise_for_status, + read_until_eof=read_until_eof, + proxy=proxy, + proxy_auth=proxy_auth, + read_bufsize=read_bufsize, + max_line_size=max_line_size, + max_field_size=max_field_size, + ), + session, + ) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/client_exceptions.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/client_exceptions.py new file mode 100644 index 000000000..ff29b3d3c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/client_exceptions.py @@ -0,0 +1,396 @@ +"""HTTP related errors.""" + +import asyncio +import warnings +from typing import TYPE_CHECKING, Optional, Tuple, Union + +from .http_parser import RawResponseMessage +from .typedefs import LooseHeaders, StrOrURL + +try: + import ssl + + SSLContext = ssl.SSLContext +except ImportError: # pragma: no cover + ssl = SSLContext = None # type: ignore[assignment] + + +if TYPE_CHECKING: + from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo +else: + RequestInfo = ClientResponse = ConnectionKey = None + +__all__ = ( + "ClientError", + "ClientConnectionError", + "ClientOSError", + "ClientConnectorError", + "ClientProxyConnectionError", + "ClientSSLError", + "ClientConnectorSSLError", + "ClientConnectorCertificateError", + "ConnectionTimeoutError", + "SocketTimeoutError", + "ServerConnectionError", + "ServerTimeoutError", + "ServerDisconnectedError", + "ServerFingerprintMismatch", + "ClientResponseError", + "ClientHttpProxyError", + "WSServerHandshakeError", + "ContentTypeError", + "ClientPayloadError", + "InvalidURL", + "InvalidUrlClientError", + "RedirectClientError", + "NonHttpUrlClientError", + "InvalidUrlRedirectClientError", + "NonHttpUrlRedirectClientError", +) + + +class ClientError(Exception): + """Base class for client connection errors.""" + + +class ClientResponseError(ClientError): + """Base class for exceptions that occur after getting a response. + + request_info: An instance of RequestInfo. + history: A sequence of responses, if redirects occurred. + status: HTTP status code. + message: Error message. + headers: Response headers. 
+ """ + + def __init__( + self, + request_info: RequestInfo, + history: Tuple[ClientResponse, ...], + *, + code: Optional[int] = None, + status: Optional[int] = None, + message: str = "", + headers: Optional[LooseHeaders] = None, + ) -> None: + self.request_info = request_info + if code is not None: + if status is not None: + raise ValueError( + "Both code and status arguments are provided; " + "code is deprecated, use status instead" + ) + warnings.warn( + "code argument is deprecated, use status instead", + DeprecationWarning, + stacklevel=2, + ) + if status is not None: + self.status = status + elif code is not None: + self.status = code + else: + self.status = 0 + self.message = message + self.headers = headers + self.history = history + self.args = (request_info, history) + + def __str__(self) -> str: + return "{}, message={!r}, url={!r}".format( + self.status, + self.message, + str(self.request_info.real_url), + ) + + def __repr__(self) -> str: + args = f"{self.request_info!r}, {self.history!r}" + if self.status != 0: + args += f", status={self.status!r}" + if self.message != "": + args += f", message={self.message!r}" + if self.headers is not None: + args += f", headers={self.headers!r}" + return f"{type(self).__name__}({args})" + + @property + def code(self) -> int: + warnings.warn( + "code property is deprecated, use status instead", + DeprecationWarning, + stacklevel=2, + ) + return self.status + + @code.setter + def code(self, value: int) -> None: + warnings.warn( + "code property is deprecated, use status instead", + DeprecationWarning, + stacklevel=2, + ) + self.status = value + + +class ContentTypeError(ClientResponseError): + """ContentType found is not valid.""" + + +class WSServerHandshakeError(ClientResponseError): + """websocket server handshake error.""" + + +class ClientHttpProxyError(ClientResponseError): + """HTTP proxy error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + proxy responds with status other than ``200 OK`` + on ``CONNECT`` request. + """ + + +class TooManyRedirects(ClientResponseError): + """Client was redirected too many times.""" + + +class ClientConnectionError(ClientError): + """Base class for client socket errors.""" + + +class ClientOSError(ClientConnectionError, OSError): + """OSError error.""" + + +class ClientConnectorError(ClientOSError): + """Client connector error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + a connection can not be established. + """ + + def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None: + self._conn_key = connection_key + self._os_error = os_error + super().__init__(os_error.errno, os_error.strerror) + self.args = (connection_key, os_error) + + @property + def os_error(self) -> OSError: + return self._os_error + + @property + def host(self) -> str: + return self._conn_key.host + + @property + def port(self) -> Optional[int]: + return self._conn_key.port + + @property + def ssl(self) -> Union[SSLContext, bool, "Fingerprint"]: + return self._conn_key.ssl + + def __str__(self) -> str: + return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format( + self, "default" if self.ssl is True else self.ssl, self.strerror + ) + + # OSError.__reduce__ does too much black magick + __reduce__ = BaseException.__reduce__ + + +class ClientProxyConnectionError(ClientConnectorError): + """Proxy connection error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + connection to proxy can not be established. 
+ """ + + +class UnixClientConnectorError(ClientConnectorError): + """Unix connector error. + + Raised in :py:class:`aiohttp.connector.UnixConnector` + if connection to unix socket can not be established. + """ + + def __init__( + self, path: str, connection_key: ConnectionKey, os_error: OSError + ) -> None: + self._path = path + super().__init__(connection_key, os_error) + + @property + def path(self) -> str: + return self._path + + def __str__(self) -> str: + return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format( + self, "default" if self.ssl is True else self.ssl, self.strerror + ) + + +class ServerConnectionError(ClientConnectionError): + """Server connection errors.""" + + +class ServerDisconnectedError(ServerConnectionError): + """Server disconnected.""" + + def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None: + if message is None: + message = "Server disconnected" + + self.args = (message,) + self.message = message + + +class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError): + """Server timeout error.""" + + +class ConnectionTimeoutError(ServerTimeoutError): + """Connection timeout error.""" + + +class SocketTimeoutError(ServerTimeoutError): + """Socket timeout error.""" + + +class ServerFingerprintMismatch(ServerConnectionError): + """SSL certificate does not match expected fingerprint.""" + + def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None: + self.expected = expected + self.got = got + self.host = host + self.port = port + self.args = (expected, got, host, port) + + def __repr__(self) -> str: + return "<{} expected={!r} got={!r} host={!r} port={!r}>".format( + self.__class__.__name__, self.expected, self.got, self.host, self.port + ) + + +class ClientPayloadError(ClientError): + """Response payload error.""" + + +class InvalidURL(ClientError, ValueError): + """Invalid URL. + + URL used for fetching is malformed, e.g. it doesn't contains host + part. 
+ """ + + # Derive from ValueError for backward compatibility + + def __init__(self, url: StrOrURL, description: Union[str, None] = None) -> None: + # The type of url is not yarl.URL because the exception can be raised + # on URL(url) call + self._url = url + self._description = description + + if description: + super().__init__(url, description) + else: + super().__init__(url) + + @property + def url(self) -> StrOrURL: + return self._url + + @property + def description(self) -> "str | None": + return self._description + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self}>" + + def __str__(self) -> str: + if self._description: + return f"{self._url} - {self._description}" + return str(self._url) + + +class InvalidUrlClientError(InvalidURL): + """Invalid URL client error.""" + + +class RedirectClientError(ClientError): + """Client redirect error.""" + + +class NonHttpUrlClientError(ClientError): + """Non http URL client error.""" + + +class InvalidUrlRedirectClientError(InvalidUrlClientError, RedirectClientError): + """Invalid URL redirect client error.""" + + +class NonHttpUrlRedirectClientError(NonHttpUrlClientError, RedirectClientError): + """Non http URL redirect client error.""" + + +class ClientSSLError(ClientConnectorError): + """Base error for ssl.*Errors.""" + + +if ssl is not None: + cert_errors = (ssl.CertificateError,) + cert_errors_bases = ( + ClientSSLError, + ssl.CertificateError, + ) + + ssl_errors = (ssl.SSLError,) + ssl_error_bases = (ClientSSLError, ssl.SSLError) +else: # pragma: no cover + cert_errors = tuple() + cert_errors_bases = ( + ClientSSLError, + ValueError, + ) + + ssl_errors = tuple() + ssl_error_bases = (ClientSSLError,) + + +class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc] + """Response ssl error.""" + + +class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc] + """Response certificate error.""" + + def __init__( + self, connection_key: ConnectionKey, certificate_error: Exception + ) -> None: + self._conn_key = connection_key + self._certificate_error = certificate_error + self.args = (connection_key, certificate_error) + + @property + def certificate_error(self) -> Exception: + return self._certificate_error + + @property + def host(self) -> str: + return self._conn_key.host + + @property + def port(self) -> Optional[int]: + return self._conn_key.port + + @property + def ssl(self) -> bool: + return self._conn_key.is_ssl + + def __str__(self) -> str: + return ( + "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} " + "[{0.certificate_error.__class__.__name__}: " + "{0.certificate_error.args}]".format(self) + ) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/client_proto.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/client_proto.py new file mode 100644 index 000000000..f8c832402 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/client_proto.py @@ -0,0 +1,304 @@ +import asyncio +from contextlib import suppress +from typing import Any, Optional, Tuple + +from .base_protocol import BaseProtocol +from .client_exceptions import ( + ClientOSError, + ClientPayloadError, + ServerDisconnectedError, + SocketTimeoutError, +) +from .helpers import ( + _EXC_SENTINEL, + BaseTimerContext, + set_exception, + status_code_must_be_empty_body, +) +from .http import HttpResponseParser, RawResponseMessage +from .http_exceptions import HttpProcessingError +from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader + + +class 
ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]): + """Helper class to adapt between Protocol and StreamReader.""" + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + BaseProtocol.__init__(self, loop=loop) + DataQueue.__init__(self, loop) + + self._should_close = False + + self._payload: Optional[StreamReader] = None + self._skip_payload = False + self._payload_parser = None + + self._timer = None + + self._tail = b"" + self._upgraded = False + self._parser: Optional[HttpResponseParser] = None + + self._read_timeout: Optional[float] = None + self._read_timeout_handle: Optional[asyncio.TimerHandle] = None + + self._timeout_ceil_threshold: Optional[float] = 5 + + @property + def upgraded(self) -> bool: + return self._upgraded + + @property + def should_close(self) -> bool: + if self._payload is not None and not self._payload.is_eof() or self._upgraded: + return True + + return ( + self._should_close + or self._upgraded + or self.exception() is not None + or self._payload_parser is not None + or len(self) > 0 + or bool(self._tail) + ) + + def force_close(self) -> None: + self._should_close = True + + def close(self) -> None: + transport = self.transport + if transport is not None: + transport.close() + self.transport = None + self._payload = None + self._drop_timeout() + + def is_connected(self) -> bool: + return self.transport is not None and not self.transport.is_closing() + + def connection_lost(self, exc: Optional[BaseException]) -> None: + self._drop_timeout() + + original_connection_error = exc + reraised_exc = original_connection_error + + connection_closed_cleanly = original_connection_error is None + + if self._payload_parser is not None: + with suppress(Exception): # FIXME: log this somehow? + self._payload_parser.feed_eof() + + uncompleted = None + if self._parser is not None: + try: + uncompleted = self._parser.feed_eof() + except Exception as underlying_exc: + if self._payload is not None: + client_payload_exc_msg = ( + f"Response payload is not completed: {underlying_exc !r}" + ) + if not connection_closed_cleanly: + client_payload_exc_msg = ( + f"{client_payload_exc_msg !s}. 
" + f"{original_connection_error !r}" + ) + set_exception( + self._payload, + ClientPayloadError(client_payload_exc_msg), + underlying_exc, + ) + + if not self.is_eof(): + if isinstance(original_connection_error, OSError): + reraised_exc = ClientOSError(*original_connection_error.args) + if connection_closed_cleanly: + reraised_exc = ServerDisconnectedError(uncompleted) + # assigns self._should_close to True as side effect, + # we do it anyway below + underlying_non_eof_exc = ( + _EXC_SENTINEL + if connection_closed_cleanly + else original_connection_error + ) + assert underlying_non_eof_exc is not None + assert reraised_exc is not None + self.set_exception(reraised_exc, underlying_non_eof_exc) + + self._should_close = True + self._parser = None + self._payload = None + self._payload_parser = None + self._reading_paused = False + + super().connection_lost(reraised_exc) + + def eof_received(self) -> None: + # should call parser.feed_eof() most likely + self._drop_timeout() + + def pause_reading(self) -> None: + super().pause_reading() + self._drop_timeout() + + def resume_reading(self) -> None: + super().resume_reading() + self._reschedule_timeout() + + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: + self._should_close = True + self._drop_timeout() + super().set_exception(exc, exc_cause) + + def set_parser(self, parser: Any, payload: Any) -> None: + # TODO: actual types are: + # parser: WebSocketReader + # payload: FlowControlDataQueue + # but they are not generi enough + # Need an ABC for both types + self._payload = payload + self._payload_parser = parser + + self._drop_timeout() + + if self._tail: + data, self._tail = self._tail, b"" + self.data_received(data) + + def set_response_params( + self, + *, + timer: Optional[BaseTimerContext] = None, + skip_payload: bool = False, + read_until_eof: bool = False, + auto_decompress: bool = True, + read_timeout: Optional[float] = None, + read_bufsize: int = 2**16, + timeout_ceil_threshold: float = 5, + max_line_size: int = 8190, + max_field_size: int = 8190, + ) -> None: + self._skip_payload = skip_payload + + self._read_timeout = read_timeout + + self._timeout_ceil_threshold = timeout_ceil_threshold + + self._parser = HttpResponseParser( + self, + self._loop, + read_bufsize, + timer=timer, + payload_exception=ClientPayloadError, + response_with_body=not skip_payload, + read_until_eof=read_until_eof, + auto_decompress=auto_decompress, + max_line_size=max_line_size, + max_field_size=max_field_size, + ) + + if self._tail: + data, self._tail = self._tail, b"" + self.data_received(data) + + def _drop_timeout(self) -> None: + if self._read_timeout_handle is not None: + self._read_timeout_handle.cancel() + self._read_timeout_handle = None + + def _reschedule_timeout(self) -> None: + timeout = self._read_timeout + if self._read_timeout_handle is not None: + self._read_timeout_handle.cancel() + + if timeout: + self._read_timeout_handle = self._loop.call_later( + timeout, self._on_read_timeout + ) + else: + self._read_timeout_handle = None + + def start_timeout(self) -> None: + self._reschedule_timeout() + + @property + def read_timeout(self) -> Optional[float]: + return self._read_timeout + + @read_timeout.setter + def read_timeout(self, read_timeout: Optional[float]) -> None: + self._read_timeout = read_timeout + + def _on_read_timeout(self) -> None: + exc = SocketTimeoutError("Timeout on reading data from socket") + self.set_exception(exc) + if self._payload is not None: + 
set_exception(self._payload, exc) + + def data_received(self, data: bytes) -> None: + self._reschedule_timeout() + + if not data: + return + + # custom payload parser + if self._payload_parser is not None: + eof, tail = self._payload_parser.feed_data(data) + if eof: + self._payload = None + self._payload_parser = None + + if tail: + self.data_received(tail) + return + else: + if self._upgraded or self._parser is None: + # i.e. websocket connection, websocket parser is not set yet + self._tail += data + else: + # parse http messages + try: + messages, upgraded, tail = self._parser.feed_data(data) + except BaseException as underlying_exc: + if self.transport is not None: + # connection.release() could be called BEFORE + # data_received(), the transport is already + # closed in this case + self.transport.close() + # should_close is True after the call + self.set_exception(HttpProcessingError(), underlying_exc) + return + + self._upgraded = upgraded + + payload: Optional[StreamReader] = None + for message, payload in messages: + if message.should_close: + self._should_close = True + + self._payload = payload + + if self._skip_payload or status_code_must_be_empty_body( + message.code + ): + self.feed_data((message, EMPTY_PAYLOAD), 0) + else: + self.feed_data((message, payload), 0) + if payload is not None: + # new message(s) was processed + # register timeout handler unsubscribing + # either on end-of-stream or immediately for + # EMPTY_PAYLOAD + if payload is not EMPTY_PAYLOAD: + payload.on_eof(self._drop_timeout) + else: + self._drop_timeout() + + if tail: + if upgraded: + self.data_received(tail) + else: + self._tail = tail diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/client_reqrep.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/client_reqrep.py new file mode 100644 index 000000000..2c10da4ff --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/client_reqrep.py @@ -0,0 +1,1230 @@ +import asyncio +import codecs +import contextlib +import functools +import io +import re +import sys +import traceback +import warnings +from hashlib import md5, sha1, sha256 +from http.cookies import CookieError, Morsel, SimpleCookie +from types import MappingProxyType, TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterable, + List, + Mapping, + Optional, + Tuple, + Type, + Union, + cast, +) + +import attr +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy +from yarl import URL + +from . 
import hdrs, helpers, http, multipart, payload +from .abc import AbstractStreamWriter +from .client_exceptions import ( + ClientConnectionError, + ClientOSError, + ClientResponseError, + ContentTypeError, + InvalidURL, + ServerFingerprintMismatch, +) +from .compression_utils import HAS_BROTLI +from .formdata import FormData +from .helpers import ( + BaseTimerContext, + BasicAuth, + HeadersMixin, + TimerNoop, + basicauth_from_netrc, + netrc_from_env, + noop, + reify, + set_exception, + set_result, +) +from .http import ( + SERVER_SOFTWARE, + HttpVersion, + HttpVersion10, + HttpVersion11, + StreamWriter, +) +from .log import client_logger +from .streams import StreamReader +from .typedefs import ( + DEFAULT_JSON_DECODER, + JSONDecoder, + LooseCookies, + LooseHeaders, + RawHeaders, +) + +try: + import ssl + from ssl import SSLContext +except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] + + +__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint") + + +if TYPE_CHECKING: + from .client import ClientSession + from .connector import Connection + from .tracing import Trace + + +_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") +json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json") + + +def _gen_default_accept_encoding() -> str: + return "gzip, deflate, br" if HAS_BROTLI else "gzip, deflate" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ContentDisposition: + type: Optional[str] + parameters: "MappingProxyType[str, str]" + filename: Optional[str] + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class RequestInfo: + url: URL + method: str + headers: "CIMultiDictProxy[str]" + real_url: URL = attr.ib() + + @real_url.default + def real_url_default(self) -> URL: + return self.url + + +class Fingerprint: + HASHFUNC_BY_DIGESTLEN = { + 16: md5, + 20: sha1, + 32: sha256, + } + + def __init__(self, fingerprint: bytes) -> None: + digestlen = len(fingerprint) + hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(digestlen) + if not hashfunc: + raise ValueError("fingerprint has invalid length") + elif hashfunc is md5 or hashfunc is sha1: + raise ValueError( + "md5 and sha1 are insecure and " "not supported. Use sha256." 
+ ) + self._hashfunc = hashfunc + self._fingerprint = fingerprint + + @property + def fingerprint(self) -> bytes: + return self._fingerprint + + def check(self, transport: asyncio.Transport) -> None: + if not transport.get_extra_info("sslcontext"): + return + sslobj = transport.get_extra_info("ssl_object") + cert = sslobj.getpeercert(binary_form=True) + got = self._hashfunc(cert).digest() + if got != self._fingerprint: + host, port, *_ = transport.get_extra_info("peername") + raise ServerFingerprintMismatch(self._fingerprint, got, host, port) + + +if ssl is not None: + SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None)) +else: # pragma: no cover + SSL_ALLOWED_TYPES = (bool, type(None)) + + +def _merge_ssl_params( + ssl: Union["SSLContext", bool, Fingerprint], + verify_ssl: Optional[bool], + ssl_context: Optional["SSLContext"], + fingerprint: Optional[bytes], +) -> Union["SSLContext", bool, Fingerprint]: + if ssl is None: + ssl = True # Double check for backwards compatibility + if verify_ssl is not None and not verify_ssl: + warnings.warn( + "verify_ssl is deprecated, use ssl=False instead", + DeprecationWarning, + stacklevel=3, + ) + if ssl is not True: + raise ValueError( + "verify_ssl, ssl_context, fingerprint and ssl " + "parameters are mutually exclusive" + ) + else: + ssl = False + if ssl_context is not None: + warnings.warn( + "ssl_context is deprecated, use ssl=context instead", + DeprecationWarning, + stacklevel=3, + ) + if ssl is not True: + raise ValueError( + "verify_ssl, ssl_context, fingerprint and ssl " + "parameters are mutually exclusive" + ) + else: + ssl = ssl_context + if fingerprint is not None: + warnings.warn( + "fingerprint is deprecated, " "use ssl=Fingerprint(fingerprint) instead", + DeprecationWarning, + stacklevel=3, + ) + if ssl is not True: + raise ValueError( + "verify_ssl, ssl_context, fingerprint and ssl " + "parameters are mutually exclusive" + ) + else: + ssl = Fingerprint(fingerprint) + if not isinstance(ssl, SSL_ALLOWED_TYPES): + raise TypeError( + "ssl should be SSLContext, bool, Fingerprint or None, " + "got {!r} instead.".format(ssl) + ) + return ssl + + +@attr.s(auto_attribs=True, slots=True, frozen=True) +class ConnectionKey: + # the key should contain an information about used proxy / TLS + # to prevent reusing wrong connections from a pool + host: str + port: Optional[int] + is_ssl: bool + ssl: Union[SSLContext, bool, Fingerprint] + proxy: Optional[URL] + proxy_auth: Optional[BasicAuth] + proxy_headers_hash: Optional[int] # hash(CIMultiDict) + + +def _is_expected_content_type( + response_content_type: str, expected_content_type: str +) -> bool: + if expected_content_type == "application/json": + return json_re.match(response_content_type) is not None + return expected_content_type in response_content_type + + +class ClientRequest: + GET_METHODS = { + hdrs.METH_GET, + hdrs.METH_HEAD, + hdrs.METH_OPTIONS, + hdrs.METH_TRACE, + } + POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} + ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE}) + + DEFAULT_HEADERS = { + hdrs.ACCEPT: "*/*", + hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(), + } + + # Type of body depends on PAYLOAD_REGISTRY, which is dynamic. + body: Any = b"" + auth = None + response = None + + __writer = None # async task for streaming data + _continue = None # waiter future for '100 Continue' response + + # N.B. 
+ # Adding __del__ method with self._writer closing doesn't make sense + # because _writer is instance method, thus it keeps a reference to self. + # Until writer has finished finalizer will not be called. + + def __init__( + self, + method: str, + url: URL, + *, + params: Optional[Mapping[str, str]] = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Iterable[str] = frozenset(), + data: Any = None, + cookies: Optional[LooseCookies] = None, + auth: Optional[BasicAuth] = None, + version: http.HttpVersion = http.HttpVersion11, + compress: Optional[str] = None, + chunked: Optional[bool] = None, + expect100: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + response_class: Optional[Type["ClientResponse"]] = None, + proxy: Optional[URL] = None, + proxy_auth: Optional[BasicAuth] = None, + timer: Optional[BaseTimerContext] = None, + session: Optional["ClientSession"] = None, + ssl: Union[SSLContext, bool, Fingerprint] = True, + proxy_headers: Optional[LooseHeaders] = None, + traces: Optional[List["Trace"]] = None, + trust_env: bool = False, + server_hostname: Optional[str] = None, + ): + if loop is None: + loop = asyncio.get_event_loop() + + match = _CONTAINS_CONTROL_CHAR_RE.search(method) + if match: + raise ValueError( + f"Method cannot contain non-token characters {method!r} " + "(found at least {match.group()!r})" + ) + + assert isinstance(url, URL), url + assert isinstance(proxy, (URL, type(None))), proxy + # FIXME: session is None in tests only, need to fix tests + # assert session is not None + self._session = cast("ClientSession", session) + if params: + q = MultiDict(url.query) + url2 = url.with_query(params) + q.extend(url2.query) + url = url.with_query(q) + self.original_url = url + self.url = url.with_fragment(None) + self.method = method.upper() + self.chunked = chunked + self.compress = compress + self.loop = loop + self.length = None + if response_class is None: + real_response_class = ClientResponse + else: + real_response_class = response_class + self.response_class: Type[ClientResponse] = real_response_class + self._timer = timer if timer is not None else TimerNoop() + self._ssl = ssl if ssl is not None else True + self.server_hostname = server_hostname + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + self.update_version(version) + self.update_host(url) + self.update_headers(headers) + self.update_auto_headers(skip_auto_headers) + self.update_cookies(cookies) + self.update_content_encoding(data) + self.update_auth(auth, trust_env) + self.update_proxy(proxy, proxy_auth, proxy_headers) + + self.update_body_from_data(data) + if data is not None or self.method not in self.GET_METHODS: + self.update_transfer_encoding() + self.update_expect_continue(expect100) + if traces is None: + traces = [] + self._traces = traces + + def __reset_writer(self, _: object = None) -> None: + self.__writer = None + + @property + def _writer(self) -> Optional["asyncio.Task[None]"]: + return self.__writer + + @_writer.setter + def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: + if self.__writer is not None: + self.__writer.remove_done_callback(self.__reset_writer) + self.__writer = writer + if writer is None: + return + if writer.done(): + # The writer is already done, so we can reset it immediately. 
+ self.__reset_writer() + else: + writer.add_done_callback(self.__reset_writer) + + def is_ssl(self) -> bool: + return self.url.scheme in ("https", "wss") + + @property + def ssl(self) -> Union["SSLContext", bool, Fingerprint]: + return self._ssl + + @property + def connection_key(self) -> ConnectionKey: + proxy_headers = self.proxy_headers + if proxy_headers: + h: Optional[int] = hash(tuple((k, v) for k, v in proxy_headers.items())) + else: + h = None + return ConnectionKey( + self.host, + self.port, + self.is_ssl(), + self.ssl, + self.proxy, + self.proxy_auth, + h, + ) + + @property + def host(self) -> str: + ret = self.url.raw_host + assert ret is not None + return ret + + @property + def port(self) -> Optional[int]: + return self.url.port + + @property + def request_info(self) -> RequestInfo: + headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers) + return RequestInfo(self.url, self.method, headers, self.original_url) + + def update_host(self, url: URL) -> None: + """Update destination host, port and connection type (ssl).""" + # get host/port + if not url.raw_host: + raise InvalidURL(url) + + # basic auth info + username, password = url.user, url.password + if username: + self.auth = helpers.BasicAuth(username, password or "") + + def update_version(self, version: Union[http.HttpVersion, str]) -> None: + """Convert request version to two elements tuple. + + parser HTTP version '1.1' => (1, 1) + """ + if isinstance(version, str): + v = [part.strip() for part in version.split(".", 1)] + try: + version = http.HttpVersion(int(v[0]), int(v[1])) + except ValueError: + raise ValueError( + f"Can not parse http version number: {version}" + ) from None + self.version = version + + def update_headers(self, headers: Optional[LooseHeaders]) -> None: + """Update request headers.""" + self.headers: CIMultiDict[str] = CIMultiDict() + + # add host + netloc = cast(str, self.url.raw_host) + if helpers.is_ipv6_address(netloc): + netloc = f"[{netloc}]" + # See https://github.com/aio-libs/aiohttp/issues/3636. 
+ netloc = netloc.rstrip(".") + if self.url.port is not None and not self.url.is_default_port(): + netloc += ":" + str(self.url.port) + self.headers[hdrs.HOST] = netloc + + if headers: + if isinstance(headers, (dict, MultiDictProxy, MultiDict)): + headers = headers.items() + + for key, value in headers: # type: ignore[misc] + # A special case for Host header + if key.lower() == "host": + self.headers[key] = value + else: + self.headers.add(key, value) + + def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None: + self.skip_auto_headers = CIMultiDict( + (hdr, None) for hdr in sorted(skip_auto_headers) + ) + used_headers = self.headers.copy() + used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type] + + for hdr, val in self.DEFAULT_HEADERS.items(): + if hdr not in used_headers: + self.headers.add(hdr, val) + + if hdrs.USER_AGENT not in used_headers: + self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE + + def update_cookies(self, cookies: Optional[LooseCookies]) -> None: + """Update request cookies header.""" + if not cookies: + return + + c = SimpleCookie() + if hdrs.COOKIE in self.headers: + c.load(self.headers.get(hdrs.COOKIE, "")) + del self.headers[hdrs.COOKIE] + + if isinstance(cookies, Mapping): + iter_cookies = cookies.items() + else: + iter_cookies = cookies # type: ignore[assignment] + for name, value in iter_cookies: + if isinstance(value, Morsel): + # Preserve coded_value + mrsl_val = value.get(value.key, Morsel()) + mrsl_val.set(value.key, value.value, value.coded_value) + c[name] = mrsl_val + else: + c[name] = value # type: ignore[assignment] + + self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip() + + def update_content_encoding(self, data: Any) -> None: + """Set request content encoding.""" + if data is None: + return + + enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower() + if enc: + if self.compress: + raise ValueError( + "compress can not be set " "if Content-Encoding header is set" + ) + elif self.compress: + if not isinstance(self.compress, str): + self.compress = "deflate" + self.headers[hdrs.CONTENT_ENCODING] = self.compress + self.chunked = True # enable chunked, no need to deal with length + + def update_transfer_encoding(self) -> None: + """Analyze transfer-encoding header.""" + te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower() + + if "chunked" in te: + if self.chunked: + raise ValueError( + "chunked can not be set " + 'if "Transfer-Encoding: chunked" header is set' + ) + + elif self.chunked: + if hdrs.CONTENT_LENGTH in self.headers: + raise ValueError( + "chunked can not be set " "if Content-Length header is set" + ) + + self.headers[hdrs.TRANSFER_ENCODING] = "chunked" + else: + if hdrs.CONTENT_LENGTH not in self.headers: + self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body)) + + def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None: + """Set basic auth.""" + if auth is None: + auth = self.auth + if auth is None and trust_env and self.url.host is not None: + netrc_obj = netrc_from_env() + with contextlib.suppress(LookupError): + auth = basicauth_from_netrc(netrc_obj, self.url.host) + if auth is None: + return + + if not isinstance(auth, helpers.BasicAuth): + raise TypeError("BasicAuth() tuple is required instead") + + self.headers[hdrs.AUTHORIZATION] = auth.encode() + + def update_body_from_data(self, body: Any) -> None: + if body is None: + return + + # FormData + if isinstance(body, FormData): + body = body() + + try: + body = payload.PAYLOAD_REGISTRY.get(body, disposition=None) + 
except payload.LookupError: + body = FormData(body)() + + self.body = body + + # enable chunked encoding if needed + if not self.chunked: + if hdrs.CONTENT_LENGTH not in self.headers: + size = body.size + if size is None: + self.chunked = True + else: + if hdrs.CONTENT_LENGTH not in self.headers: + self.headers[hdrs.CONTENT_LENGTH] = str(size) + + # copy payload headers + assert body.headers + for key, value in body.headers.items(): + if key in self.headers: + continue + if key in self.skip_auto_headers: + continue + self.headers[key] = value + + def update_expect_continue(self, expect: bool = False) -> None: + if expect: + self.headers[hdrs.EXPECT] = "100-continue" + elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue": + expect = True + + if expect: + self._continue = self.loop.create_future() + + def update_proxy( + self, + proxy: Optional[URL], + proxy_auth: Optional[BasicAuth], + proxy_headers: Optional[LooseHeaders], + ) -> None: + if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): + raise ValueError("proxy_auth must be None or BasicAuth() tuple") + self.proxy = proxy + self.proxy_auth = proxy_auth + if proxy_headers is not None and not isinstance( + proxy_headers, (MultiDict, MultiDictProxy) + ): + proxy_headers = CIMultiDict(proxy_headers) + self.proxy_headers = proxy_headers + + def keep_alive(self) -> bool: + if self.version < HttpVersion10: + # keep alive not supported at all + return False + if self.version == HttpVersion10: + if self.headers.get(hdrs.CONNECTION) == "keep-alive": + return True + else: # no headers means we close for Http 1.0 + return False + elif self.headers.get(hdrs.CONNECTION) == "close": + return False + + return True + + async def write_bytes( + self, writer: AbstractStreamWriter, conn: "Connection" + ) -> None: + """Support coroutines that yields bytes objects.""" + # 100 response + if self._continue is not None: + try: + await writer.drain() + await self._continue + except asyncio.CancelledError: + return + + protocol = conn.protocol + assert protocol is not None + try: + if isinstance(self.body, payload.Payload): + await self.body.write(writer) + else: + if isinstance(self.body, (bytes, bytearray)): + self.body = (self.body,) + + for chunk in self.body: + await writer.write(chunk) + except OSError as underlying_exc: + reraised_exc = underlying_exc + + exc_is_not_timeout = underlying_exc.errno is not None or not isinstance( + underlying_exc, asyncio.TimeoutError + ) + if exc_is_not_timeout: + reraised_exc = ClientOSError( + underlying_exc.errno, + f"Can not write request body for {self.url !s}", + ) + + set_exception(protocol, reraised_exc, underlying_exc) + except asyncio.CancelledError: + await writer.write_eof() + except Exception as underlying_exc: + set_exception( + protocol, + ClientConnectionError( + f"Failed to send bytes into the underlying connection {conn !s}", + ), + underlying_exc, + ) + else: + await writer.write_eof() + protocol.start_timeout() + + async def send(self, conn: "Connection") -> "ClientResponse": + # Specify request target: + # - CONNECT request must send authority form URI + # - not CONNECT proxy must send absolute form URI + # - most common is origin form URI + if self.method == hdrs.METH_CONNECT: + connect_host = self.url.raw_host + assert connect_host is not None + if helpers.is_ipv6_address(connect_host): + connect_host = f"[{connect_host}]" + path = f"{connect_host}:{self.url.port}" + elif self.proxy and not self.is_ssl(): + path = str(self.url) + else: + path = self.url.raw_path + if 
self.url.raw_query_string: + path += "?" + self.url.raw_query_string + + protocol = conn.protocol + assert protocol is not None + writer = StreamWriter( + protocol, + self.loop, + on_chunk_sent=functools.partial( + self._on_chunk_request_sent, self.method, self.url + ), + on_headers_sent=functools.partial( + self._on_headers_request_sent, self.method, self.url + ), + ) + + if self.compress: + writer.enable_compression(self.compress) + + if self.chunked is not None: + writer.enable_chunking() + + # set default content-type + if ( + self.method in self.POST_METHODS + and hdrs.CONTENT_TYPE not in self.skip_auto_headers + and hdrs.CONTENT_TYPE not in self.headers + ): + self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream" + + # set the connection header + connection = self.headers.get(hdrs.CONNECTION) + if not connection: + if self.keep_alive(): + if self.version == HttpVersion10: + connection = "keep-alive" + else: + if self.version == HttpVersion11: + connection = "close" + + if connection is not None: + self.headers[hdrs.CONNECTION] = connection + + # status + headers + status_line = "{0} {1} HTTP/{v.major}.{v.minor}".format( + self.method, path, v=self.version + ) + await writer.write_headers(status_line, self.headers) + coro = self.write_bytes(writer, conn) + + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to write + # bytes immediately to avoid having to schedule + # the task on the event loop. + task = asyncio.Task(coro, loop=self.loop, eager_start=True) + else: + task = self.loop.create_task(coro) + + self._writer = task + response_class = self.response_class + assert response_class is not None + self.response = response_class( + self.method, + self.original_url, + writer=self._writer, + continue100=self._continue, + timer=self._timer, + request_info=self.request_info, + traces=self._traces, + loop=self.loop, + session=self._session, + ) + return self.response + + async def close(self) -> None: + if self._writer is not None: + with contextlib.suppress(asyncio.CancelledError): + await self._writer + + def terminate(self) -> None: + if self._writer is not None: + if not self.loop.is_closed(): + self._writer.cancel() + self._writer.remove_done_callback(self.__reset_writer) + self._writer = None + + async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None: + for trace in self._traces: + await trace.send_request_chunk_sent(method, url, chunk) + + async def _on_headers_request_sent( + self, method: str, url: URL, headers: "CIMultiDict[str]" + ) -> None: + for trace in self._traces: + await trace.send_request_headers(method, url, headers) + + +class ClientResponse(HeadersMixin): + + # Some of these attributes are None when created, + # but will be set by the start() method. + # As the end user will likely never see the None values, we cheat the types below. 
+ # from the Status-Line of the response + version: Optional[HttpVersion] = None # HTTP-Version + status: int = None # type: ignore[assignment] # Status-Code + reason: Optional[str] = None # Reason-Phrase + + content: StreamReader = None # type: ignore[assignment] # Payload stream + _headers: CIMultiDictProxy[str] = None # type: ignore[assignment] + _raw_headers: RawHeaders = None # type: ignore[assignment] + + _connection = None # current connection + _source_traceback: Optional[traceback.StackSummary] = None + # set up by ClientRequest after ClientResponse object creation + # post-init stage allows to not change ctor signature + _closed = True # to allow __del__ for non-initialized properly response + _released = False + __writer = None + + def __init__( + self, + method: str, + url: URL, + *, + writer: "asyncio.Task[None]", + continue100: Optional["asyncio.Future[bool]"], + timer: BaseTimerContext, + request_info: RequestInfo, + traces: List["Trace"], + loop: asyncio.AbstractEventLoop, + session: "ClientSession", + ) -> None: + assert isinstance(url, URL) + + self.method = method + self.cookies = SimpleCookie() + + self._real_url = url + self._url = url.with_fragment(None) + self._body: Any = None + self._writer: Optional[asyncio.Task[None]] = writer + self._continue = continue100 # None by default + self._closed = True + self._history: Tuple[ClientResponse, ...] = () + self._request_info = request_info + self._timer = timer if timer is not None else TimerNoop() + self._cache: Dict[str, Any] = {} + self._traces = traces + self._loop = loop + # store a reference to session #1985 + self._session: Optional[ClientSession] = session + # Save reference to _resolve_charset, so that get_encoding() will still + # work after the response has finished reading the body. + if session is None: + # TODO: Fix session=None in tests (see ClientRequest.__init__). + self._resolve_charset: Callable[["ClientResponse", bytes], str] = ( + lambda *_: "utf-8" + ) + else: + self._resolve_charset = session._resolve_charset + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + def __reset_writer(self, _: object = None) -> None: + self.__writer = None + + @property + def _writer(self) -> Optional["asyncio.Task[None]"]: + return self.__writer + + @_writer.setter + def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: + if self.__writer is not None: + self.__writer.remove_done_callback(self.__reset_writer) + self.__writer = writer + if writer is None: + return + if writer.done(): + # The writer is already done, so we can reset it immediately. 
+            self.__reset_writer()
+        else:
+            writer.add_done_callback(self.__reset_writer)
+
+    @reify
+    def url(self) -> URL:
+        return self._url
+
+    @reify
+    def url_obj(self) -> URL:
+        warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
+        return self._url
+
+    @reify
+    def real_url(self) -> URL:
+        return self._real_url
+
+    @reify
+    def host(self) -> str:
+        assert self._url.host is not None
+        return self._url.host
+
+    @reify
+    def headers(self) -> "CIMultiDictProxy[str]":
+        return self._headers
+
+    @reify
+    def raw_headers(self) -> RawHeaders:
+        return self._raw_headers
+
+    @reify
+    def request_info(self) -> RequestInfo:
+        return self._request_info
+
+    @reify
+    def content_disposition(self) -> Optional[ContentDisposition]:
+        raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
+        if raw is None:
+            return None
+        disposition_type, params_dct = multipart.parse_content_disposition(raw)
+        params = MappingProxyType(params_dct)
+        filename = multipart.content_disposition_filename(params)
+        return ContentDisposition(disposition_type, params, filename)
+
+    def __del__(self, _warnings: Any = warnings) -> None:
+        if self._closed:
+            return
+
+        if self._connection is not None:
+            self._connection.release()
+            self._cleanup_writer()
+
+            if self._loop.get_debug():
+                kwargs = {"source": self}
+                _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
+                context = {"client_response": self, "message": "Unclosed response"}
+                if self._source_traceback:
+                    context["source_traceback"] = self._source_traceback
+                self._loop.call_exception_handler(context)
+
+    def __repr__(self) -> str:
+        out = io.StringIO()
+        ascii_encodable_url = str(self.url)
+        if self.reason:
+            ascii_encodable_reason = self.reason.encode(
+                "ascii", "backslashreplace"
+            ).decode("ascii")
+        else:
+            ascii_encodable_reason = "None"
+        print(
+            "<ClientResponse({}) [{} {}]>".format(
+                ascii_encodable_url, self.status, ascii_encodable_reason
+            ),
+            file=out,
+        )
+        print(self.headers, file=out)
+        return out.getvalue()
+
+    @property
+    def connection(self) -> Optional["Connection"]:
+        return self._connection
+
+    @reify
+    def history(self) -> Tuple["ClientResponse", ...]:
+        """A sequence of responses, if redirects occurred."""
+        return self._history
+
+    @reify
+    def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
+        links_str = ", ".join(self.headers.getall("link", []))
+
+        if not links_str:
+            return MultiDictProxy(MultiDict())
+
+        links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()
+
+        for val in re.split(r",(?=\s*<)", links_str):
+            match = re.match(r"\s*<(.*)>(.*)", val)
+            if match is None:  # pragma: no cover
+                # the check exists to suppress mypy error
+                continue
+            url, params_str = match.groups()
+            params = params_str.split(";")[1:]
+
+            link: MultiDict[Union[str, URL]] = MultiDict()
+
+            for param in params:
+                match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
+                if match is None:  # pragma: no cover
+                    # the check exists to suppress mypy error
+                    continue
+                key, _, value, _ = match.groups()
+
+                link.add(key, value)
+
+            key = link.get("rel", url)
+
+            link.add("url", self.url.join(URL(url)))
+
+            links.add(str(key), MultiDictProxy(link))
+
+        return MultiDictProxy(links)
+
+    async def start(self, connection: "Connection") -> "ClientResponse":
+        """Start response processing."""
+        self._closed = False
+        self._protocol = connection.protocol
+        self._connection = connection
+
+        with self._timer:
+            while True:
+                # read response
+                try:
+                    protocol = self._protocol
+                    message, payload = await protocol.read()  # type: 
ignore[union-attr] + except http.HttpProcessingError as exc: + raise ClientResponseError( + self.request_info, + self.history, + status=exc.code, + message=exc.message, + headers=exc.headers, + ) from exc + + if message.code < 100 or message.code > 199 or message.code == 101: + break + + if self._continue is not None: + set_result(self._continue, True) + self._continue = None + + # payload eof handler + payload.on_eof(self._response_eof) + + # response status + self.version = message.version + self.status = message.code + self.reason = message.reason + + # headers + self._headers = message.headers # type is CIMultiDictProxy + self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes] + + # payload + self.content = payload + + # cookies + for hdr in self.headers.getall(hdrs.SET_COOKIE, ()): + try: + self.cookies.load(hdr) + except CookieError as exc: + client_logger.warning("Can not load response cookies: %s", exc) + return self + + def _response_eof(self) -> None: + if self._closed: + return + + # protocol could be None because connection could be detached + protocol = self._connection and self._connection.protocol + if protocol is not None and protocol.upgraded: + return + + self._closed = True + self._cleanup_writer() + self._release_connection() + + @property + def closed(self) -> bool: + return self._closed + + def close(self) -> None: + if not self._released: + self._notify_content() + + self._closed = True + if self._loop is None or self._loop.is_closed(): + return + + self._cleanup_writer() + if self._connection is not None: + self._connection.close() + self._connection = None + + def release(self) -> Any: + if not self._released: + self._notify_content() + + self._closed = True + + self._cleanup_writer() + self._release_connection() + return noop() + + @property + def ok(self) -> bool: + """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not. + + This is **not** a check for ``200 OK`` but a check that the response + status is under 400. 
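+
+ For example, a ``304 Not Modified`` response is ``ok`` while a
+ ``404 Not Found`` response is not.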
+ """ + return 400 > self.status + + def raise_for_status(self) -> None: + if not self.ok: + # reason should always be not None for a started response + assert self.reason is not None + self.release() + raise ClientResponseError( + self.request_info, + self.history, + status=self.status, + message=self.reason, + headers=self.headers, + ) + + def _release_connection(self) -> None: + if self._connection is not None: + if self._writer is None: + self._connection.release() + self._connection = None + else: + self._writer.add_done_callback(lambda f: self._release_connection()) + + async def _wait_released(self) -> None: + if self._writer is not None: + await self._writer + self._release_connection() + + def _cleanup_writer(self) -> None: + if self._writer is not None: + self._writer.cancel() + self._session = None + + def _notify_content(self) -> None: + content = self.content + if content and content.exception() is None: + set_exception(content, ClientConnectionError("Connection closed")) + self._released = True + + async def wait_for_close(self) -> None: + if self._writer is not None: + await self._writer + self.release() + + async def read(self) -> bytes: + """Read response payload.""" + if self._body is None: + try: + self._body = await self.content.read() + for trace in self._traces: + await trace.send_response_chunk_received( + self.method, self.url, self._body + ) + except BaseException: + self.close() + raise + elif self._released: # Response explicitly released + raise ClientConnectionError("Connection closed") + + protocol = self._connection and self._connection.protocol + if protocol is None or not protocol.upgraded: + await self._wait_released() # Underlying connection released + return self._body # type: ignore[no-any-return] + + def get_encoding(self) -> str: + ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() + mimetype = helpers.parse_mimetype(ctype) + + encoding = mimetype.parameters.get("charset") + if encoding: + with contextlib.suppress(LookupError): + return codecs.lookup(encoding).name + + if mimetype.type == "application" and ( + mimetype.subtype == "json" or mimetype.subtype == "rdap" + ): + # RFC 7159 states that the default encoding is UTF-8. 
+ # RFC 7483 defines application/rdap+json + return "utf-8" + + if self._body is None: + raise RuntimeError( + "Cannot compute fallback encoding of a not yet read body" + ) + + return self._resolve_charset(self, self._body) + + async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str: + """Read response payload and decode.""" + if self._body is None: + await self.read() + + if encoding is None: + encoding = self.get_encoding() + + return self._body.decode( # type: ignore[no-any-return,union-attr] + encoding, errors=errors + ) + + async def json( + self, + *, + encoding: Optional[str] = None, + loads: JSONDecoder = DEFAULT_JSON_DECODER, + content_type: Optional[str] = "application/json", + ) -> Any: + """Read and decodes JSON response.""" + if self._body is None: + await self.read() + + if content_type: + ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() + if not _is_expected_content_type(ctype, content_type): + raise ContentTypeError( + self.request_info, + self.history, + message=( + "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype + ), + headers=self.headers, + ) + + stripped = self._body.strip() # type: ignore[union-attr] + if not stripped: + return None + + if encoding is None: + encoding = self.get_encoding() + + return loads(stripped.decode(encoding)) + + async def __aenter__(self) -> "ClientResponse": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + # similar to _RequestContextManager, we do not need to check + # for exceptions, response object can close connection + # if state is broken + self.release() + await self.wait_for_close() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/client_ws.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/client_ws.py new file mode 100644 index 000000000..247f62c75 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/client_ws.py @@ -0,0 +1,370 @@ +"""WebSocket client for asyncio.""" + +import asyncio +import sys +from typing import Any, Optional, cast + +from .client_exceptions import ClientError, ServerTimeoutError +from .client_reqrep import ClientResponse +from .helpers import calculate_timeout_when, set_result +from .http import ( + WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE, + WebSocketError, + WSCloseCode, + WSMessage, + WSMsgType, +) +from .http_websocket import WebSocketWriter # WSMessage +from .streams import EofStream, FlowControlDataQueue +from .typedefs import ( + DEFAULT_JSON_DECODER, + DEFAULT_JSON_ENCODER, + JSONDecoder, + JSONEncoder, +) + +if sys.version_info >= (3, 11): + import asyncio as async_timeout +else: + import async_timeout + + +class ClientWebSocketResponse: + def __init__( + self, + reader: "FlowControlDataQueue[WSMessage]", + writer: WebSocketWriter, + protocol: Optional[str], + response: ClientResponse, + timeout: float, + autoclose: bool, + autoping: bool, + loop: asyncio.AbstractEventLoop, + *, + receive_timeout: Optional[float] = None, + heartbeat: Optional[float] = None, + compress: int = 0, + client_notakeover: bool = False, + ) -> None: + self._response = response + self._conn = response.connection + + self._writer = writer + self._reader = reader + self._protocol = protocol + self._closed = False + self._closing = False + self._close_code: Optional[int] = None + self._timeout = timeout + self._receive_timeout = receive_timeout + self._autoclose = autoclose + self._autoping = 
autoping + self._heartbeat = heartbeat + self._heartbeat_cb: Optional[asyncio.TimerHandle] = None + self._heartbeat_when: float = 0.0 + if heartbeat is not None: + self._pong_heartbeat = heartbeat / 2.0 + self._pong_response_cb: Optional[asyncio.TimerHandle] = None + self._loop = loop + self._waiting: bool = False + self._close_wait: Optional[asyncio.Future[None]] = None + self._exception: Optional[BaseException] = None + self._compress = compress + self._client_notakeover = client_notakeover + self._ping_task: Optional[asyncio.Task[None]] = None + + self._reset_heartbeat() + + def _cancel_heartbeat(self) -> None: + self._cancel_pong_response_cb() + if self._heartbeat_cb is not None: + self._heartbeat_cb.cancel() + self._heartbeat_cb = None + if self._ping_task is not None: + self._ping_task.cancel() + self._ping_task = None + + def _cancel_pong_response_cb(self) -> None: + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = None + + def _reset_heartbeat(self) -> None: + if self._heartbeat is None: + return + self._cancel_pong_response_cb() + loop = self._loop + assert loop is not None + conn = self._conn + timeout_ceil_threshold = ( + conn._connector._timeout_ceil_threshold if conn is not None else 5 + ) + now = loop.time() + when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold) + self._heartbeat_when = when + if self._heartbeat_cb is None: + # We do not cancel the previous heartbeat_cb here because + # it generates a significant amount of TimerHandle churn + # which causes asyncio to rebuild the heap frequently. + # Instead _send_heartbeat() will reschedule the next + # heartbeat if it fires too early. + self._heartbeat_cb = loop.call_at(when, self._send_heartbeat) + + def _send_heartbeat(self) -> None: + self._heartbeat_cb = None + loop = self._loop + now = loop.time() + if now < self._heartbeat_when: + # Heartbeat fired too early, reschedule + self._heartbeat_cb = loop.call_at( + self._heartbeat_when, self._send_heartbeat + ) + return + + conn = self._conn + timeout_ceil_threshold = ( + conn._connector._timeout_ceil_threshold if conn is not None else 5 + ) + when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold) + self._cancel_pong_response_cb() + self._pong_response_cb = loop.call_at(when, self._pong_not_received) + + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to send the ping + # immediately to avoid having to schedule + # the task on the event loop. + ping_task = asyncio.Task(self._writer.ping(), loop=loop, eager_start=True) + else: + ping_task = loop.create_task(self._writer.ping()) + + if not ping_task.done(): + self._ping_task = ping_task + ping_task.add_done_callback(self._ping_task_done) + + def _ping_task_done(self, task: "asyncio.Task[None]") -> None: + """Callback for when the ping task completes.""" + self._ping_task = None + + def _pong_not_received(self) -> None: + if not self._closed: + self._set_closed() + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = ServerTimeoutError() + self._response.close() + if self._waiting and not self._closing: + self._reader.feed_data( + WSMessage(WSMsgType.ERROR, self._exception, None) + ) + + def _set_closed(self) -> None: + """Set the connection to closed. + + Cancel any heartbeat timers and set the closed flag. + """ + self._closed = True + self._cancel_heartbeat() + + def _set_closing(self) -> None: + """Set the connection to closing. + + Cancel any heartbeat timers and set the closing flag. 
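+
+ A closing connection is still finishing the close handshake;
+ a closed connection is fully shut down.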
+ """ + self._closing = True + self._cancel_heartbeat() + + @property + def closed(self) -> bool: + return self._closed + + @property + def close_code(self) -> Optional[int]: + return self._close_code + + @property + def protocol(self) -> Optional[str]: + return self._protocol + + @property + def compress(self) -> int: + return self._compress + + @property + def client_notakeover(self) -> bool: + return self._client_notakeover + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """extra info from connection transport""" + conn = self._response.connection + if conn is None: + return default + transport = conn.transport + if transport is None: + return default + return transport.get_extra_info(name, default) + + def exception(self) -> Optional[BaseException]: + return self._exception + + async def ping(self, message: bytes = b"") -> None: + await self._writer.ping(message) + + async def pong(self, message: bytes = b"") -> None: + await self._writer.pong(message) + + async def send_str(self, data: str, compress: Optional[int] = None) -> None: + if not isinstance(data, str): + raise TypeError("data argument must be str (%r)" % type(data)) + await self._writer.send(data, binary=False, compress=compress) + + async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None: + if not isinstance(data, (bytes, bytearray, memoryview)): + raise TypeError("data argument must be byte-ish (%r)" % type(data)) + await self._writer.send(data, binary=True, compress=compress) + + async def send_json( + self, + data: Any, + compress: Optional[int] = None, + *, + dumps: JSONEncoder = DEFAULT_JSON_ENCODER, + ) -> None: + await self.send_str(dumps(data), compress=compress) + + async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool: + # we need to break `receive()` cycle first, + # `close()` may be called from different task + if self._waiting and not self._closing: + assert self._loop is not None + self._close_wait = self._loop.create_future() + self._set_closing() + self._reader.feed_data(WS_CLOSING_MESSAGE, 0) + await self._close_wait + + if not self._closed: + self._set_closed() + try: + await self._writer.close(code, message) + except asyncio.CancelledError: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._response.close() + raise + except Exception as exc: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + self._response.close() + return True + + if self._close_code: + self._response.close() + return True + + while True: + try: + async with async_timeout.timeout(self._timeout): + msg = await self._reader.read() + except asyncio.CancelledError: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._response.close() + raise + except Exception as exc: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + self._response.close() + return True + + if msg.type is WSMsgType.CLOSE: + self._close_code = msg.data + self._response.close() + return True + else: + return False + + async def receive(self, timeout: Optional[float] = None) -> WSMessage: + while True: + if self._waiting: + raise RuntimeError("Concurrent call to receive() is not allowed") + + if self._closed: + return WS_CLOSED_MESSAGE + elif self._closing: + await self.close() + return WS_CLOSED_MESSAGE + + try: + self._waiting = True + try: + async with async_timeout.timeout(timeout or self._receive_timeout): + msg = await self._reader.read() + self._reset_heartbeat() + finally: + self._waiting = False + if self._close_wait: + 
set_result(self._close_wait, None) + except (asyncio.CancelledError, asyncio.TimeoutError): + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + raise + except EofStream: + self._close_code = WSCloseCode.OK + await self.close() + return WSMessage(WSMsgType.CLOSED, None, None) + except ClientError: + # Likely ServerDisconnectedError when connection is lost + self._set_closed() + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + return WS_CLOSED_MESSAGE + except WebSocketError as exc: + self._close_code = exc.code + await self.close(code=exc.code) + return WSMessage(WSMsgType.ERROR, exc, None) + except Exception as exc: + self._exception = exc + self._set_closing() + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + await self.close() + return WSMessage(WSMsgType.ERROR, exc, None) + + if msg.type is WSMsgType.CLOSE: + self._set_closing() + self._close_code = msg.data + if not self._closed and self._autoclose: + await self.close() + elif msg.type is WSMsgType.CLOSING: + self._set_closing() + elif msg.type is WSMsgType.PING and self._autoping: + await self.pong(msg.data) + continue + elif msg.type is WSMsgType.PONG and self._autoping: + continue + + return msg + + async def receive_str(self, *, timeout: Optional[float] = None) -> str: + msg = await self.receive(timeout) + if msg.type is not WSMsgType.TEXT: + raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str") + return cast(str, msg.data) + + async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: + msg = await self.receive(timeout) + if msg.type is not WSMsgType.BINARY: + raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") + return cast(bytes, msg.data) + + async def receive_json( + self, + *, + loads: JSONDecoder = DEFAULT_JSON_DECODER, + timeout: Optional[float] = None, + ) -> Any: + data = await self.receive_str(timeout=timeout) + return loads(data) + + def __aiter__(self) -> "ClientWebSocketResponse": + return self + + async def __anext__(self) -> WSMessage: + msg = await self.receive() + if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): + raise StopAsyncIteration + return msg diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/compression_utils.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/compression_utils.py new file mode 100644 index 000000000..ab4a2f1cc --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/compression_utils.py @@ -0,0 +1,159 @@ +import asyncio +import zlib +from concurrent.futures import Executor +from typing import Optional, cast + +try: + try: + import brotlicffi as brotli + except ImportError: + import brotli + + HAS_BROTLI = True +except ImportError: # pragma: no cover + HAS_BROTLI = False + +MAX_SYNC_CHUNK_SIZE = 1024 + + +def encoding_to_mode( + encoding: Optional[str] = None, + suppress_deflate_header: bool = False, +) -> int: + if encoding == "gzip": + return 16 + zlib.MAX_WBITS + + return -zlib.MAX_WBITS if suppress_deflate_header else zlib.MAX_WBITS + + +class ZlibBaseHandler: + def __init__( + self, + mode: int, + executor: Optional[Executor] = None, + max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + ): + self._mode = mode + self._executor = executor + self._max_sync_chunk_size = max_sync_chunk_size + + +class ZLibCompressor(ZlibBaseHandler): + def __init__( + self, + encoding: Optional[str] = None, + suppress_deflate_header: bool = False, + level: Optional[int] = None, + wbits: Optional[int] = None, + strategy: int = zlib.Z_DEFAULT_STRATEGY, + executor: 
Optional[Executor] = None, + max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + ): + super().__init__( + mode=( + encoding_to_mode(encoding, suppress_deflate_header) + if wbits is None + else wbits + ), + executor=executor, + max_sync_chunk_size=max_sync_chunk_size, + ) + if level is None: + self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy) + else: + self._compressor = zlib.compressobj( + wbits=self._mode, strategy=strategy, level=level + ) + self._compress_lock = asyncio.Lock() + + def compress_sync(self, data: bytes) -> bytes: + return self._compressor.compress(data) + + async def compress(self, data: bytes) -> bytes: + async with self._compress_lock: + # To ensure the stream is consistent in the event + # there are multiple writers, we need to lock + # the compressor so that only one writer can + # compress at a time. + if ( + self._max_sync_chunk_size is not None + and len(data) > self._max_sync_chunk_size + ): + return await asyncio.get_event_loop().run_in_executor( + self._executor, self.compress_sync, data + ) + return self.compress_sync(data) + + def flush(self, mode: int = zlib.Z_FINISH) -> bytes: + return self._compressor.flush(mode) + + +class ZLibDecompressor(ZlibBaseHandler): + def __init__( + self, + encoding: Optional[str] = None, + suppress_deflate_header: bool = False, + executor: Optional[Executor] = None, + max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + ): + super().__init__( + mode=encoding_to_mode(encoding, suppress_deflate_header), + executor=executor, + max_sync_chunk_size=max_sync_chunk_size, + ) + self._decompressor = zlib.decompressobj(wbits=self._mode) + + def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes: + return self._decompressor.decompress(data, max_length) + + async def decompress(self, data: bytes, max_length: int = 0) -> bytes: + if ( + self._max_sync_chunk_size is not None + and len(data) > self._max_sync_chunk_size + ): + return await asyncio.get_event_loop().run_in_executor( + self._executor, self.decompress_sync, data, max_length + ) + return self.decompress_sync(data, max_length) + + def flush(self, length: int = 0) -> bytes: + return ( + self._decompressor.flush(length) + if length > 0 + else self._decompressor.flush() + ) + + @property + def eof(self) -> bool: + return self._decompressor.eof + + @property + def unconsumed_tail(self) -> bytes: + return self._decompressor.unconsumed_tail + + @property + def unused_data(self) -> bytes: + return self._decompressor.unused_data + + +class BrotliDecompressor: + # Supports both 'brotlipy' and 'Brotli' packages + # since they share an import name. The top branches + # are for 'brotlipy' and bottom branches for 'Brotli' + def __init__(self) -> None: + if not HAS_BROTLI: + raise RuntimeError( + "The brotli decompression is not available. 
" + "Please install `Brotli` module" + ) + self._obj = brotli.Decompressor() + + def decompress_sync(self, data: bytes) -> bytes: + if hasattr(self._obj, "decompress"): + return cast(bytes, self._obj.decompress(data)) + return cast(bytes, self._obj.process(data)) + + def flush(self) -> bytes: + if hasattr(self._obj, "flush"): + return cast(bytes, self._obj.flush()) + return b"" diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/connector.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/connector.py new file mode 100644 index 000000000..d4691b10e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/connector.py @@ -0,0 +1,1584 @@ +import asyncio +import functools +import random +import socket +import sys +import traceback +import warnings +from collections import defaultdict, deque +from contextlib import suppress +from http import HTTPStatus +from http.cookies import SimpleCookie +from itertools import cycle, islice +from time import monotonic +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + DefaultDict, + Dict, + Iterator, + List, + Literal, + Optional, + Sequence, + Set, + Tuple, + Type, + Union, + cast, +) + +import aiohappyeyeballs +import attr + +from . import hdrs, helpers +from .abc import AbstractResolver, ResolveResult +from .client_exceptions import ( + ClientConnectionError, + ClientConnectorCertificateError, + ClientConnectorError, + ClientConnectorSSLError, + ClientHttpProxyError, + ClientProxyConnectionError, + ServerFingerprintMismatch, + UnixClientConnectorError, + cert_errors, + ssl_errors, +) +from .client_proto import ResponseHandler +from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params +from .helpers import ceil_timeout, is_ip_address, noop, sentinel +from .locks import EventResultOrError +from .resolver import DefaultResolver + +try: + import ssl + + SSLContext = ssl.SSLContext +except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] + + +EMPTY_SCHEMA_SET = frozenset({""}) +HTTP_SCHEMA_SET = frozenset({"http", "https"}) +WS_SCHEMA_SET = frozenset({"ws", "wss"}) + +HTTP_AND_EMPTY_SCHEMA_SET = HTTP_SCHEMA_SET | EMPTY_SCHEMA_SET +HIGH_LEVEL_SCHEMA_SET = HTTP_AND_EMPTY_SCHEMA_SET | WS_SCHEMA_SET + + +__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector") + + +if TYPE_CHECKING: + from .client import ClientTimeout + from .client_reqrep import ConnectionKey + from .tracing import Trace + + +class _DeprecationWaiter: + __slots__ = ("_awaitable", "_awaited") + + def __init__(self, awaitable: Awaitable[Any]) -> None: + self._awaitable = awaitable + self._awaited = False + + def __await__(self) -> Any: + self._awaited = True + return self._awaitable.__await__() + + def __del__(self) -> None: + if not self._awaited: + warnings.warn( + "Connector.close() is a coroutine, " + "please use await connector.close()", + DeprecationWarning, + ) + + +class Connection: + + _source_traceback = None + _transport = None + + def __init__( + self, + connector: "BaseConnector", + key: "ConnectionKey", + protocol: ResponseHandler, + loop: asyncio.AbstractEventLoop, + ) -> None: + self._key = key + self._connector = connector + self._loop = loop + self._protocol: Optional[ResponseHandler] = protocol + self._callbacks: List[Callable[[], None]] = [] + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + def __repr__(self) 
-> str: + return f"Connection<{self._key}>" + + def __del__(self, _warnings: Any = warnings) -> None: + if self._protocol is not None: + kwargs = {"source": self} + _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs) + if self._loop.is_closed(): + return + + self._connector._release(self._key, self._protocol, should_close=True) + + context = {"client_connection": self, "message": "Unclosed connection"} + if self._source_traceback is not None: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + def __bool__(self) -> Literal[True]: + """Force subclasses to not be falsy, to make checks simpler.""" + return True + + @property + def loop(self) -> asyncio.AbstractEventLoop: + warnings.warn( + "connector.loop property is deprecated", DeprecationWarning, stacklevel=2 + ) + return self._loop + + @property + def transport(self) -> Optional[asyncio.Transport]: + if self._protocol is None: + return None + return self._protocol.transport + + @property + def protocol(self) -> Optional[ResponseHandler]: + return self._protocol + + def add_callback(self, callback: Callable[[], None]) -> None: + if callback is not None: + self._callbacks.append(callback) + + def _notify_release(self) -> None: + callbacks, self._callbacks = self._callbacks[:], [] + + for cb in callbacks: + with suppress(Exception): + cb() + + def close(self) -> None: + self._notify_release() + + if self._protocol is not None: + self._connector._release(self._key, self._protocol, should_close=True) + self._protocol = None + + def release(self) -> None: + self._notify_release() + + if self._protocol is not None: + self._connector._release( + self._key, self._protocol, should_close=self._protocol.should_close + ) + self._protocol = None + + @property + def closed(self) -> bool: + return self._protocol is None or not self._protocol.is_connected() + + +class _TransportPlaceholder: + """placeholder for BaseConnector.connect function""" + + def close(self) -> None: + pass + + +class BaseConnector: + """Base connector class. + + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + enable_cleanup_closed - Enables clean-up closed ssl transports. + Disabled by default. + timeout_ceil_threshold - Trigger ceiling of timeout values when + it's above timeout_ceil_threshold. + loop - Optional event loop. 
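+
+ Illustrative usage sketch (placeholder URL and limit values; uses the
+ high-level ``ClientSession`` / ``TCPConnector`` API)::
+
+ import asyncio
+ import aiohttp
+
+ async def fetch() -> bytes:
+ connector = aiohttp.TCPConnector(limit=100, limit_per_host=10)
+ async with aiohttp.ClientSession(connector=connector) as session:
+ async with session.get("https://example.org") as resp:
+ return await resp.read()
+
+ asyncio.run(fetch())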
+ """ + + _closed = True # prevent AttributeError in __del__ if ctor was failed + _source_traceback = None + + # abort transport after 2 seconds (cleanup broken connections) + _cleanup_closed_period = 2.0 + + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET + + def __init__( + self, + *, + keepalive_timeout: Union[object, None, float] = sentinel, + force_close: bool = False, + limit: int = 100, + limit_per_host: int = 0, + enable_cleanup_closed: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + timeout_ceil_threshold: float = 5, + ) -> None: + + if force_close: + if keepalive_timeout is not None and keepalive_timeout is not sentinel: + raise ValueError( + "keepalive_timeout cannot " "be set if force_close is True" + ) + else: + if keepalive_timeout is sentinel: + keepalive_timeout = 15.0 + + loop = loop or asyncio.get_running_loop() + self._timeout_ceil_threshold = timeout_ceil_threshold + + self._closed = False + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + self._conns: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]] = {} + self._limit = limit + self._limit_per_host = limit_per_host + self._acquired: Set[ResponseHandler] = set() + self._acquired_per_host: DefaultDict[ConnectionKey, Set[ResponseHandler]] = ( + defaultdict(set) + ) + self._keepalive_timeout = cast(float, keepalive_timeout) + self._force_close = force_close + + # {host_key: FIFO list of waiters} + self._waiters = defaultdict(deque) # type: ignore[var-annotated] + + self._loop = loop + self._factory = functools.partial(ResponseHandler, loop=loop) + + self.cookies = SimpleCookie() + + # start keep-alive connection cleanup task + self._cleanup_handle: Optional[asyncio.TimerHandle] = None + + # start cleanup closed transports task + self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None + self._cleanup_closed_disabled = not enable_cleanup_closed + self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = [] + self._cleanup_closed() + + def __del__(self, _warnings: Any = warnings) -> None: + if self._closed: + return + if not self._conns: + return + + conns = [repr(c) for c in self._conns.values()] + + self._close() + + kwargs = {"source": self} + _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs) + context = { + "connector": self, + "connections": conns, + "message": "Unclosed connector", + } + if self._source_traceback is not None: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + def __enter__(self) -> "BaseConnector": + warnings.warn( + '"with Connector():" is deprecated, ' + 'use "async with Connector():" instead', + DeprecationWarning, + ) + return self + + def __exit__(self, *exc: Any) -> None: + self._close() + + async def __aenter__(self) -> "BaseConnector": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + exc_traceback: Optional[TracebackType] = None, + ) -> None: + await self.close() + + @property + def force_close(self) -> bool: + """Ultimately close connection on releasing if True.""" + return self._force_close + + @property + def limit(self) -> int: + """The total number for simultaneous connections. + + If limit is 0 the connector has no limit. + The default limit size is 100. + """ + return self._limit + + @property + def limit_per_host(self) -> int: + """The limit for simultaneous connections to the same endpoint. 
+ + Endpoints are the same if they are have equal + (host, port, is_ssl) triple. + """ + return self._limit_per_host + + def _cleanup(self) -> None: + """Cleanup unused transports.""" + if self._cleanup_handle: + self._cleanup_handle.cancel() + # _cleanup_handle should be unset, otherwise _release() will not + # recreate it ever! + self._cleanup_handle = None + + now = self._loop.time() + timeout = self._keepalive_timeout + + if self._conns: + connections = {} + deadline = now - timeout + for key, conns in self._conns.items(): + alive = [] + for proto, use_time in conns: + if proto.is_connected(): + if use_time - deadline < 0: + transport = proto.transport + proto.close() + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + else: + alive.append((proto, use_time)) + else: + transport = proto.transport + proto.close() + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + + if alive: + connections[key] = alive + + self._conns = connections + + if self._conns: + self._cleanup_handle = helpers.weakref_handle( + self, + "_cleanup", + timeout, + self._loop, + timeout_ceil_threshold=self._timeout_ceil_threshold, + ) + + def _drop_acquired_per_host( + self, key: "ConnectionKey", val: ResponseHandler + ) -> None: + acquired_per_host = self._acquired_per_host + if key not in acquired_per_host: + return + conns = acquired_per_host[key] + conns.remove(val) + if not conns: + del self._acquired_per_host[key] + + def _cleanup_closed(self) -> None: + """Double confirmation for transport close. + + Some broken ssl servers may leave socket open without proper close. + """ + if self._cleanup_closed_handle: + self._cleanup_closed_handle.cancel() + + for transport in self._cleanup_closed_transports: + if transport is not None: + transport.abort() + + self._cleanup_closed_transports = [] + + if not self._cleanup_closed_disabled: + self._cleanup_closed_handle = helpers.weakref_handle( + self, + "_cleanup_closed", + self._cleanup_closed_period, + self._loop, + timeout_ceil_threshold=self._timeout_ceil_threshold, + ) + + def close(self) -> Awaitable[None]: + """Close all opened transports.""" + self._close() + return _DeprecationWaiter(noop()) + + def _close(self) -> None: + if self._closed: + return + + self._closed = True + + try: + if self._loop.is_closed(): + return + + # cancel cleanup task + if self._cleanup_handle: + self._cleanup_handle.cancel() + + # cancel cleanup close task + if self._cleanup_closed_handle: + self._cleanup_closed_handle.cancel() + + for data in self._conns.values(): + for proto, t0 in data: + proto.close() + + for proto in self._acquired: + proto.close() + + for transport in self._cleanup_closed_transports: + if transport is not None: + transport.abort() + + finally: + self._conns.clear() + self._acquired.clear() + self._waiters.clear() + self._cleanup_handle = None + self._cleanup_closed_transports.clear() + self._cleanup_closed_handle = None + + @property + def closed(self) -> bool: + """Is connector closed. + + A readonly property. + """ + return self._closed + + def _available_connections(self, key: "ConnectionKey") -> int: + """ + Return number of available connections. + + The limit, limit_per_host and the connection key are taken into account. + + If it returns less than 1 means that there are no connections + available. 
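+
+ For example (illustrative): with ``limit=100``, ``limit_per_host=2`` and
+ two connections already acquired for this key, this returns ``0``.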
+ """ + if self._limit: + # total calc available connections + available = self._limit - len(self._acquired) + + # check limit per host + if ( + self._limit_per_host + and available > 0 + and key in self._acquired_per_host + ): + acquired = self._acquired_per_host.get(key) + assert acquired is not None + available = self._limit_per_host - len(acquired) + + elif self._limit_per_host and key in self._acquired_per_host: + # check limit per host + acquired = self._acquired_per_host.get(key) + assert acquired is not None + available = self._limit_per_host - len(acquired) + else: + available = 1 + + return available + + async def connect( + self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + ) -> Connection: + """Get from pool or create new connection.""" + key = req.connection_key + available = self._available_connections(key) + + # Wait if there are no available connections or if there are/were + # waiters (i.e. don't steal connection from a waiter about to wake up) + if available <= 0 or key in self._waiters: + fut = self._loop.create_future() + + # This connection will now count towards the limit. + self._waiters[key].append(fut) + + if traces: + for trace in traces: + await trace.send_connection_queued_start() + + try: + await fut + except BaseException as e: + if key in self._waiters: + # remove a waiter even if it was cancelled, normally it's + # removed when it's notified + try: + self._waiters[key].remove(fut) + except ValueError: # fut may no longer be in list + pass + + raise e + finally: + if key in self._waiters and not self._waiters[key]: + del self._waiters[key] + + if traces: + for trace in traces: + await trace.send_connection_queued_end() + + proto = self._get(key) + if proto is None: + placeholder = cast(ResponseHandler, _TransportPlaceholder()) + self._acquired.add(placeholder) + self._acquired_per_host[key].add(placeholder) + + if traces: + for trace in traces: + await trace.send_connection_create_start() + + try: + proto = await self._create_connection(req, traces, timeout) + if self._closed: + proto.close() + raise ClientConnectionError("Connector is closed.") + except BaseException: + if not self._closed: + self._acquired.remove(placeholder) + self._drop_acquired_per_host(key, placeholder) + self._release_waiter() + raise + else: + if not self._closed: + self._acquired.remove(placeholder) + self._drop_acquired_per_host(key, placeholder) + + if traces: + for trace in traces: + await trace.send_connection_create_end() + else: + if traces: + # Acquire the connection to prevent race conditions with limits + placeholder = cast(ResponseHandler, _TransportPlaceholder()) + self._acquired.add(placeholder) + self._acquired_per_host[key].add(placeholder) + for trace in traces: + await trace.send_connection_reuseconn() + self._acquired.remove(placeholder) + self._drop_acquired_per_host(key, placeholder) + + self._acquired.add(proto) + self._acquired_per_host[key].add(proto) + return Connection(self, key, proto, self._loop) + + def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]: + try: + conns = self._conns[key] + except KeyError: + return None + + t1 = self._loop.time() + while conns: + proto, t0 = conns.pop() + if proto.is_connected(): + if t1 - t0 > self._keepalive_timeout: + transport = proto.transport + proto.close() + # only for SSL transports + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + else: + if not conns: + # The very last connection was reclaimed: drop the key + del 
self._conns[key] + return proto + else: + transport = proto.transport + proto.close() + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + + # No more connections: drop the key + del self._conns[key] + return None + + def _release_waiter(self) -> None: + """ + Iterates over all waiters until one to be released is found. + + The one to be released is not finished and + belongs to a host that has available connections. + """ + if not self._waiters: + return + + # Having the dict keys ordered this avoids to iterate + # at the same order at each call. + queues = list(self._waiters.keys()) + random.shuffle(queues) + + for key in queues: + if self._available_connections(key) < 1: + continue + + waiters = self._waiters[key] + while waiters: + waiter = waiters.popleft() + if not waiter.done(): + waiter.set_result(None) + return + + def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None: + if self._closed: + # acquired connection is already released on connector closing + return + + try: + self._acquired.remove(proto) + self._drop_acquired_per_host(key, proto) + except KeyError: # pragma: no cover + # this may be result of undetermenistic order of objects + # finalization due garbage collection. + pass + else: + self._release_waiter() + + def _release( + self, + key: "ConnectionKey", + protocol: ResponseHandler, + *, + should_close: bool = False, + ) -> None: + if self._closed: + # acquired connection is already released on connector closing + return + + self._release_acquired(key, protocol) + + if self._force_close: + should_close = True + + if should_close or protocol.should_close: + transport = protocol.transport + protocol.close() + + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + else: + conns = self._conns.get(key) + if conns is None: + conns = self._conns[key] = [] + conns.append((protocol, self._loop.time())) + + if self._cleanup_handle is None: + self._cleanup_handle = helpers.weakref_handle( + self, + "_cleanup", + self._keepalive_timeout, + self._loop, + timeout_ceil_threshold=self._timeout_ceil_threshold, + ) + + async def _create_connection( + self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + raise NotImplementedError() + + +class _DNSCacheTable: + def __init__(self, ttl: Optional[float] = None) -> None: + self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[ResolveResult], int]] = {} + self._timestamps: Dict[Tuple[str, int], float] = {} + self._ttl = ttl + + def __contains__(self, host: object) -> bool: + return host in self._addrs_rr + + def add(self, key: Tuple[str, int], addrs: List[ResolveResult]) -> None: + self._addrs_rr[key] = (cycle(addrs), len(addrs)) + + if self._ttl is not None: + self._timestamps[key] = monotonic() + + def remove(self, key: Tuple[str, int]) -> None: + self._addrs_rr.pop(key, None) + + if self._ttl is not None: + self._timestamps.pop(key, None) + + def clear(self) -> None: + self._addrs_rr.clear() + self._timestamps.clear() + + def next_addrs(self, key: Tuple[str, int]) -> List[ResolveResult]: + loop, length = self._addrs_rr[key] + addrs = list(islice(loop, length)) + # Consume one more element to shift internal state of `cycle` + next(loop) + return addrs + + def expired(self, key: Tuple[str, int]) -> bool: + if self._ttl is None: + return False + + return self._timestamps[key] + self._ttl < monotonic() + + +class TCPConnector(BaseConnector): + """TCP connector. 
+ + verify_ssl - Set to True to check ssl certifications. + fingerprint - Pass the binary sha256 + digest of the expected certificate in DER format to verify + that the certificate the server presents matches. See also + https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning + resolver - Enable DNS lookups and use this + resolver + use_dns_cache - Use memory cache for DNS lookups. + ttl_dns_cache - Max seconds having cached a DNS entry, None forever. + family - socket address family + local_addr - local tuple of (host, port) to bind socket to + + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + enable_cleanup_closed - Enables clean-up closed ssl transports. + Disabled by default. + happy_eyeballs_delay - This is the “Connection Attempt Delay” + as defined in RFC 8305. To disable + the happy eyeballs algorithm, set to None. + interleave - “First Address Family Count” as defined in RFC 8305 + loop - Optional event loop. + """ + + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"}) + + def __init__( + self, + *, + verify_ssl: bool = True, + fingerprint: Optional[bytes] = None, + use_dns_cache: bool = True, + ttl_dns_cache: Optional[int] = 10, + family: socket.AddressFamily = socket.AddressFamily.AF_UNSPEC, + ssl_context: Optional[SSLContext] = None, + ssl: Union[bool, Fingerprint, SSLContext] = True, + local_addr: Optional[Tuple[str, int]] = None, + resolver: Optional[AbstractResolver] = None, + keepalive_timeout: Union[None, float, object] = sentinel, + force_close: bool = False, + limit: int = 100, + limit_per_host: int = 0, + enable_cleanup_closed: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + timeout_ceil_threshold: float = 5, + happy_eyeballs_delay: Optional[float] = 0.25, + interleave: Optional[int] = None, + ): + super().__init__( + keepalive_timeout=keepalive_timeout, + force_close=force_close, + limit=limit, + limit_per_host=limit_per_host, + enable_cleanup_closed=enable_cleanup_closed, + loop=loop, + timeout_ceil_threshold=timeout_ceil_threshold, + ) + + self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + if resolver is None: + resolver = DefaultResolver(loop=self._loop) + self._resolver = resolver + + self._use_dns_cache = use_dns_cache + self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache) + self._throttle_dns_events: Dict[Tuple[str, int], EventResultOrError] = {} + self._family = family + self._local_addr_infos = aiohappyeyeballs.addr_to_addr_infos(local_addr) + self._happy_eyeballs_delay = happy_eyeballs_delay + self._interleave = interleave + + def close(self) -> Awaitable[None]: + """Close all ongoing DNS calls.""" + for ev in self._throttle_dns_events.values(): + ev.cancel() + + return super().close() + + @property + def family(self) -> int: + """Socket family like AF_INET.""" + return self._family + + @property + def use_dns_cache(self) -> bool: + """True if local DNS caching is enabled.""" + return self._use_dns_cache + + def clear_dns_cache( + self, host: Optional[str] = None, port: Optional[int] = None + ) -> None: + """Remove specified host/port or clear all dns local cache.""" + if host is not None and port is not None: + self._cached_hosts.remove((host, port)) + elif host is not None or port is not None: + raise ValueError("either both host and port " "or none of them are 
allowed") + else: + self._cached_hosts.clear() + + async def _resolve_host( + self, host: str, port: int, traces: Optional[Sequence["Trace"]] = None + ) -> List[ResolveResult]: + """Resolve host and return list of addresses.""" + if is_ip_address(host): + return [ + { + "hostname": host, + "host": host, + "port": port, + "family": self._family, + "proto": 0, + "flags": 0, + } + ] + + if not self._use_dns_cache: + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_start(host) + + res = await self._resolver.resolve(host, port, family=self._family) + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_end(host) + + return res + + key = (host, port) + if key in self._cached_hosts and not self._cached_hosts.expired(key): + # get result early, before any await (#4014) + result = self._cached_hosts.next_addrs(key) + + if traces: + for trace in traces: + await trace.send_dns_cache_hit(host) + return result + + # + # If multiple connectors are resolving the same host, we wait + # for the first one to resolve and then use the result for all of them. + # We use a throttle event to ensure that we only resolve the host once + # and then use the result for all the waiters. + # + # In this case we need to create a task to ensure that we can shield + # the task from cancellation as cancelling this lookup should not cancel + # the underlying lookup or else the cancel event will get broadcast to + # all the waiters across all connections. + # + resolved_host_task = asyncio.create_task( + self._resolve_host_with_throttle(key, host, port, traces) + ) + try: + return await asyncio.shield(resolved_host_task) + except asyncio.CancelledError: + + def drop_exception(fut: "asyncio.Future[List[ResolveResult]]") -> None: + with suppress(Exception, asyncio.CancelledError): + fut.result() + + resolved_host_task.add_done_callback(drop_exception) + raise + + async def _resolve_host_with_throttle( + self, + key: Tuple[str, int], + host: str, + port: int, + traces: Optional[Sequence["Trace"]], + ) -> List[ResolveResult]: + """Resolve host with a dns events throttle.""" + if key in self._throttle_dns_events: + # get event early, before any await (#4014) + event = self._throttle_dns_events[key] + if traces: + for trace in traces: + await trace.send_dns_cache_hit(host) + await event.wait() + else: + # update dict early, before any await (#4014) + self._throttle_dns_events[key] = EventResultOrError(self._loop) + if traces: + for trace in traces: + await trace.send_dns_cache_miss(host) + try: + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_start(host) + + addrs = await self._resolver.resolve(host, port, family=self._family) + if traces: + for trace in traces: + await trace.send_dns_resolvehost_end(host) + + self._cached_hosts.add(key, addrs) + self._throttle_dns_events[key].set() + except BaseException as e: + # any DNS exception, independently of the implementation + # is set for the waiters to raise the same exception. + self._throttle_dns_events[key].set(exc=e) + raise + finally: + self._throttle_dns_events.pop(key) + + return self._cached_hosts.next_addrs(key) + + async def _create_connection( + self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + """Create connection. + + Has same keyword arguments as BaseEventLoop.create_connection. 
+ """ + if req.proxy: + _, proto = await self._create_proxy_connection(req, traces, timeout) + else: + _, proto = await self._create_direct_connection(req, traces, timeout) + + return proto + + @staticmethod + @functools.lru_cache(None) + def _make_ssl_context(verified: bool) -> SSLContext: + if verified: + return ssl.create_default_context() + else: + sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + sslcontext.options |= ssl.OP_NO_SSLv2 + sslcontext.options |= ssl.OP_NO_SSLv3 + sslcontext.check_hostname = False + sslcontext.verify_mode = ssl.CERT_NONE + try: + sslcontext.options |= ssl.OP_NO_COMPRESSION + except AttributeError as attr_err: + warnings.warn( + "{!s}: The Python interpreter is compiled " + "against OpenSSL < 1.0.0. Ref: " + "https://docs.python.org/3/library/ssl.html" + "#ssl.OP_NO_COMPRESSION".format(attr_err), + ) + sslcontext.set_default_verify_paths() + return sslcontext + + def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: + """Logic to get the correct SSL context + + 0. if req.ssl is false, return None + + 1. if ssl_context is specified in req, use it + 2. if _ssl_context is specified in self, use it + 3. otherwise: + 1. if verify_ssl is not specified in req, use self.ssl_context + (will generate a default context according to self.verify_ssl) + 2. if verify_ssl is True in req, generate a default SSL context + 3. if verify_ssl is False in req, generate a SSL context that + won't verify + """ + if req.is_ssl(): + if ssl is None: # pragma: no cover + raise RuntimeError("SSL is not supported.") + sslcontext = req.ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not True: + # not verified or fingerprinted + return self._make_ssl_context(False) + sslcontext = self._ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not True: + # not verified or fingerprinted + return self._make_ssl_context(False) + return self._make_ssl_context(True) + else: + return None + + def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]: + ret = req.ssl + if isinstance(ret, Fingerprint): + return ret + ret = self._ssl + if isinstance(ret, Fingerprint): + return ret + return None + + async def _wrap_create_connection( + self, + *args: Any, + addr_infos: List[aiohappyeyeballs.AddrInfoType], + req: ClientRequest, + timeout: "ClientTimeout", + client_error: Type[Exception] = ClientConnectorError, + **kwargs: Any, + ) -> Tuple[asyncio.Transport, ResponseHandler]: + try: + async with ceil_timeout( + timeout.sock_connect, ceil_threshold=timeout.ceil_threshold + ): + sock = await aiohappyeyeballs.start_connection( + addr_infos=addr_infos, + local_addr_infos=self._local_addr_infos, + happy_eyeballs_delay=self._happy_eyeballs_delay, + interleave=self._interleave, + loop=self._loop, + ) + return await self._loop.create_connection(*args, **kwargs, sock=sock) + except cert_errors as exc: + raise ClientConnectorCertificateError(req.connection_key, exc) from exc + except ssl_errors as exc: + raise ClientConnectorSSLError(req.connection_key, exc) from exc + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise client_error(req.connection_key, exc) from exc + + async def _wrap_existing_connection( + self, + *args: Any, + req: ClientRequest, + timeout: "ClientTimeout", + client_error: Type[Exception] = ClientConnectorError, + **kwargs: Any, + ) -> Tuple[asyncio.Transport, ResponseHandler]: + try: + async with ceil_timeout( + 
timeout.sock_connect, ceil_threshold=timeout.ceil_threshold + ): + return await self._loop.create_connection(*args, **kwargs) + except cert_errors as exc: + raise ClientConnectorCertificateError(req.connection_key, exc) from exc + except ssl_errors as exc: + raise ClientConnectorSSLError(req.connection_key, exc) from exc + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise client_error(req.connection_key, exc) from exc + + def _fail_on_no_start_tls(self, req: "ClientRequest") -> None: + """Raise a :py:exc:`RuntimeError` on missing ``start_tls()``. + + It is necessary for TLS-in-TLS so that it is possible to + send HTTPS queries through HTTPS proxies. + + This doesn't affect regular HTTP requests, though. + """ + if not req.is_ssl(): + return + + proxy_url = req.proxy + assert proxy_url is not None + if proxy_url.scheme != "https": + return + + self._check_loop_for_start_tls() + + def _check_loop_for_start_tls(self) -> None: + try: + self._loop.start_tls + except AttributeError as attr_exc: + raise RuntimeError( + "An HTTPS request is being sent through an HTTPS proxy. " + "This needs support for TLS in TLS but it is not implemented " + "in your runtime for the stdlib asyncio.\n\n" + "Please upgrade to Python 3.11 or higher. For more details, " + "please see:\n" + "* https://bugs.python.org/issue37179\n" + "* https://github.com/python/cpython/pull/28073\n" + "* https://docs.aiohttp.org/en/stable/" + "client_advanced.html#proxy-support\n" + "* https://github.com/aio-libs/aiohttp/discussions/6044\n", + ) from attr_exc + + def _loop_supports_start_tls(self) -> bool: + try: + self._check_loop_for_start_tls() + except RuntimeError: + return False + else: + return True + + def _warn_about_tls_in_tls( + self, + underlying_transport: asyncio.Transport, + req: ClientRequest, + ) -> None: + """Issue a warning if the requested URL has HTTPS scheme.""" + if req.request_info.url.scheme != "https": + return + + asyncio_supports_tls_in_tls = getattr( + underlying_transport, + "_start_tls_compatible", + False, + ) + + if asyncio_supports_tls_in_tls: + return + + warnings.warn( + "An HTTPS request is being sent through an HTTPS proxy. " + "This support for TLS in TLS is known to be disabled " + "in the stdlib asyncio (Python <3.11). This is why you'll probably see " + "an error in the log below.\n\n" + "It is possible to enable it via monkeypatching. " + "For more details, see:\n" + "* https://bugs.python.org/issue37179\n" + "* https://github.com/python/cpython/pull/28073\n\n" + "You can temporarily patch this as follows:\n" + "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n" + "* https://github.com/aio-libs/aiohttp/discussions/6044\n", + RuntimeWarning, + source=self, + # Why `4`? At least 3 of the calls in the stack originate + # from the methods in this class. + stacklevel=3, + ) + + async def _start_tls_connection( + self, + underlying_transport: asyncio.Transport, + req: ClientRequest, + timeout: "ClientTimeout", + client_error: Type[Exception] = ClientConnectorError, + ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: + """Wrap the raw TCP transport with TLS.""" + tls_proto = self._factory() # Create a brand new proto for TLS + + # Safety of the `cast()` call here is based on the fact that + # internally `_get_ssl_context()` only returns `None` when + # `req.is_ssl()` evaluates to `False` which is never gonna happen + # in this code path. 
Of course, it's rather fragile + # maintainability-wise but this is to be solved separately. + sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req)) + + try: + async with ceil_timeout( + timeout.sock_connect, ceil_threshold=timeout.ceil_threshold + ): + try: + tls_transport = await self._loop.start_tls( + underlying_transport, + tls_proto, + sslcontext, + server_hostname=req.server_hostname or req.host, + ssl_handshake_timeout=timeout.total, + ) + except BaseException: + # We need to close the underlying transport since + # `start_tls()` probably failed before it had a + # chance to do this: + underlying_transport.close() + raise + except cert_errors as exc: + raise ClientConnectorCertificateError(req.connection_key, exc) from exc + except ssl_errors as exc: + raise ClientConnectorSSLError(req.connection_key, exc) from exc + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise client_error(req.connection_key, exc) from exc + except TypeError as type_err: + # Example cause looks like this: + # TypeError: transport is not supported by start_tls() + + raise ClientConnectionError( + "Cannot initialize a TLS-in-TLS connection to host " + f"{req.host!s}:{req.port:d} through an underlying connection " + f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} " + f"[{type_err!s}]" + ) from type_err + else: + if tls_transport is None: + msg = "Failed to start TLS (possibly caused by closing transport)" + raise client_error(req.connection_key, OSError(msg)) + tls_proto.connection_made( + tls_transport + ) # Kick the state machine of the new TLS protocol + + return tls_transport, tls_proto + + def _convert_hosts_to_addr_infos( + self, hosts: List[ResolveResult] + ) -> List[aiohappyeyeballs.AddrInfoType]: + """Converts the list of hosts to a list of addr_infos. + + The list of hosts is the result of a DNS lookup. The list of + addr_infos is the result of a call to `socket.getaddrinfo()`. + """ + addr_infos: List[aiohappyeyeballs.AddrInfoType] = [] + for hinfo in hosts: + host = hinfo["host"] + is_ipv6 = ":" in host + family = socket.AF_INET6 if is_ipv6 else socket.AF_INET + if self._family and self._family != family: + continue + addr = (host, hinfo["port"], 0, 0) if is_ipv6 else (host, hinfo["port"]) + addr_infos.append( + (family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", addr) + ) + return addr_infos + + async def _create_direct_connection( + self, + req: ClientRequest, + traces: List["Trace"], + timeout: "ClientTimeout", + *, + client_error: Type[Exception] = ClientConnectorError, + ) -> Tuple[asyncio.Transport, ResponseHandler]: + sslcontext = self._get_ssl_context(req) + fingerprint = self._get_fingerprint(req) + + host = req.url.raw_host + assert host is not None + # Replace multiple trailing dots with a single one. + # A trailing dot is only present for fully-qualified domain names. + # See https://github.com/aio-libs/aiohttp/pull/7364. + if host.endswith(".."): + host = host.rstrip(".") + "." + port = req.port + assert port is not None + try: + # Cancelling this lookup should not cancel the underlying lookup + # or else the cancel event will get broadcast to all the waiters + # across all connections. 
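+ # (_resolve_host() achieves this by running the lookup in its own task
+ # and awaiting it through asyncio.shield().)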
+ hosts = await self._resolve_host(host, port, traces=traces) + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + # in case of proxy it is not ClientProxyConnectionError + # it is problem of resolving proxy ip itself + raise ClientConnectorError(req.connection_key, exc) from exc + + last_exc: Optional[Exception] = None + addr_infos = self._convert_hosts_to_addr_infos(hosts) + while addr_infos: + # Strip trailing dots, certificates contain FQDN without dots. + # See https://github.com/aio-libs/aiohttp/issues/3636 + server_hostname = ( + (req.server_hostname or host).rstrip(".") if sslcontext else None + ) + + try: + transp, proto = await self._wrap_create_connection( + self._factory, + timeout=timeout, + ssl=sslcontext, + addr_infos=addr_infos, + server_hostname=server_hostname, + req=req, + client_error=client_error, + ) + except ClientConnectorError as exc: + last_exc = exc + aiohappyeyeballs.pop_addr_infos_interleave(addr_infos, self._interleave) + continue + + if req.is_ssl() and fingerprint: + try: + fingerprint.check(transp) + except ServerFingerprintMismatch as exc: + transp.close() + if not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transp) + last_exc = exc + # Remove the bad peer from the list of addr_infos + sock: socket.socket = transp.get_extra_info("socket") + bad_peer = sock.getpeername() + aiohappyeyeballs.remove_addr_infos(addr_infos, bad_peer) + continue + + return transp, proto + else: + assert last_exc is not None + raise last_exc + + async def _create_proxy_connection( + self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: + self._fail_on_no_start_tls(req) + runtime_has_start_tls = self._loop_supports_start_tls() + + headers: Dict[str, str] = {} + if req.proxy_headers is not None: + headers = req.proxy_headers # type: ignore[assignment] + headers[hdrs.HOST] = req.headers[hdrs.HOST] + + url = req.proxy + assert url is not None + proxy_req = ClientRequest( + hdrs.METH_GET, + url, + headers=headers, + auth=req.proxy_auth, + loop=self._loop, + ssl=req.ssl, + ) + + # create connection to proxy server + transport, proto = await self._create_direct_connection( + proxy_req, [], timeout, client_error=ClientProxyConnectionError + ) + + # Many HTTP proxies has buggy keepalive support. Let's not + # reuse connection but close it after processing every + # response. 
+ proto.force_close() + + auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None) + if auth is not None: + if not req.is_ssl(): + req.headers[hdrs.PROXY_AUTHORIZATION] = auth + else: + proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth + + if req.is_ssl(): + if runtime_has_start_tls: + self._warn_about_tls_in_tls(transport, req) + + # For HTTPS requests over HTTP proxy + # we must notify proxy to tunnel connection + # so we send CONNECT command: + # CONNECT www.python.org:443 HTTP/1.1 + # Host: www.python.org + # + # next we must do TLS handshake and so on + # to do this we must wrap raw socket into secure one + # asyncio handles this perfectly + proxy_req.method = hdrs.METH_CONNECT + proxy_req.url = req.url + key = attr.evolve( + req.connection_key, proxy=None, proxy_auth=None, proxy_headers_hash=None + ) + conn = Connection(self, key, proto, self._loop) + proxy_resp = await proxy_req.send(conn) + try: + protocol = conn._protocol + assert protocol is not None + + # read_until_eof=True will ensure the connection isn't closed + # once the response is received and processed allowing + # START_TLS to work on the connection below. + protocol.set_response_params( + read_until_eof=runtime_has_start_tls, + timeout_ceil_threshold=self._timeout_ceil_threshold, + ) + resp = await proxy_resp.start(conn) + except BaseException: + proxy_resp.close() + conn.close() + raise + else: + conn._protocol = None + conn._transport = None + try: + if resp.status != 200: + message = resp.reason + if message is None: + message = HTTPStatus(resp.status).phrase + raise ClientHttpProxyError( + proxy_resp.request_info, + resp.history, + status=resp.status, + message=message, + headers=resp.headers, + ) + if not runtime_has_start_tls: + rawsock = transport.get_extra_info("socket", default=None) + if rawsock is None: + raise RuntimeError( + "Transport does not expose socket instance" + ) + # Duplicate the socket, so now we can close proxy transport + rawsock = rawsock.dup() + except BaseException: + # It shouldn't be closed in `finally` because it's fed to + # `loop.start_tls()` and the docs say not to touch it after + # passing there. + transport.close() + raise + finally: + if not runtime_has_start_tls: + transport.close() + + if not runtime_has_start_tls: + # HTTP proxy with support for upgrade to HTTPS + sslcontext = self._get_ssl_context(req) + return await self._wrap_existing_connection( + self._factory, + timeout=timeout, + ssl=sslcontext, + sock=rawsock, + server_hostname=req.host, + req=req, + ) + + return await self._start_tls_connection( + # Access the old transport for the last time before it's + # closed and forgotten forever: + transport, + req=req, + timeout=timeout, + ) + finally: + proxy_resp.close() + + return transport, proto + + +class UnixConnector(BaseConnector): + """Unix socket connector. + + path - Unix socket path. + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + loop - Optional event loop. 
+ """ + + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"unix"}) + + def __init__( + self, + path: str, + force_close: bool = False, + keepalive_timeout: Union[object, float, None] = sentinel, + limit: int = 100, + limit_per_host: int = 0, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + super().__init__( + force_close=force_close, + keepalive_timeout=keepalive_timeout, + limit=limit, + limit_per_host=limit_per_host, + loop=loop, + ) + self._path = path + + @property + def path(self) -> str: + """Path to unix socket.""" + return self._path + + async def _create_connection( + self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + try: + async with ceil_timeout( + timeout.sock_connect, ceil_threshold=timeout.ceil_threshold + ): + _, proto = await self._loop.create_unix_connection( + self._factory, self._path + ) + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc + + return proto + + +class NamedPipeConnector(BaseConnector): + """Named pipe connector. + + Only supported by the proactor event loop. + See also: https://docs.python.org/3/library/asyncio-eventloop.html + + path - Windows named pipe path. + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + loop - Optional event loop. + """ + + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"npipe"}) + + def __init__( + self, + path: str, + force_close: bool = False, + keepalive_timeout: Union[object, float, None] = sentinel, + limit: int = 100, + limit_per_host: int = 0, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + super().__init__( + force_close=force_close, + keepalive_timeout=keepalive_timeout, + limit=limit, + limit_per_host=limit_per_host, + loop=loop, + ) + if not isinstance( + self._loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] + ): + raise RuntimeError( + "Named Pipes only available in proactor " "loop under windows" + ) + self._path = path + + @property + def path(self) -> str: + """Path to the named pipe.""" + return self._path + + async def _create_connection( + self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + try: + async with ceil_timeout( + timeout.sock_connect, ceil_threshold=timeout.ceil_threshold + ): + _, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined] + self._factory, self._path + ) + # the drain is required so that the connection_made is called + # and transport is set otherwise it is not set before the + # `assert conn.transport is not None` + # in client.py's _request method + await asyncio.sleep(0) + # other option is to manually set transport like + # `proto.transport = trans` + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise ClientConnectorError(req.connection_key, exc) from exc + + return cast(ResponseHandler, proto) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/cookiejar.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/cookiejar.py new file mode 100644 index 000000000..e9997ce29 --- /dev/null +++ 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/cookiejar.py @@ -0,0 +1,425 @@ +import asyncio +import calendar +import contextlib +import datetime +import itertools +import os # noqa +import pathlib +import pickle +import re +import time +from collections import defaultdict +from http.cookies import BaseCookie, Morsel, SimpleCookie +from math import ceil +from typing import ( + DefaultDict, + Dict, + Iterable, + Iterator, + List, + Mapping, + Optional, + Set, + Tuple, + Union, + cast, +) + +from yarl import URL + +from .abc import AbstractCookieJar, ClearCookiePredicate +from .helpers import is_ip_address +from .typedefs import LooseCookies, PathLike, StrOrURL + +__all__ = ("CookieJar", "DummyCookieJar") + + +CookieItem = Union[str, "Morsel[str]"] + +# We cache these string methods here as their use is in performance critical code. +_FORMAT_PATH = "{}/{}".format +_FORMAT_DOMAIN_REVERSED = "{1}.{0}".format + + +class CookieJar(AbstractCookieJar): + """Implements cookie storage adhering to RFC 6265.""" + + DATE_TOKENS_RE = re.compile( + r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*" + r"(?P[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)" + ) + + DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})") + + DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})") + + DATE_MONTH_RE = re.compile( + "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)", + re.I, + ) + + DATE_YEAR_RE = re.compile(r"(\d{2,4})") + + # calendar.timegm() fails for timestamps after datetime.datetime.max + # Minus one as a loss of precision occurs when timestamp() is called. + MAX_TIME = ( + int(datetime.datetime.max.replace(tzinfo=datetime.timezone.utc).timestamp()) - 1 + ) + try: + calendar.timegm(time.gmtime(MAX_TIME)) + except (OSError, ValueError): + # Hit the maximum representable time on Windows + # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64 + # Throws ValueError on PyPy 3.8 and 3.9, OSError elsewhere + MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1)) + except OverflowError: + # #4515: datetime.max may not be representable on 32-bit platforms + MAX_TIME = 2**31 - 1 + # Avoid minuses in the future, 3x faster + SUB_MAX_TIME = MAX_TIME - 1 + + def __init__( + self, + *, + unsafe: bool = False, + quote_cookie: bool = True, + treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + super().__init__(loop=loop) + self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict( + SimpleCookie + ) + self._host_only_cookies: Set[Tuple[str, str]] = set() + self._unsafe = unsafe + self._quote_cookie = quote_cookie + if treat_as_secure_origin is None: + treat_as_secure_origin = [] + elif isinstance(treat_as_secure_origin, URL): + treat_as_secure_origin = [treat_as_secure_origin.origin()] + elif isinstance(treat_as_secure_origin, str): + treat_as_secure_origin = [URL(treat_as_secure_origin).origin()] + else: + treat_as_secure_origin = [ + URL(url).origin() if isinstance(url, str) else url.origin() + for url in treat_as_secure_origin + ] + self._treat_as_secure_origin = treat_as_secure_origin + self._next_expiration: float = ceil(time.time()) + self._expirations: Dict[Tuple[str, str, str], float] = {} + + def save(self, file_path: PathLike) -> None: + file_path = pathlib.Path(file_path) + with file_path.open(mode="wb") as f: + pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL) + + def load(self, file_path: PathLike) -> None: + file_path = 
pathlib.Path(file_path) + with file_path.open(mode="rb") as f: + self._cookies = pickle.load(f) + + def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + if predicate is None: + self._next_expiration = ceil(time.time()) + self._cookies.clear() + self._host_only_cookies.clear() + self._expirations.clear() + return + + to_del = [] + now = time.time() + for (domain, path), cookie in self._cookies.items(): + for name, morsel in cookie.items(): + key = (domain, path, name) + if ( + key in self._expirations and self._expirations[key] <= now + ) or predicate(morsel): + to_del.append(key) + + for domain, path, name in to_del: + self._host_only_cookies.discard((domain, name)) + key = (domain, path, name) + if key in self._expirations: + del self._expirations[(domain, path, name)] + self._cookies[(domain, path)].pop(name, None) + + self._next_expiration = ( + min(*self._expirations.values(), self.SUB_MAX_TIME) + 1 + if self._expirations + else self.MAX_TIME + ) + + def clear_domain(self, domain: str) -> None: + self.clear(lambda x: self._is_domain_match(domain, x["domain"])) + + def __iter__(self) -> "Iterator[Morsel[str]]": + self._do_expiration() + for val in self._cookies.values(): + yield from val.values() + + def __len__(self) -> int: + """Return number of cookies. + + This function does not iterate self to avoid unnecessary expiration + checks. + """ + return sum(len(cookie.values()) for cookie in self._cookies.values()) + + def _do_expiration(self) -> None: + self.clear(lambda x: False) + + def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None: + self._next_expiration = min(self._next_expiration, when) + self._expirations[(domain, path, name)] = when + + def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: + """Update cookies.""" + hostname = response_url.raw_host + + if not self._unsafe and is_ip_address(hostname): + # Don't accept cookies from IPs + return + + if isinstance(cookies, Mapping): + cookies = cookies.items() + + for name, cookie in cookies: + if not isinstance(cookie, Morsel): + tmp = SimpleCookie() + tmp[name] = cookie # type: ignore[assignment] + cookie = tmp[name] + + domain = cookie["domain"] + + # ignore domains with trailing dots + if domain.endswith("."): + domain = "" + del cookie["domain"] + + if not domain and hostname is not None: + # Set the cookie's domain to the response hostname + # and set its host-only-flag + self._host_only_cookies.add((hostname, name)) + domain = cookie["domain"] = hostname + + if domain.startswith("."): + # Remove leading dot + domain = domain[1:] + cookie["domain"] = domain + + if hostname and not self._is_domain_match(domain, hostname): + # Setting cookies for different domains is not allowed + continue + + path = cookie["path"] + if not path or not path.startswith("/"): + # Set the cookie's path to the response path + path = response_url.path + if not path.startswith("/"): + path = "/" + else: + # Cut everything from the last slash to the end + path = "/" + path[1 : path.rfind("/")] + cookie["path"] = path + path = path.rstrip("/") + + max_age = cookie["max-age"] + if max_age: + try: + delta_seconds = int(max_age) + max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME) + self._expire_cookie(max_age_expiration, domain, path, name) + except ValueError: + cookie["max-age"] = "" + + else: + expires = cookie["expires"] + if expires: + expire_time = self._parse_date(expires) + if expire_time: + self._expire_cookie(expire_time, domain, path, name) + 
else: + cookie["expires"] = "" + + self._cookies[(domain, path)][name] = cookie + + self._do_expiration() + + def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]": + """Returns this jar's cookies filtered by their attributes.""" + filtered: Union[SimpleCookie, "BaseCookie[str]"] = ( + SimpleCookie() if self._quote_cookie else BaseCookie() + ) + if not self._cookies: + # Skip do_expiration() if there are no cookies. + return filtered + self._do_expiration() + if not self._cookies: + # Skip rest of function if no non-expired cookies. + return filtered + request_url = URL(request_url) + hostname = request_url.raw_host or "" + + is_not_secure = request_url.scheme not in ("https", "wss") + if is_not_secure and self._treat_as_secure_origin: + request_origin = URL() + with contextlib.suppress(ValueError): + request_origin = request_url.origin() + is_not_secure = request_origin not in self._treat_as_secure_origin + + # Send shared cookie + for c in self._cookies[("", "")].values(): + filtered[c.key] = c.value + + if is_ip_address(hostname): + if not self._unsafe: + return filtered + domains: Iterable[str] = (hostname,) + else: + # Get all the subdomains that might match a cookie (e.g. "foo.bar.com", "bar.com", "com") + domains = itertools.accumulate( + reversed(hostname.split(".")), _FORMAT_DOMAIN_REVERSED + ) + + # Get all the path prefixes that might match a cookie (e.g. "", "/foo", "/foo/bar") + paths = itertools.accumulate(request_url.path.split("/"), _FORMAT_PATH) + # Create every combination of (domain, path) pairs. + pairs = itertools.product(domains, paths) + + # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4 + cookies = itertools.chain.from_iterable( + self._cookies[p].values() for p in pairs + ) + path_len = len(request_url.path) + for cookie in cookies: + name = cookie.key + domain = cookie["domain"] + + if (domain, name) in self._host_only_cookies: + if domain != hostname: + continue + + # Skip edge case when the cookie has a trailing slash but request doesn't. 
+ if len(cookie["path"]) > path_len: + continue + + if is_not_secure and cookie["secure"]: + continue + + # It's critical we use the Morsel so the coded_value + # (based on cookie version) is preserved + mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel())) + mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) + filtered[name] = mrsl_val + + return filtered + + @staticmethod + def _is_domain_match(domain: str, hostname: str) -> bool: + """Implements domain matching adhering to RFC 6265.""" + if hostname == domain: + return True + + if not hostname.endswith(domain): + return False + + non_matching = hostname[: -len(domain)] + + if not non_matching.endswith("."): + return False + + return not is_ip_address(hostname) + + @classmethod + def _parse_date(cls, date_str: str) -> Optional[int]: + """Implements date string parsing adhering to RFC 6265.""" + if not date_str: + return None + + found_time = False + found_day = False + found_month = False + found_year = False + + hour = minute = second = 0 + day = 0 + month = 0 + year = 0 + + for token_match in cls.DATE_TOKENS_RE.finditer(date_str): + + token = token_match.group("token") + + if not found_time: + time_match = cls.DATE_HMS_TIME_RE.match(token) + if time_match: + found_time = True + hour, minute, second = (int(s) for s in time_match.groups()) + continue + + if not found_day: + day_match = cls.DATE_DAY_OF_MONTH_RE.match(token) + if day_match: + found_day = True + day = int(day_match.group()) + continue + + if not found_month: + month_match = cls.DATE_MONTH_RE.match(token) + if month_match: + found_month = True + assert month_match.lastindex is not None + month = month_match.lastindex + continue + + if not found_year: + year_match = cls.DATE_YEAR_RE.match(token) + if year_match: + found_year = True + year = int(year_match.group()) + + if 70 <= year <= 99: + year += 1900 + elif 0 <= year <= 69: + year += 2000 + + if False in (found_day, found_month, found_year, found_time): + return None + + if not 1 <= day <= 31: + return None + + if year < 1601 or hour > 23 or minute > 59 or second > 59: + return None + + return calendar.timegm((year, month, day, hour, minute, second, -1, -1, -1)) + + +class DummyCookieJar(AbstractCookieJar): + """Implements a dummy cookie storage. + + It can be used with the ClientSession when no cookie processing is needed. + + """ + + def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + super().__init__(loop=loop) + + def __iter__(self) -> "Iterator[Morsel[str]]": + while False: + yield None + + def __len__(self) -> int: + return 0 + + def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + pass + + def clear_domain(self, domain: str) -> None: + pass + + def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: + pass + + def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": + return SimpleCookie() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/formdata.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/formdata.py new file mode 100644 index 000000000..2b75b3de7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/formdata.py @@ -0,0 +1,182 @@ +import io +import warnings +from typing import Any, Iterable, List, Optional +from urllib.parse import urlencode + +from multidict import MultiDict, MultiDictProxy + +from . 
import hdrs, multipart, payload +from .helpers import guess_filename +from .payload import Payload + +__all__ = ("FormData",) + + +class FormData: + """Helper class for form body generation. + + Supports multipart/form-data and application/x-www-form-urlencoded. + """ + + def __init__( + self, + fields: Iterable[Any] = (), + quote_fields: bool = True, + charset: Optional[str] = None, + ) -> None: + self._writer = multipart.MultipartWriter("form-data") + self._fields: List[Any] = [] + self._is_multipart = False + self._is_processed = False + self._quote_fields = quote_fields + self._charset = charset + + if isinstance(fields, dict): + fields = list(fields.items()) + elif not isinstance(fields, (list, tuple)): + fields = (fields,) + self.add_fields(*fields) + + @property + def is_multipart(self) -> bool: + return self._is_multipart + + def add_field( + self, + name: str, + value: Any, + *, + content_type: Optional[str] = None, + filename: Optional[str] = None, + content_transfer_encoding: Optional[str] = None, + ) -> None: + + if isinstance(value, io.IOBase): + self._is_multipart = True + elif isinstance(value, (bytes, bytearray, memoryview)): + msg = ( + "In v4, passing bytes will no longer create a file field. " + "Please explicitly use the filename parameter or pass a BytesIO object." + ) + if filename is None and content_transfer_encoding is None: + warnings.warn(msg, DeprecationWarning) + filename = name + + type_options: MultiDict[str] = MultiDict({"name": name}) + if filename is not None and not isinstance(filename, str): + raise TypeError( + "filename must be an instance of str. " "Got: %s" % filename + ) + if filename is None and isinstance(value, io.IOBase): + filename = guess_filename(value, name) + if filename is not None: + type_options["filename"] = filename + self._is_multipart = True + + headers = {} + if content_type is not None: + if not isinstance(content_type, str): + raise TypeError( + "content_type must be an instance of str. " "Got: %s" % content_type + ) + headers[hdrs.CONTENT_TYPE] = content_type + self._is_multipart = True + if content_transfer_encoding is not None: + if not isinstance(content_transfer_encoding, str): + raise TypeError( + "content_transfer_encoding must be an instance" + " of str. Got: %s" % content_transfer_encoding + ) + msg = ( + "content_transfer_encoding is deprecated. " + "To maintain compatibility with v4 please pass a BytesPayload." 
+ ) + warnings.warn(msg, DeprecationWarning) + self._is_multipart = True + + self._fields.append((type_options, headers, value)) + + def add_fields(self, *fields: Any) -> None: + to_add = list(fields) + + while to_add: + rec = to_add.pop(0) + + if isinstance(rec, io.IOBase): + k = guess_filename(rec, "unknown") + self.add_field(k, rec) # type: ignore[arg-type] + + elif isinstance(rec, (MultiDictProxy, MultiDict)): + to_add.extend(rec.items()) + + elif isinstance(rec, (list, tuple)) and len(rec) == 2: + k, fp = rec + self.add_field(k, fp) # type: ignore[arg-type] + + else: + raise TypeError( + "Only io.IOBase, multidict and (name, file) " + "pairs allowed, use .add_field() for passing " + "more complex parameters, got {!r}".format(rec) + ) + + def _gen_form_urlencoded(self) -> payload.BytesPayload: + # form data (x-www-form-urlencoded) + data = [] + for type_options, _, value in self._fields: + data.append((type_options["name"], value)) + + charset = self._charset if self._charset is not None else "utf-8" + + if charset == "utf-8": + content_type = "application/x-www-form-urlencoded" + else: + content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset + + return payload.BytesPayload( + urlencode(data, doseq=True, encoding=charset).encode(), + content_type=content_type, + ) + + def _gen_form_data(self) -> multipart.MultipartWriter: + """Encode a list of fields using the multipart/form-data MIME format""" + if self._is_processed: + raise RuntimeError("Form data has been processed already") + for dispparams, headers, value in self._fields: + try: + if hdrs.CONTENT_TYPE in headers: + part = payload.get_payload( + value, + content_type=headers[hdrs.CONTENT_TYPE], + headers=headers, + encoding=self._charset, + ) + else: + part = payload.get_payload( + value, headers=headers, encoding=self._charset + ) + except Exception as exc: + raise TypeError( + "Can not serialize value type: %r\n " + "headers: %r\n value: %r" % (type(value), headers, value) + ) from exc + + if dispparams: + part.set_content_disposition( + "form-data", quote_fields=self._quote_fields, **dispparams + ) + # FIXME cgi.FieldStorage doesn't likes body parts with + # Content-Length which were sent via chunked transfer encoding + assert part.headers is not None + part.headers.popall(hdrs.CONTENT_LENGTH, None) + + self._writer.append_payload(part) + + self._is_processed = True + return self._writer + + def __call__(self) -> Payload: + if self._is_multipart: + return self._gen_form_data() + else: + return self._gen_form_urlencoded() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/hdrs.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/hdrs.py new file mode 100644 index 000000000..2f1f5e02b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/hdrs.py @@ -0,0 +1,108 @@ +"""HTTP Headers constants.""" + +# After changing the file content call ./tools/gen.py +# to regenerate the headers parser +from typing import Final, Set + +from multidict import istr + +METH_ANY: Final[str] = "*" +METH_CONNECT: Final[str] = "CONNECT" +METH_HEAD: Final[str] = "HEAD" +METH_GET: Final[str] = "GET" +METH_DELETE: Final[str] = "DELETE" +METH_OPTIONS: Final[str] = "OPTIONS" +METH_PATCH: Final[str] = "PATCH" +METH_POST: Final[str] = "POST" +METH_PUT: Final[str] = "PUT" +METH_TRACE: Final[str] = "TRACE" + +METH_ALL: Final[Set[str]] = { + METH_CONNECT, + METH_HEAD, + METH_GET, + METH_DELETE, + METH_OPTIONS, + METH_PATCH, + METH_POST, + METH_PUT, + METH_TRACE, +} + +ACCEPT: Final[istr] = 
istr("Accept") +ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset") +ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding") +ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language") +ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges") +ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age") +ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials") +ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers") +ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods") +ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin") +ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers") +ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers") +ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method") +AGE: Final[istr] = istr("Age") +ALLOW: Final[istr] = istr("Allow") +AUTHORIZATION: Final[istr] = istr("Authorization") +CACHE_CONTROL: Final[istr] = istr("Cache-Control") +CONNECTION: Final[istr] = istr("Connection") +CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition") +CONTENT_ENCODING: Final[istr] = istr("Content-Encoding") +CONTENT_LANGUAGE: Final[istr] = istr("Content-Language") +CONTENT_LENGTH: Final[istr] = istr("Content-Length") +CONTENT_LOCATION: Final[istr] = istr("Content-Location") +CONTENT_MD5: Final[istr] = istr("Content-MD5") +CONTENT_RANGE: Final[istr] = istr("Content-Range") +CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding") +CONTENT_TYPE: Final[istr] = istr("Content-Type") +COOKIE: Final[istr] = istr("Cookie") +DATE: Final[istr] = istr("Date") +DESTINATION: Final[istr] = istr("Destination") +DIGEST: Final[istr] = istr("Digest") +ETAG: Final[istr] = istr("Etag") +EXPECT: Final[istr] = istr("Expect") +EXPIRES: Final[istr] = istr("Expires") +FORWARDED: Final[istr] = istr("Forwarded") +FROM: Final[istr] = istr("From") +HOST: Final[istr] = istr("Host") +IF_MATCH: Final[istr] = istr("If-Match") +IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since") +IF_NONE_MATCH: Final[istr] = istr("If-None-Match") +IF_RANGE: Final[istr] = istr("If-Range") +IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since") +KEEP_ALIVE: Final[istr] = istr("Keep-Alive") +LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID") +LAST_MODIFIED: Final[istr] = istr("Last-Modified") +LINK: Final[istr] = istr("Link") +LOCATION: Final[istr] = istr("Location") +MAX_FORWARDS: Final[istr] = istr("Max-Forwards") +ORIGIN: Final[istr] = istr("Origin") +PRAGMA: Final[istr] = istr("Pragma") +PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate") +PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization") +RANGE: Final[istr] = istr("Range") +REFERER: Final[istr] = istr("Referer") +RETRY_AFTER: Final[istr] = istr("Retry-After") +SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept") +SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version") +SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol") +SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions") +SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key") +SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1") +SERVER: Final[istr] = istr("Server") +SET_COOKIE: Final[istr] = istr("Set-Cookie") +TE: Final[istr] = istr("TE") +TRAILER: Final[istr] = istr("Trailer") +TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding") +UPGRADE: Final[istr] = istr("Upgrade") +URI: Final[istr] = 
istr("URI") +USER_AGENT: Final[istr] = istr("User-Agent") +VARY: Final[istr] = istr("Vary") +VIA: Final[istr] = istr("Via") +WANT_DIGEST: Final[istr] = istr("Want-Digest") +WARNING: Final[istr] = istr("Warning") +WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate") +X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For") +X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host") +X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto") diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/helpers.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/helpers.py new file mode 100644 index 000000000..437c871e8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/helpers.py @@ -0,0 +1,1001 @@ +"""Various helper functions""" + +import asyncio +import base64 +import binascii +import contextlib +import datetime +import enum +import functools +import inspect +import netrc +import os +import platform +import re +import sys +import time +import weakref +from collections import namedtuple +from contextlib import suppress +from email.parser import HeaderParser +from email.utils import parsedate +from math import ceil +from pathlib import Path +from types import TracebackType +from typing import ( + Any, + Callable, + ContextManager, + Dict, + Generator, + Generic, + Iterable, + Iterator, + List, + Mapping, + Optional, + Pattern, + Protocol, + Tuple, + Type, + TypeVar, + Union, + get_args, + overload, +) +from urllib.parse import quote +from urllib.request import getproxies, proxy_bypass + +import attr +from multidict import MultiDict, MultiDictProxy, MultiMapping +from yarl import URL + +from . import hdrs +from .log import client_logger + +if sys.version_info >= (3, 11): + import asyncio as async_timeout +else: + import async_timeout + +__all__ = ("BasicAuth", "ChainMapProxy", "ETag") + +IS_MACOS = platform.system() == "Darwin" +IS_WINDOWS = platform.system() == "Windows" + +PY_310 = sys.version_info >= (3, 10) +PY_311 = sys.version_info >= (3, 11) + + +_T = TypeVar("_T") +_S = TypeVar("_S") + +_SENTINEL = enum.Enum("_SENTINEL", "sentinel") +sentinel = _SENTINEL.sentinel + +NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) + +DEBUG = sys.flags.dev_mode or ( + not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG")) +) + + +CHAR = {chr(i) for i in range(0, 128)} +CTL = {chr(i) for i in range(0, 32)} | { + chr(127), +} +SEPARATORS = { + "(", + ")", + "<", + ">", + "@", + ",", + ";", + ":", + "\\", + '"', + "/", + "[", + "]", + "?", + "=", + "{", + "}", + " ", + chr(9), +} +TOKEN = CHAR ^ CTL ^ SEPARATORS + + +class noop: + def __await__(self) -> Generator[None, None, None]: + yield + + +class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])): + """Http basic authentication helper.""" + + def __new__( + cls, login: str, password: str = "", encoding: str = "latin1" + ) -> "BasicAuth": + if login is None: + raise ValueError("None is not allowed as login value") + + if password is None: + raise ValueError("None is not allowed as password value") + + if ":" in login: + raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)') + + return super().__new__(cls, login, password, encoding) + + @classmethod + def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth": + """Create a BasicAuth object from an Authorization HTTP header.""" + try: + auth_type, encoded_credentials = auth_header.split(" ", 1) + except ValueError: + raise ValueError("Could not parse authorization 
header.") + + if auth_type.lower() != "basic": + raise ValueError("Unknown authorization method %s" % auth_type) + + try: + decoded = base64.b64decode( + encoded_credentials.encode("ascii"), validate=True + ).decode(encoding) + except binascii.Error: + raise ValueError("Invalid base64 encoding.") + + try: + # RFC 2617 HTTP Authentication + # https://www.ietf.org/rfc/rfc2617.txt + # the colon must be present, but the username and password may be + # otherwise blank. + username, password = decoded.split(":", 1) + except ValueError: + raise ValueError("Invalid credentials.") + + return cls(username, password, encoding=encoding) + + @classmethod + def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]: + """Create BasicAuth from url.""" + if not isinstance(url, URL): + raise TypeError("url should be yarl.URL instance") + if url.user is None: + return None + return cls(url.user, url.password or "", encoding=encoding) + + def encode(self) -> str: + """Encode credentials.""" + creds = (f"{self.login}:{self.password}").encode(self.encoding) + return "Basic %s" % base64.b64encode(creds).decode(self.encoding) + + +def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: + auth = BasicAuth.from_url(url) + if auth is None: + return url, None + else: + return url.with_user(None), auth + + +def netrc_from_env() -> Optional[netrc.netrc]: + """Load netrc from file. + + Attempt to load it from the path specified by the env-var + NETRC or in the default location in the user's home directory. + + Returns None if it couldn't be found or fails to parse. + """ + netrc_env = os.environ.get("NETRC") + + if netrc_env is not None: + netrc_path = Path(netrc_env) + else: + try: + home_dir = Path.home() + except RuntimeError as e: # pragma: no cover + # if pathlib can't resolve home, it may raise a RuntimeError + client_logger.debug( + "Could not resolve home directory when " + "trying to look for .netrc file: %s", + e, + ) + return None + + netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc") + + try: + return netrc.netrc(str(netrc_path)) + except netrc.NetrcParseError as e: + client_logger.warning("Could not parse .netrc file: %s", e) + except OSError as e: + netrc_exists = False + with contextlib.suppress(OSError): + netrc_exists = netrc_path.is_file() + # we couldn't read the file (doesn't exist, permissions, etc.) + if netrc_env or netrc_exists: + # only warn if the environment wanted us to load it, + # or it appears like the default file does actually exist + client_logger.warning("Could not read .netrc file: %s", e) + + return None + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ProxyInfo: + proxy: URL + proxy_auth: Optional[BasicAuth] + + +def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth: + """ + Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``. + + :raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no + entry is found for the ``host``. + """ + if netrc_obj is None: + raise LookupError("No .netrc file found") + auth_from_netrc = netrc_obj.authenticators(host) + + if auth_from_netrc is None: + raise LookupError(f"No entry for {host!s} found in the `.netrc` file.") + login, account, password = auth_from_netrc + + # TODO(PY311): username = login or account + # Up to python 3.10, account could be None if not specified, + # and login will be empty string if not specified. From 3.11, + # login and account will be empty string if not specified. 
+ username = login if (login or account is None) else account + + # TODO(PY311): Remove this, as password will be empty string + # if not specified + if password is None: + password = "" + + return BasicAuth(username, password) + + +def proxies_from_env() -> Dict[str, ProxyInfo]: + proxy_urls = { + k: URL(v) + for k, v in getproxies().items() + if k in ("http", "https", "ws", "wss") + } + netrc_obj = netrc_from_env() + stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()} + ret = {} + for proto, val in stripped.items(): + proxy, auth = val + if proxy.scheme in ("https", "wss"): + client_logger.warning( + "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy + ) + continue + if netrc_obj and auth is None: + if proxy.host is not None: + try: + auth = basicauth_from_netrc(netrc_obj, proxy.host) + except LookupError: + auth = None + ret[proto] = ProxyInfo(proxy, auth) + return ret + + +def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: + """Get a permitted proxy for the given URL from the env.""" + if url.host is not None and proxy_bypass(url.host): + raise LookupError(f"Proxying is disallowed for `{url.host!r}`") + + proxies_in_env = proxies_from_env() + try: + proxy_info = proxies_in_env[url.scheme] + except KeyError: + raise LookupError(f"No proxies found for `{url!s}` in the env") + else: + return proxy_info.proxy, proxy_info.proxy_auth + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class MimeType: + type: str + subtype: str + suffix: str + parameters: "MultiDictProxy[str]" + + +@functools.lru_cache(maxsize=56) +def parse_mimetype(mimetype: str) -> MimeType: + """Parses a MIME type into its components. + + mimetype is a MIME type string. + + Returns a MimeType object. + + Example: + + >>> parse_mimetype('text/html; charset=utf-8') + MimeType(type='text', subtype='html', suffix='', + parameters={'charset': 'utf-8'}) + + """ + if not mimetype: + return MimeType( + type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict()) + ) + + parts = mimetype.split(";") + params: MultiDict[str] = MultiDict() + for item in parts[1:]: + if not item: + continue + key, _, value = item.partition("=") + params.add(key.lower().strip(), value.strip(' "')) + + fulltype = parts[0].strip().lower() + if fulltype == "*": + fulltype = "*/*" + + mtype, _, stype = fulltype.partition("/") + stype, _, suffix = stype.partition("+") + + return MimeType( + type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params) + ) + + +def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]: + name = getattr(obj, "name", None) + if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">": + return Path(name).name + return default + + +not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]") +QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"} + + +def quoted_string(content: str) -> str: + """Return 7-bit content as quoted-string. + + Format content into a quoted-string as defined in RFC5322 for + Internet Message Format. Notice that this is not the 8-bit HTTP + format, but the 7-bit email format. Content must be in usascii or + a ValueError is raised. + """ + if not (QCONTENT > set(content)): + raise ValueError(f"bad content for quoted-string {content!r}") + return not_qtext_re.sub(lambda x: "\\" + x.group(0), content) + + +def content_disposition_header( + disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str +) -> str: + """Sets ``Content-Disposition`` header for MIME. 
+ + This is the MIME payload Content-Disposition header from RFC 2183 + and RFC 7579 section 4.2, not the HTTP Content-Disposition from + RFC 6266. + + disptype is a disposition type: inline, attachment, form-data. + Should be valid extension token (see RFC 2183) + + quote_fields performs value quoting to 7-bit MIME headers + according to RFC 7578. Set to quote_fields to False if recipient + can take 8-bit file names and field values. + + _charset specifies the charset to use when quote_fields is True. + + params is a dict with disposition params. + """ + if not disptype or not (TOKEN > set(disptype)): + raise ValueError("bad content disposition type {!r}" "".format(disptype)) + + value = disptype + if params: + lparams = [] + for key, val in params.items(): + if not key or not (TOKEN > set(key)): + raise ValueError( + "bad content disposition parameter" " {!r}={!r}".format(key, val) + ) + if quote_fields: + if key.lower() == "filename": + qval = quote(val, "", encoding=_charset) + lparams.append((key, '"%s"' % qval)) + else: + try: + qval = quoted_string(val) + except ValueError: + qval = "".join( + (_charset, "''", quote(val, "", encoding=_charset)) + ) + lparams.append((key + "*", qval)) + else: + lparams.append((key, '"%s"' % qval)) + else: + qval = val.replace("\\", "\\\\").replace('"', '\\"') + lparams.append((key, '"%s"' % qval)) + sparams = "; ".join("=".join(pair) for pair in lparams) + value = "; ".join((value, sparams)) + return value + + +class _TSelf(Protocol, Generic[_T]): + _cache: Dict[str, _T] + + +class reify(Generic[_T]): + """Use as a class method decorator. + + It operates almost exactly like + the Python `@property` decorator, but it puts the result of the + method it decorates into the instance dict after the first call, + effectively replacing the function it decorates with an instance + variable. It is, in Python parlance, a data descriptor. 
+ """ + + def __init__(self, wrapped: Callable[..., _T]) -> None: + self.wrapped = wrapped + self.__doc__ = wrapped.__doc__ + self.name = wrapped.__name__ + + def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T: + try: + try: + return inst._cache[self.name] + except KeyError: + val = self.wrapped(inst) + inst._cache[self.name] = val + return val + except AttributeError: + if inst is None: + return self + raise + + def __set__(self, inst: _TSelf[_T], value: _T) -> None: + raise AttributeError("reified property is read-only") + + +reify_py = reify + +try: + from ._helpers import reify as reify_c + + if not NO_EXTENSIONS: + reify = reify_c # type: ignore[misc,assignment] +except ImportError: + pass + +_ipv4_pattern = ( + r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}" + r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$" +) +_ipv6_pattern = ( + r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}" + r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)" + r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})" + r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}" + r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}" + r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)" + r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}" + r":|:(:[A-F0-9]{1,4}){7})$" +) +_ipv4_regex = re.compile(_ipv4_pattern) +_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE) +_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii")) +_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE) + + +def _is_ip_address( + regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]] +) -> bool: + if host is None: + return False + if isinstance(host, str): + return bool(regex.match(host)) + elif isinstance(host, (bytes, bytearray, memoryview)): + return bool(regexb.match(host)) + else: + raise TypeError(f"{host} [{type(host)}] is not a str or bytes") + + +is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb) +is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb) + + +def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool: + return is_ipv4_address(host) or is_ipv6_address(host) + + +_cached_current_datetime: Optional[int] = None +_cached_formatted_datetime = "" + + +def rfc822_formatted_time() -> str: + global _cached_current_datetime + global _cached_formatted_datetime + + now = int(time.time()) + if now != _cached_current_datetime: + # Weekday and month names for HTTP date/time formatting; + # always English! + # Tuples are constants stored in codeobject! 
+ _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun") + _monthname = ( + "", # Dummy so we can use 1-based month numbers + "Jan", + "Feb", + "Mar", + "Apr", + "May", + "Jun", + "Jul", + "Aug", + "Sep", + "Oct", + "Nov", + "Dec", + ) + + year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now) + _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( + _weekdayname[wd], + day, + _monthname[month], + year, + hh, + mm, + ss, + ) + _cached_current_datetime = now + return _cached_formatted_datetime + + +def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None: + ref, name = info + ob = ref() + if ob is not None: + with suppress(Exception): + getattr(ob, name)() + + +def weakref_handle( + ob: object, + name: str, + timeout: float, + loop: asyncio.AbstractEventLoop, + timeout_ceil_threshold: float = 5, +) -> Optional[asyncio.TimerHandle]: + if timeout is not None and timeout > 0: + when = loop.time() + timeout + if timeout >= timeout_ceil_threshold: + when = ceil(when) + + return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name)) + return None + + +def call_later( + cb: Callable[[], Any], + timeout: float, + loop: asyncio.AbstractEventLoop, + timeout_ceil_threshold: float = 5, +) -> Optional[asyncio.TimerHandle]: + if timeout is None or timeout <= 0: + return None + now = loop.time() + when = calculate_timeout_when(now, timeout, timeout_ceil_threshold) + return loop.call_at(when, cb) + + +def calculate_timeout_when( + loop_time: float, + timeout: float, + timeout_ceiling_threshold: float, +) -> float: + """Calculate when to execute a timeout.""" + when = loop_time + timeout + if timeout > timeout_ceiling_threshold: + return ceil(when) + return when + + +class TimeoutHandle: + """Timeout handle""" + + def __init__( + self, + loop: asyncio.AbstractEventLoop, + timeout: Optional[float], + ceil_threshold: float = 5, + ) -> None: + self._timeout = timeout + self._loop = loop + self._ceil_threshold = ceil_threshold + self._callbacks: List[ + Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]] + ] = [] + + def register( + self, callback: Callable[..., None], *args: Any, **kwargs: Any + ) -> None: + self._callbacks.append((callback, args, kwargs)) + + def close(self) -> None: + self._callbacks.clear() + + def start(self) -> Optional[asyncio.Handle]: + timeout = self._timeout + if timeout is not None and timeout > 0: + when = self._loop.time() + timeout + if timeout >= self._ceil_threshold: + when = ceil(when) + return self._loop.call_at(when, self.__call__) + else: + return None + + def timer(self) -> "BaseTimerContext": + if self._timeout is not None and self._timeout > 0: + timer = TimerContext(self._loop) + self.register(timer.timeout) + return timer + else: + return TimerNoop() + + def __call__(self) -> None: + for cb, args, kwargs in self._callbacks: + with suppress(Exception): + cb(*args, **kwargs) + + self._callbacks.clear() + + +class BaseTimerContext(ContextManager["BaseTimerContext"]): + def assert_timeout(self) -> None: + """Raise TimeoutError if timeout has been exceeded.""" + + +class TimerNoop(BaseTimerContext): + def __enter__(self) -> BaseTimerContext: + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + return + + +class TimerContext(BaseTimerContext): + """Low resolution timeout context manager""" + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._tasks: 
List[asyncio.Task[Any]] = [] + self._cancelled = False + + def assert_timeout(self) -> None: + """Raise TimeoutError if timer has already been cancelled.""" + if self._cancelled: + raise asyncio.TimeoutError from None + + def __enter__(self) -> BaseTimerContext: + task = asyncio.current_task(loop=self._loop) + + if task is None: + raise RuntimeError( + "Timeout context manager should be used " "inside a task" + ) + + if self._cancelled: + raise asyncio.TimeoutError from None + + self._tasks.append(task) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + if self._tasks: + self._tasks.pop() + + if exc_type is asyncio.CancelledError and self._cancelled: + raise asyncio.TimeoutError from None + return None + + def timeout(self) -> None: + if not self._cancelled: + for task in set(self._tasks): + task.cancel() + + self._cancelled = True + + +def ceil_timeout( + delay: Optional[float], ceil_threshold: float = 5 +) -> async_timeout.Timeout: + if delay is None or delay <= 0: + return async_timeout.timeout(None) + + loop = asyncio.get_running_loop() + now = loop.time() + when = now + delay + if delay > ceil_threshold: + when = ceil(when) + return async_timeout.timeout_at(when) + + +class HeadersMixin: + ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"]) + + _headers: MultiMapping[str] + + _content_type: Optional[str] = None + _content_dict: Optional[Dict[str, str]] = None + _stored_content_type: Union[str, None, _SENTINEL] = sentinel + + def _parse_content_type(self, raw: Optional[str]) -> None: + self._stored_content_type = raw + if raw is None: + # default value according to RFC 2616 + self._content_type = "application/octet-stream" + self._content_dict = {} + else: + msg = HeaderParser().parsestr("Content-Type: " + raw) + self._content_type = msg.get_content_type() + params = msg.get_params(()) + self._content_dict = dict(params[1:]) # First element is content type again + + @property + def content_type(self) -> str: + """The value of content part for Content-Type HTTP header.""" + raw = self._headers.get(hdrs.CONTENT_TYPE) + if self._stored_content_type != raw: + self._parse_content_type(raw) + return self._content_type # type: ignore[return-value] + + @property + def charset(self) -> Optional[str]: + """The value of charset part for Content-Type HTTP header.""" + raw = self._headers.get(hdrs.CONTENT_TYPE) + if self._stored_content_type != raw: + self._parse_content_type(raw) + return self._content_dict.get("charset") # type: ignore[union-attr] + + @property + def content_length(self) -> Optional[int]: + """The value of Content-Length HTTP header.""" + content_length = self._headers.get(hdrs.CONTENT_LENGTH) + + if content_length is not None: + return int(content_length) + else: + return None + + +def set_result(fut: "asyncio.Future[_T]", result: _T) -> None: + if not fut.done(): + fut.set_result(result) + + +_EXC_SENTINEL = BaseException() + + +class ErrorableProtocol(Protocol): + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = ..., + ) -> None: ... # pragma: no cover + + +def set_exception( + fut: "asyncio.Future[_T] | ErrorableProtocol", + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, +) -> None: + """Set future exception. + + If the future is marked as complete, this function is a no-op. + + :param exc_cause: An exception that is a direct cause of ``exc``. + Only set if provided. 
+ """ + if asyncio.isfuture(fut) and fut.done(): + return + + exc_is_sentinel = exc_cause is _EXC_SENTINEL + exc_causes_itself = exc is exc_cause + if not exc_is_sentinel and not exc_causes_itself: + exc.__cause__ = exc_cause + + fut.set_exception(exc) + + +@functools.total_ordering +class AppKey(Generic[_T]): + """Keys for static typing support in Application.""" + + __slots__ = ("_name", "_t", "__orig_class__") + + # This may be set by Python when instantiating with a generic type. We need to + # support this, in order to support types that are not concrete classes, + # like Iterable, which can't be passed as the second parameter to __init__. + __orig_class__: Type[object] + + def __init__(self, name: str, t: Optional[Type[_T]] = None): + # Prefix with module name to help deduplicate key names. + frame = inspect.currentframe() + while frame: + if frame.f_code.co_name == "": + module: str = frame.f_globals["__name__"] + break + frame = frame.f_back + + self._name = module + "." + name + self._t = t + + def __lt__(self, other: object) -> bool: + if isinstance(other, AppKey): + return self._name < other._name + return True # Order AppKey above other types. + + def __repr__(self) -> str: + t = self._t + if t is None: + with suppress(AttributeError): + # Set to type arg. + t = get_args(self.__orig_class__)[0] + + if t is None: + t_repr = "<>" + elif isinstance(t, type): + if t.__module__ == "builtins": + t_repr = t.__qualname__ + else: + t_repr = f"{t.__module__}.{t.__qualname__}" + else: + t_repr = repr(t) + return f"" + + +class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]): + __slots__ = ("_maps",) + + def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None: + self._maps = tuple(maps) + + def __init_subclass__(cls) -> None: + raise TypeError( + "Inheritance class {} from ChainMapProxy " + "is forbidden".format(cls.__name__) + ) + + @overload # type: ignore[override] + def __getitem__(self, key: AppKey[_T]) -> _T: ... + + @overload + def __getitem__(self, key: str) -> Any: ... + + def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: + for mapping in self._maps: + try: + return mapping[key] + except KeyError: + pass + raise KeyError(key) + + @overload # type: ignore[override] + def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]: ... + + @overload + def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ... + + @overload + def get(self, key: str, default: Any = ...) -> Any: ... 
+ + def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any: + try: + return self[key] + except KeyError: + return default + + def __len__(self) -> int: + # reuses stored hash values if possible + return len(set().union(*self._maps)) + + def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]: + d: Dict[Union[str, AppKey[Any]], Any] = {} + for mapping in reversed(self._maps): + # reuses stored hash values if possible + d.update(mapping) + return iter(d) + + def __contains__(self, key: object) -> bool: + return any(key in m for m in self._maps) + + def __bool__(self) -> bool: + return any(self._maps) + + def __repr__(self) -> str: + content = ", ".join(map(repr, self._maps)) + return f"ChainMapProxy({content})" + + +# https://tools.ietf.org/html/rfc7232#section-2.3 +_ETAGC = r"[!\x23-\x7E\x80-\xff]+" +_ETAGC_RE = re.compile(_ETAGC) +_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"' +QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG) +LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)") + +ETAG_ANY = "*" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ETag: + value: str + is_weak: bool = False + + +def validate_etag_value(value: str) -> None: + if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value): + raise ValueError( + f"Value {value!r} is not a valid etag. Maybe it contains '\"'?" + ) + + +def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]: + """Process a date string, return a datetime object""" + if date_str is not None: + timetuple = parsedate(date_str) + if timetuple is not None: + with suppress(ValueError): + return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) + return None + + +def must_be_empty_body(method: str, code: int) -> bool: + """Check if a request must return an empty body.""" + return ( + status_code_must_be_empty_body(code) + or method_must_be_empty_body(method) + or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT) + ) + + +def method_must_be_empty_body(method: str) -> bool: + """Check if a method must return an empty body.""" + # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 + # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2 + return method.upper() == hdrs.METH_HEAD + + +def status_code_must_be_empty_body(code: int) -> bool: + """Check if a status code must return an empty body.""" + # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 + return code in {204, 304} or 100 <= code < 200 + + +def should_remove_content_length(method: str, code: int) -> bool: + """Check if a Content-Length header should be removed. + + This should always be a subset of must_be_empty_body + """ + # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8 + # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4 + return ( + code in {204, 304} + or 100 <= code < 200 + or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT) + ) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/http.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/http.py new file mode 100644 index 000000000..a1feae2d9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/http.py @@ -0,0 +1,72 @@ +import sys +from http import HTTPStatus +from typing import Mapping, Tuple + +from . 
import __version__ +from .http_exceptions import HttpProcessingError as HttpProcessingError +from .http_parser import ( + HeadersParser as HeadersParser, + HttpParser as HttpParser, + HttpRequestParser as HttpRequestParser, + HttpResponseParser as HttpResponseParser, + RawRequestMessage as RawRequestMessage, + RawResponseMessage as RawResponseMessage, +) +from .http_websocket import ( + WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE, + WS_KEY as WS_KEY, + WebSocketError as WebSocketError, + WebSocketReader as WebSocketReader, + WebSocketWriter as WebSocketWriter, + WSCloseCode as WSCloseCode, + WSMessage as WSMessage, + WSMsgType as WSMsgType, + ws_ext_gen as ws_ext_gen, + ws_ext_parse as ws_ext_parse, +) +from .http_writer import ( + HttpVersion as HttpVersion, + HttpVersion10 as HttpVersion10, + HttpVersion11 as HttpVersion11, + StreamWriter as StreamWriter, +) + +__all__ = ( + "HttpProcessingError", + "RESPONSES", + "SERVER_SOFTWARE", + # .http_writer + "StreamWriter", + "HttpVersion", + "HttpVersion10", + "HttpVersion11", + # .http_parser + "HeadersParser", + "HttpParser", + "HttpRequestParser", + "HttpResponseParser", + "RawRequestMessage", + "RawResponseMessage", + # .http_websocket + "WS_CLOSED_MESSAGE", + "WS_CLOSING_MESSAGE", + "WS_KEY", + "WebSocketReader", + "WebSocketWriter", + "ws_ext_gen", + "ws_ext_parse", + "WSMessage", + "WebSocketError", + "WSMsgType", + "WSCloseCode", +) + + +SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format( + sys.version_info, __version__ +) + +RESPONSES: Mapping[int, Tuple[str, str]] = { + v: (v.phrase, v.description) for v in HTTPStatus.__members__.values() +} diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/http_exceptions.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/http_exceptions.py new file mode 100644 index 000000000..c43ee0d96 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/http_exceptions.py @@ -0,0 +1,105 @@ +"""Low-level http related exceptions.""" + +from textwrap import indent +from typing import Optional, Union + +from .typedefs import _CIMultiDict + +__all__ = ("HttpProcessingError",) + + +class HttpProcessingError(Exception): + """HTTP error. + + Shortcut for raising HTTP errors with custom code, message and headers. + + code: HTTP Error code. + message: (optional) Error message. 
+ headers: (optional) Headers to be sent in response, a list of pairs + """ + + code = 0 + message = "" + headers = None + + def __init__( + self, + *, + code: Optional[int] = None, + message: str = "", + headers: Optional[_CIMultiDict] = None, + ) -> None: + if code is not None: + self.code = code + self.headers = headers + self.message = message + + def __str__(self) -> str: + msg = indent(self.message, " ") + return f"{self.code}, message:\n{msg}" + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>" + + +class BadHttpMessage(HttpProcessingError): + + code = 400 + message = "Bad Request" + + def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None: + super().__init__(message=message, headers=headers) + self.args = (message,) + + +class HttpBadRequest(BadHttpMessage): + + code = 400 + message = "Bad Request" + + +class PayloadEncodingError(BadHttpMessage): + """Base class for payload errors""" + + +class ContentEncodingError(PayloadEncodingError): + """Content encoding error.""" + + +class TransferEncodingError(PayloadEncodingError): + """transfer encoding error.""" + + +class ContentLengthError(PayloadEncodingError): + """Not enough data for satisfy content length header.""" + + +class LineTooLong(BadHttpMessage): + def __init__( + self, line: str, limit: str = "Unknown", actual_size: str = "Unknown" + ) -> None: + super().__init__( + f"Got more than {limit} bytes ({actual_size}) when reading {line}." + ) + self.args = (line, limit, actual_size) + + +class InvalidHeader(BadHttpMessage): + def __init__(self, hdr: Union[bytes, str]) -> None: + hdr_s = hdr.decode(errors="backslashreplace") if isinstance(hdr, bytes) else hdr + super().__init__(f"Invalid HTTP header: {hdr!r}") + self.hdr = hdr_s + self.args = (hdr,) + + +class BadStatusLine(BadHttpMessage): + def __init__(self, line: str = "", error: Optional[str] = None) -> None: + if not isinstance(line, str): + line = repr(line) + super().__init__(error or f"Bad status line {line!r}") + self.args = (line,) + self.line = line + + +class InvalidURLError(BadHttpMessage): + pass diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/http_parser.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/http_parser.py new file mode 100644 index 000000000..751a7e1bb --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/http_parser.py @@ -0,0 +1,1023 @@ +import abc +import asyncio +import re +import string +from contextlib import suppress +from enum import IntEnum +from typing import ( + Any, + ClassVar, + Final, + Generic, + List, + Literal, + NamedTuple, + Optional, + Pattern, + Set, + Tuple, + Type, + TypeVar, + Union, +) + +from multidict import CIMultiDict, CIMultiDictProxy, istr +from yarl import URL + +from . 
import hdrs +from .base_protocol import BaseProtocol +from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor +from .helpers import ( + _EXC_SENTINEL, + DEBUG, + NO_EXTENSIONS, + BaseTimerContext, + method_must_be_empty_body, + set_exception, + status_code_must_be_empty_body, +) +from .http_exceptions import ( + BadHttpMessage, + BadStatusLine, + ContentEncodingError, + ContentLengthError, + InvalidHeader, + InvalidURLError, + LineTooLong, + TransferEncodingError, +) +from .http_writer import HttpVersion, HttpVersion10 +from .streams import EMPTY_PAYLOAD, StreamReader +from .typedefs import RawHeaders + +__all__ = ( + "HeadersParser", + "HttpParser", + "HttpRequestParser", + "HttpResponseParser", + "RawRequestMessage", + "RawResponseMessage", +) + +_SEP = Literal[b"\r\n", b"\n"] + +ASCIISET: Final[Set[str]] = set(string.printable) + +# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview +# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens +# +# method = token +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." / +# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA +# token = 1*tchar +_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~") +TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+") +VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII) +DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII) +HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+") + + +class RawRequestMessage(NamedTuple): + method: str + path: str + version: HttpVersion + headers: "CIMultiDictProxy[str]" + raw_headers: RawHeaders + should_close: bool + compression: Optional[str] + upgrade: bool + chunked: bool + url: URL + + +class RawResponseMessage(NamedTuple): + version: HttpVersion + code: int + reason: str + headers: CIMultiDictProxy[str] + raw_headers: RawHeaders + should_close: bool + compression: Optional[str] + upgrade: bool + chunked: bool + + +_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage) + + +class ParseState(IntEnum): + + PARSE_NONE = 0 + PARSE_LENGTH = 1 + PARSE_CHUNKED = 2 + PARSE_UNTIL_EOF = 3 + + +class ChunkState(IntEnum): + PARSE_CHUNKED_SIZE = 0 + PARSE_CHUNKED_CHUNK = 1 + PARSE_CHUNKED_CHUNK_EOF = 2 + PARSE_MAYBE_TRAILERS = 3 + PARSE_TRAILERS = 4 + + +class HeadersParser: + def __init__( + self, + max_line_size: int = 8190, + max_headers: int = 32768, + max_field_size: int = 8190, + lax: bool = False, + ) -> None: + self.max_line_size = max_line_size + self.max_headers = max_headers + self.max_field_size = max_field_size + self._lax = lax + + def parse_headers( + self, lines: List[bytes] + ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]: + headers: CIMultiDict[str] = CIMultiDict() + # note: "raw" does not mean inclusion of OWS before/after the field value + raw_headers = [] + + lines_idx = 1 + line = lines[1] + line_count = len(lines) + + while line: + # Parse initial header name : value pair. 
+ try: + bname, bvalue = line.split(b":", 1) + except ValueError: + raise InvalidHeader(line) from None + + if len(bname) == 0: + raise InvalidHeader(bname) + + # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2 + if {bname[0], bname[-1]} & {32, 9}: # {" ", "\t"} + raise InvalidHeader(line) + + bvalue = bvalue.lstrip(b" \t") + if len(bname) > self.max_field_size: + raise LineTooLong( + "request header name {}".format( + bname.decode("utf8", "backslashreplace") + ), + str(self.max_field_size), + str(len(bname)), + ) + name = bname.decode("utf-8", "surrogateescape") + if not TOKENRE.fullmatch(name): + raise InvalidHeader(bname) + + header_length = len(bvalue) + + # next line + lines_idx += 1 + line = lines[lines_idx] + + # consume continuation lines + continuation = self._lax and line and line[0] in (32, 9) # (' ', '\t') + + # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding + if continuation: + bvalue_lst = [bvalue] + while continuation: + header_length += len(line) + if header_length > self.max_field_size: + raise LineTooLong( + "request header field {}".format( + bname.decode("utf8", "backslashreplace") + ), + str(self.max_field_size), + str(header_length), + ) + bvalue_lst.append(line) + + # next line + lines_idx += 1 + if lines_idx < line_count: + line = lines[lines_idx] + if line: + continuation = line[0] in (32, 9) # (' ', '\t') + else: + line = b"" + break + bvalue = b"".join(bvalue_lst) + else: + if header_length > self.max_field_size: + raise LineTooLong( + "request header field {}".format( + bname.decode("utf8", "backslashreplace") + ), + str(self.max_field_size), + str(header_length), + ) + + bvalue = bvalue.strip(b" \t") + value = bvalue.decode("utf-8", "surrogateescape") + + # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5 + if "\n" in value or "\r" in value or "\x00" in value: + raise InvalidHeader(bvalue) + + headers.add(name, value) + raw_headers.append((bname, bvalue)) + + return (CIMultiDictProxy(headers), tuple(raw_headers)) + + +def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool: + """Check if the upgrade header is supported.""" + return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"} + + +class HttpParser(abc.ABC, Generic[_MsgT]): + lax: ClassVar[bool] = False + + def __init__( + self, + protocol: Optional[BaseProtocol] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + limit: int = 2**16, + max_line_size: int = 8190, + max_headers: int = 32768, + max_field_size: int = 8190, + timer: Optional[BaseTimerContext] = None, + code: Optional[int] = None, + method: Optional[str] = None, + payload_exception: Optional[Type[BaseException]] = None, + response_with_body: bool = True, + read_until_eof: bool = False, + auto_decompress: bool = True, + ) -> None: + self.protocol = protocol + self.loop = loop + self.max_line_size = max_line_size + self.max_headers = max_headers + self.max_field_size = max_field_size + self.timer = timer + self.code = code + self.method = method + self.payload_exception = payload_exception + self.response_with_body = response_with_body + self.read_until_eof = read_until_eof + + self._lines: List[bytes] = [] + self._tail = b"" + self._upgraded = False + self._payload = None + self._payload_parser: Optional[HttpPayloadParser] = None + self._auto_decompress = auto_decompress + self._limit = limit + self._headers_parser = HeadersParser( + max_line_size, max_headers, max_field_size, self.lax + ) + + @abc.abstractmethod + def parse_message(self, lines: List[bytes]) -> 
_MsgT: + pass + + def feed_eof(self) -> Optional[_MsgT]: + if self._payload_parser is not None: + self._payload_parser.feed_eof() + self._payload_parser = None + else: + # try to extract partial message + if self._tail: + self._lines.append(self._tail) + + if self._lines: + if self._lines[-1] != "\r\n": + self._lines.append(b"") + with suppress(Exception): + return self.parse_message(self._lines) + return None + + def feed_data( + self, + data: bytes, + SEP: _SEP = b"\r\n", + EMPTY: bytes = b"", + CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH, + METH_CONNECT: str = hdrs.METH_CONNECT, + SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1, + ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]: + + messages = [] + + if self._tail: + data, self._tail = self._tail + data, b"" + + data_len = len(data) + start_pos = 0 + loop = self.loop + + while start_pos < data_len: + + # read HTTP message (request/response line + headers), \r\n\r\n + # and split by lines + if self._payload_parser is None and not self._upgraded: + pos = data.find(SEP, start_pos) + # consume \r\n + if pos == start_pos and not self._lines: + start_pos = pos + len(SEP) + continue + + if pos >= start_pos: + # line found + line = data[start_pos:pos] + if SEP == b"\n": # For lax response parsing + line = line.rstrip(b"\r") + self._lines.append(line) + start_pos = pos + len(SEP) + + # \r\n\r\n found + if self._lines[-1] == EMPTY: + try: + msg: _MsgT = self.parse_message(self._lines) + finally: + self._lines.clear() + + def get_content_length() -> Optional[int]: + # payload length + length_hdr = msg.headers.get(CONTENT_LENGTH) + if length_hdr is None: + return None + + # Shouldn't allow +/- or other number formats. + # https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2 + # msg.headers is already stripped of leading/trailing wsp + if not DIGITS.fullmatch(length_hdr): + raise InvalidHeader(CONTENT_LENGTH) + + return int(length_hdr) + + length = get_content_length() + # do not support old websocket spec + if SEC_WEBSOCKET_KEY1 in msg.headers: + raise InvalidHeader(SEC_WEBSOCKET_KEY1) + + self._upgraded = msg.upgrade and _is_supported_upgrade( + msg.headers + ) + + method = getattr(msg, "method", self.method) + # code is only present on responses + code = getattr(msg, "code", 0) + + assert self.protocol is not None + # calculate payload + empty_body = status_code_must_be_empty_body(code) or bool( + method and method_must_be_empty_body(method) + ) + if not empty_body and ( + ((length is not None and length > 0) or msg.chunked) + and not self._upgraded + ): + payload = StreamReader( + self.protocol, + timer=self.timer, + loop=loop, + limit=self._limit, + ) + payload_parser = HttpPayloadParser( + payload, + length=length, + chunked=msg.chunked, + method=method, + compression=msg.compression, + code=self.code, + response_with_body=self.response_with_body, + auto_decompress=self._auto_decompress, + lax=self.lax, + ) + if not payload_parser.done: + self._payload_parser = payload_parser + elif method == METH_CONNECT: + assert isinstance(msg, RawRequestMessage) + payload = StreamReader( + self.protocol, + timer=self.timer, + loop=loop, + limit=self._limit, + ) + self._upgraded = True + self._payload_parser = HttpPayloadParser( + payload, + method=msg.method, + compression=msg.compression, + auto_decompress=self._auto_decompress, + lax=self.lax, + ) + elif not empty_body and length is None and self.read_until_eof: + payload = StreamReader( + self.protocol, + timer=self.timer, + loop=loop, + limit=self._limit, + ) + payload_parser = 
HttpPayloadParser( + payload, + length=length, + chunked=msg.chunked, + method=method, + compression=msg.compression, + code=self.code, + response_with_body=self.response_with_body, + auto_decompress=self._auto_decompress, + lax=self.lax, + ) + if not payload_parser.done: + self._payload_parser = payload_parser + else: + payload = EMPTY_PAYLOAD + + messages.append((msg, payload)) + else: + self._tail = data[start_pos:] + data = EMPTY + break + + # no parser, just store + elif self._payload_parser is None and self._upgraded: + assert not self._lines + break + + # feed payload + elif data and start_pos < data_len: + assert not self._lines + assert self._payload_parser is not None + try: + eof, data = self._payload_parser.feed_data(data[start_pos:], SEP) + except BaseException as underlying_exc: + reraised_exc = underlying_exc + if self.payload_exception is not None: + reraised_exc = self.payload_exception(str(underlying_exc)) + + set_exception( + self._payload_parser.payload, + reraised_exc, + underlying_exc, + ) + + eof = True + data = b"" + + if eof: + start_pos = 0 + data_len = len(data) + self._payload_parser = None + continue + else: + break + + if data and start_pos < data_len: + data = data[start_pos:] + else: + data = EMPTY + + return messages, self._upgraded, data + + def parse_headers( + self, lines: List[bytes] + ) -> Tuple[ + "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool + ]: + """Parses RFC 5322 headers from a stream. + + Line continuations are supported. Returns list of header name + and value pairs. Header name is in upper case. + """ + headers, raw_headers = self._headers_parser.parse_headers(lines) + close_conn = None + encoding = None + upgrade = False + chunked = False + + # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6 + # https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf + singletons = ( + hdrs.CONTENT_LENGTH, + hdrs.CONTENT_LOCATION, + hdrs.CONTENT_RANGE, + hdrs.CONTENT_TYPE, + hdrs.ETAG, + hdrs.HOST, + hdrs.MAX_FORWARDS, + hdrs.SERVER, + hdrs.TRANSFER_ENCODING, + hdrs.USER_AGENT, + ) + bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None) + if bad_hdr is not None: + raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.") + + # keep-alive + conn = headers.get(hdrs.CONNECTION) + if conn: + v = conn.lower() + if v == "close": + close_conn = True + elif v == "keep-alive": + close_conn = False + # https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols + elif v == "upgrade" and headers.get(hdrs.UPGRADE): + upgrade = True + + # encoding + enc = headers.get(hdrs.CONTENT_ENCODING) + if enc: + enc = enc.lower() + if enc in ("gzip", "deflate", "br"): + encoding = enc + + # chunking + te = headers.get(hdrs.TRANSFER_ENCODING) + if te is not None: + if "chunked" == te.lower(): + chunked = True + else: + raise BadHttpMessage("Request has invalid `Transfer-Encoding`") + + if hdrs.CONTENT_LENGTH in headers: + raise BadHttpMessage( + "Transfer-Encoding can't be present with Content-Length", + ) + + return (headers, raw_headers, close_conn, encoding, upgrade, chunked) + + def set_upgraded(self, val: bool) -> None: + """Set connection upgraded (to websocket) mode. + + :param bool val: new state. + """ + self._upgraded = val + + +class HttpRequestParser(HttpParser[RawRequestMessage]): + """Read request status line. + + Exception .http_exceptions.BadStatusLine + could be raised in case of any errors in status line. + Returns RawRequestMessage. 
+ """ + + def parse_message(self, lines: List[bytes]) -> RawRequestMessage: + # request line + line = lines[0].decode("utf-8", "surrogateescape") + try: + method, path, version = line.split(" ", maxsplit=2) + except ValueError: + raise BadStatusLine(line) from None + + if len(path) > self.max_line_size: + raise LineTooLong( + "Status line is too long", str(self.max_line_size), str(len(path)) + ) + + # method + if not TOKENRE.fullmatch(method): + raise BadStatusLine(method) + + # version + match = VERSRE.fullmatch(version) + if match is None: + raise BadStatusLine(line) + version_o = HttpVersion(int(match.group(1)), int(match.group(2))) + + if method == "CONNECT": + # authority-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3 + url = URL.build(authority=path, encoded=True) + elif path.startswith("/"): + # origin-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1 + path_part, _hash_separator, url_fragment = path.partition("#") + path_part, _question_mark_separator, qs_part = path_part.partition("?") + + # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based + # NOTE: parser does, otherwise it results into the same + # NOTE: HTTP Request-Line input producing different + # NOTE: `yarl.URL()` objects + url = URL.build( + path=path_part, + query_string=qs_part, + fragment=url_fragment, + encoded=True, + ) + elif path == "*" and method == "OPTIONS": + # asterisk-form, + url = URL(path, encoded=True) + else: + # absolute-form for proxy maybe, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2 + url = URL(path, encoded=True) + if url.scheme == "": + # not absolute-form + raise InvalidURLError( + path.encode(errors="surrogateescape").decode("latin1") + ) + + # read headers + ( + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + ) = self.parse_headers(lines) + + if close is None: # then the headers weren't set in the request + if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close + close = True + else: # HTTP 1.1 must ask to close. + close = False + + return RawRequestMessage( + method, + path, + version_o, + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + url, + ) + + +class HttpResponseParser(HttpParser[RawResponseMessage]): + """Read response status line and headers. + + BadStatusLine could be raised in case of any errors in status line. + Returns RawResponseMessage. + """ + + # Lax mode should only be enabled on response parser. 
+ lax = not DEBUG + + def feed_data( + self, + data: bytes, + SEP: Optional[_SEP] = None, + *args: Any, + **kwargs: Any, + ) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]: + if SEP is None: + SEP = b"\r\n" if DEBUG else b"\n" + return super().feed_data(data, SEP, *args, **kwargs) + + def parse_message(self, lines: List[bytes]) -> RawResponseMessage: + line = lines[0].decode("utf-8", "surrogateescape") + try: + version, status = line.split(maxsplit=1) + except ValueError: + raise BadStatusLine(line) from None + + try: + status, reason = status.split(maxsplit=1) + except ValueError: + status = status.strip() + reason = "" + + if len(reason) > self.max_line_size: + raise LineTooLong( + "Status line is too long", str(self.max_line_size), str(len(reason)) + ) + + # version + match = VERSRE.fullmatch(version) + if match is None: + raise BadStatusLine(line) + version_o = HttpVersion(int(match.group(1)), int(match.group(2))) + + # The status code is a three-digit ASCII number, no padding + if len(status) != 3 or not DIGITS.fullmatch(status): + raise BadStatusLine(line) + status_i = int(status) + + # read headers + ( + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + ) = self.parse_headers(lines) + + if close is None: + if version_o <= HttpVersion10: + close = True + # https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length + elif 100 <= status_i < 200 or status_i in {204, 304}: + close = False + elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers: + close = False + else: + # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8 + close = True + + return RawResponseMessage( + version_o, + status_i, + reason.strip(), + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + ) + + +class HttpPayloadParser: + def __init__( + self, + payload: StreamReader, + length: Optional[int] = None, + chunked: bool = False, + compression: Optional[str] = None, + code: Optional[int] = None, + method: Optional[str] = None, + response_with_body: bool = True, + auto_decompress: bool = True, + lax: bool = False, + ) -> None: + self._length = 0 + self._type = ParseState.PARSE_UNTIL_EOF + self._chunk = ChunkState.PARSE_CHUNKED_SIZE + self._chunk_size = 0 + self._chunk_tail = b"" + self._auto_decompress = auto_decompress + self._lax = lax + self.done = False + + # payload decompression wrapper + if response_with_body and compression and self._auto_decompress: + real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer( + payload, compression + ) + else: + real_payload = payload + + # payload parser + if not response_with_body: + # don't parse payload if it's not expected to be received + self._type = ParseState.PARSE_NONE + real_payload.feed_eof() + self.done = True + elif chunked: + self._type = ParseState.PARSE_CHUNKED + elif length is not None: + self._type = ParseState.PARSE_LENGTH + self._length = length + if self._length == 0: + real_payload.feed_eof() + self.done = True + + self.payload = real_payload + + def feed_eof(self) -> None: + if self._type == ParseState.PARSE_UNTIL_EOF: + self.payload.feed_eof() + elif self._type == ParseState.PARSE_LENGTH: + raise ContentLengthError( + "Not enough data for satisfy content length header." + ) + elif self._type == ParseState.PARSE_CHUNKED: + raise TransferEncodingError( + "Not enough data for satisfy transfer length header." 
+ ) + + def feed_data( + self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";" + ) -> Tuple[bool, bytes]: + # Read specified amount of bytes + if self._type == ParseState.PARSE_LENGTH: + required = self._length + chunk_len = len(chunk) + + if required >= chunk_len: + self._length = required - chunk_len + self.payload.feed_data(chunk, chunk_len) + if self._length == 0: + self.payload.feed_eof() + return True, b"" + else: + self._length = 0 + self.payload.feed_data(chunk[:required], required) + self.payload.feed_eof() + return True, chunk[required:] + + # Chunked transfer encoding parser + elif self._type == ParseState.PARSE_CHUNKED: + if self._chunk_tail: + chunk = self._chunk_tail + chunk + self._chunk_tail = b"" + + while chunk: + + # read next chunk size + if self._chunk == ChunkState.PARSE_CHUNKED_SIZE: + pos = chunk.find(SEP) + if pos >= 0: + i = chunk.find(CHUNK_EXT, 0, pos) + if i >= 0: + size_b = chunk[:i] # strip chunk-extensions + else: + size_b = chunk[:pos] + + if self._lax: # Allow whitespace in lax mode. + size_b = size_b.strip() + + if not re.fullmatch(HEXDIGITS, size_b): + exc = TransferEncodingError( + chunk[:pos].decode("ascii", "surrogateescape") + ) + set_exception(self.payload, exc) + raise exc + size = int(bytes(size_b), 16) + + chunk = chunk[pos + len(SEP) :] + if size == 0: # eof marker + self._chunk = ChunkState.PARSE_MAYBE_TRAILERS + if self._lax and chunk.startswith(b"\r"): + chunk = chunk[1:] + else: + self._chunk = ChunkState.PARSE_CHUNKED_CHUNK + self._chunk_size = size + self.payload.begin_http_chunk_receiving() + else: + self._chunk_tail = chunk + return False, b"" + + # read chunk and feed buffer + if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK: + required = self._chunk_size + chunk_len = len(chunk) + + if required > chunk_len: + self._chunk_size = required - chunk_len + self.payload.feed_data(chunk, chunk_len) + return False, b"" + else: + self._chunk_size = 0 + self.payload.feed_data(chunk[:required], required) + chunk = chunk[required:] + if self._lax and chunk.startswith(b"\r"): + chunk = chunk[1:] + self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF + self.payload.end_http_chunk_receiving() + + # toss the CRLF at the end of the chunk + if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF: + if chunk[: len(SEP)] == SEP: + chunk = chunk[len(SEP) :] + self._chunk = ChunkState.PARSE_CHUNKED_SIZE + else: + self._chunk_tail = chunk + return False, b"" + + # if stream does not contain trailer, after 0\r\n + # we should get another \r\n otherwise + # trailers needs to be skipped until \r\n\r\n + if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS: + head = chunk[: len(SEP)] + if head == SEP: + # end of stream + self.payload.feed_eof() + return True, chunk[len(SEP) :] + # Both CR and LF, or only LF may not be received yet. It is + # expected that CRLF or LF will be shown at the very first + # byte next time, otherwise trailers should come. The last + # CRLF which marks the end of response might not be + # contained in the same TCP segment which delivered the + # size indicator. 
+ if not head: + return False, b"" + if head == SEP[:1]: + self._chunk_tail = head + return False, b"" + self._chunk = ChunkState.PARSE_TRAILERS + + # read and discard trailer up to the CRLF terminator + if self._chunk == ChunkState.PARSE_TRAILERS: + pos = chunk.find(SEP) + if pos >= 0: + chunk = chunk[pos + len(SEP) :] + self._chunk = ChunkState.PARSE_MAYBE_TRAILERS + else: + self._chunk_tail = chunk + return False, b"" + + # Read all bytes until eof + elif self._type == ParseState.PARSE_UNTIL_EOF: + self.payload.feed_data(chunk, len(chunk)) + + return False, b"" + + +class DeflateBuffer: + """DeflateStream decompress stream and feed data into specified stream.""" + + decompressor: Any + + def __init__(self, out: StreamReader, encoding: Optional[str]) -> None: + self.out = out + self.size = 0 + self.encoding = encoding + self._started_decoding = False + + self.decompressor: Union[BrotliDecompressor, ZLibDecompressor] + if encoding == "br": + if not HAS_BROTLI: # pragma: no cover + raise ContentEncodingError( + "Can not decode content-encoding: brotli (br). " + "Please install `Brotli`" + ) + self.decompressor = BrotliDecompressor() + else: + self.decompressor = ZLibDecompressor(encoding=encoding) + + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: + set_exception(self.out, exc, exc_cause) + + def feed_data(self, chunk: bytes, size: int) -> None: + if not size: + return + + self.size += size + + # RFC1950 + # bits 0..3 = CM = 0b1000 = 8 = "deflate" + # bits 4..7 = CINFO = 1..7 = windows size. + if ( + not self._started_decoding + and self.encoding == "deflate" + and chunk[0] & 0xF != 8 + ): + # Change the decoder to decompress incorrectly compressed data + # Actually we should issue a warning about non-RFC-compliant data. 
+ self.decompressor = ZLibDecompressor( + encoding=self.encoding, suppress_deflate_header=True + ) + + try: + chunk = self.decompressor.decompress_sync(chunk) + except Exception: + raise ContentEncodingError( + "Can not decode content-encoding: %s" % self.encoding + ) + + self._started_decoding = True + + if chunk: + self.out.feed_data(chunk, len(chunk)) + + def feed_eof(self) -> None: + chunk = self.decompressor.flush() + + if chunk or self.size > 0: + self.out.feed_data(chunk, len(chunk)) + if self.encoding == "deflate" and not self.decompressor.eof: + raise ContentEncodingError("deflate") + + self.out.feed_eof() + + def begin_http_chunk_receiving(self) -> None: + self.out.begin_http_chunk_receiving() + + def end_http_chunk_receiving(self) -> None: + self.out.end_http_chunk_receiving() + + +HttpRequestParserPy = HttpRequestParser +HttpResponseParserPy = HttpResponseParser +RawRequestMessagePy = RawRequestMessage +RawResponseMessagePy = RawResponseMessage + +try: + if not NO_EXTENSIONS: + from ._http_parser import ( # type: ignore[import-not-found,no-redef] + HttpRequestParser, + HttpResponseParser, + RawRequestMessage, + RawResponseMessage, + ) + + HttpRequestParserC = HttpRequestParser + HttpResponseParserC = HttpResponseParser + RawRequestMessageC = RawRequestMessage + RawResponseMessageC = RawResponseMessage +except ImportError: # pragma: no cover + pass diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/http_websocket.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/http_websocket.py new file mode 100644 index 000000000..39f2e4a5c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/http_websocket.py @@ -0,0 +1,740 @@ +"""WebSocket protocol versions 13 and 8.""" + +import asyncio +import functools +import json +import random +import re +import sys +import zlib +from enum import IntEnum +from struct import Struct +from typing import ( + Any, + Callable, + Final, + List, + NamedTuple, + Optional, + Pattern, + Set, + Tuple, + Union, + cast, +) + +from .base_protocol import BaseProtocol +from .compression_utils import ZLibCompressor, ZLibDecompressor +from .helpers import NO_EXTENSIONS, set_exception +from .streams import DataQueue + +__all__ = ( + "WS_CLOSED_MESSAGE", + "WS_CLOSING_MESSAGE", + "WS_KEY", + "WebSocketReader", + "WebSocketWriter", + "WSMessage", + "WebSocketError", + "WSMsgType", + "WSCloseCode", +) + + +class WSCloseCode(IntEnum): + OK = 1000 + GOING_AWAY = 1001 + PROTOCOL_ERROR = 1002 + UNSUPPORTED_DATA = 1003 + ABNORMAL_CLOSURE = 1006 + INVALID_TEXT = 1007 + POLICY_VIOLATION = 1008 + MESSAGE_TOO_BIG = 1009 + MANDATORY_EXTENSION = 1010 + INTERNAL_ERROR = 1011 + SERVICE_RESTART = 1012 + TRY_AGAIN_LATER = 1013 + BAD_GATEWAY = 1014 + + +ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode} + +# For websockets, keeping latency low is extremely important as implementations +# generally expect to be able to send and receive messages quickly. We use a +# larger chunk size than the default to reduce the number of executor calls +# since the executor is a significant source of latency and overhead when +# the chunks are small. A size of 5KiB was chosen because it is also the +# same value python-zlib-ng choose to use as the threshold to release the GIL. 
+ +WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024 + + +class WSMsgType(IntEnum): + # websocket spec types + CONTINUATION = 0x0 + TEXT = 0x1 + BINARY = 0x2 + PING = 0x9 + PONG = 0xA + CLOSE = 0x8 + + # aiohttp specific types + CLOSING = 0x100 + CLOSED = 0x101 + ERROR = 0x102 + + text = TEXT + binary = BINARY + ping = PING + pong = PONG + close = CLOSE + closing = CLOSING + closed = CLOSED + error = ERROR + + +WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" + + +UNPACK_LEN2 = Struct("!H").unpack_from +UNPACK_LEN3 = Struct("!Q").unpack_from +UNPACK_CLOSE_CODE = Struct("!H").unpack +PACK_LEN1 = Struct("!BB").pack +PACK_LEN2 = Struct("!BBH").pack +PACK_LEN3 = Struct("!BBQ").pack +PACK_CLOSE_CODE = Struct("!H").pack +MSG_SIZE: Final[int] = 2**14 +DEFAULT_LIMIT: Final[int] = 2**16 + + +class WSMessage(NamedTuple): + type: WSMsgType + # To type correctly, this would need some kind of tagged union for each type. + data: Any + extra: Optional[str] + + def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any: + """Return parsed JSON data. + + .. versionadded:: 0.22 + """ + return loads(self.data) + + +WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None) +WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None) + + +class WebSocketError(Exception): + """WebSocket protocol parser error.""" + + def __init__(self, code: int, message: str) -> None: + self.code = code + super().__init__(code, message) + + def __str__(self) -> str: + return cast(str, self.args[1]) + + +class WSHandshakeError(Exception): + """WebSocket protocol handshake error.""" + + +native_byteorder: Final[str] = sys.byteorder + + +# Used by _websocket_mask_python +@functools.lru_cache +def _xor_table() -> List[bytes]: + return [bytes(a ^ b for a in range(256)) for b in range(256)] + + +def _websocket_mask_python(mask: bytes, data: bytearray) -> None: + """Websocket masking function. + + `mask` is a `bytes` object of length 4; `data` is a `bytearray` + object of any length. The contents of `data` are masked with `mask`, + as specified in section 5.3 of RFC 6455. + + Note that this function mutates the `data` argument. + + This pure-python implementation may be replaced by an optimized + version when available. 
+ + """ + assert isinstance(data, bytearray), data + assert len(mask) == 4, mask + + if data: + _XOR_TABLE = _xor_table() + a, b, c, d = (_XOR_TABLE[n] for n in mask) + data[::4] = data[::4].translate(a) + data[1::4] = data[1::4].translate(b) + data[2::4] = data[2::4].translate(c) + data[3::4] = data[3::4].translate(d) + + +if NO_EXTENSIONS: # pragma: no cover + _websocket_mask = _websocket_mask_python +else: + try: + from ._websocket import _websocket_mask_cython # type: ignore[import-not-found] + + _websocket_mask = _websocket_mask_cython + except ImportError: # pragma: no cover + _websocket_mask = _websocket_mask_python + +_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF]) + + +_WS_EXT_RE: Final[Pattern[str]] = re.compile( + r"^(?:;\s*(?:" + r"(server_no_context_takeover)|" + r"(client_no_context_takeover)|" + r"(server_max_window_bits(?:=(\d+))?)|" + r"(client_max_window_bits(?:=(\d+))?)))*$" +) + +_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?") + + +def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]: + if not extstr: + return 0, False + + compress = 0 + notakeover = False + for ext in _WS_EXT_RE_SPLIT.finditer(extstr): + defext = ext.group(1) + # Return compress = 15 when get `permessage-deflate` + if not defext: + compress = 15 + break + match = _WS_EXT_RE.match(defext) + if match: + compress = 15 + if isserver: + # Server never fail to detect compress handshake. + # Server does not need to send max wbit to client + if match.group(4): + compress = int(match.group(4)) + # Group3 must match if group4 matches + # Compress wbit 8 does not support in zlib + # If compress level not support, + # CONTINUE to next extension + if compress > 15 or compress < 9: + compress = 0 + continue + if match.group(1): + notakeover = True + # Ignore regex group 5 & 6 for client_max_window_bits + break + else: + if match.group(6): + compress = int(match.group(6)) + # Group5 must match if group6 matches + # Compress wbit 8 does not support in zlib + # If compress level not support, + # FAIL the parse progress + if compress > 15 or compress < 9: + raise WSHandshakeError("Invalid window size") + if match.group(2): + notakeover = True + # Ignore regex group 5 & 6 for client_max_window_bits + break + # Return Fail if client side and not match + elif not isserver: + raise WSHandshakeError("Extension for deflate not supported" + ext.group(1)) + + return compress, notakeover + + +def ws_ext_gen( + compress: int = 15, isserver: bool = False, server_notakeover: bool = False +) -> str: + # client_notakeover=False not used for server + # compress wbit 8 does not support in zlib + if compress < 9 or compress > 15: + raise ValueError( + "Compress wbits must between 9 and 15, " "zlib does not support wbits=8" + ) + enabledext = ["permessage-deflate"] + if not isserver: + enabledext.append("client_max_window_bits") + + if compress < 15: + enabledext.append("server_max_window_bits=" + str(compress)) + if server_notakeover: + enabledext.append("server_no_context_takeover") + # if client_notakeover: + # enabledext.append('client_no_context_takeover') + return "; ".join(enabledext) + + +class WSParserState(IntEnum): + READ_HEADER = 1 + READ_PAYLOAD_LENGTH = 2 + READ_PAYLOAD_MASK = 3 + READ_PAYLOAD = 4 + + +class WebSocketReader: + def __init__( + self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True + ) -> None: + self.queue = queue + self._max_msg_size = max_msg_size + + self._exc: Optional[BaseException] = None + 
self._partial = bytearray() + self._state = WSParserState.READ_HEADER + + self._opcode: Optional[int] = None + self._frame_fin = False + self._frame_opcode: Optional[int] = None + self._frame_payload = bytearray() + + self._tail = b"" + self._has_mask = False + self._frame_mask: Optional[bytes] = None + self._payload_length = 0 + self._payload_length_flag = 0 + self._compressed: Optional[bool] = None + self._decompressobj: Optional[ZLibDecompressor] = None + self._compress = compress + + def feed_eof(self) -> None: + self.queue.feed_eof() + + def feed_data(self, data: bytes) -> Tuple[bool, bytes]: + if self._exc: + return True, data + + try: + return self._feed_data(data) + except Exception as exc: + self._exc = exc + set_exception(self.queue, exc) + return True, b"" + + def _feed_data(self, data: bytes) -> Tuple[bool, bytes]: + for fin, opcode, payload, compressed in self.parse_frame(data): + if compressed and not self._decompressobj: + self._decompressobj = ZLibDecompressor(suppress_deflate_header=True) + if opcode == WSMsgType.CLOSE: + if len(payload) >= 2: + close_code = UNPACK_CLOSE_CODE(payload[:2])[0] + if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + f"Invalid close code: {close_code}", + ) + try: + close_message = payload[2:].decode("utf-8") + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + msg = WSMessage(WSMsgType.CLOSE, close_code, close_message) + elif payload: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + f"Invalid close frame: {fin} {opcode} {payload!r}", + ) + else: + msg = WSMessage(WSMsgType.CLOSE, 0, "") + + self.queue.feed_data(msg, 0) + + elif opcode == WSMsgType.PING: + self.queue.feed_data( + WSMessage(WSMsgType.PING, payload, ""), len(payload) + ) + + elif opcode == WSMsgType.PONG: + self.queue.feed_data( + WSMessage(WSMsgType.PONG, payload, ""), len(payload) + ) + + elif ( + opcode not in (WSMsgType.TEXT, WSMsgType.BINARY) + and self._opcode is None + ): + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" + ) + else: + # load text/binary + if not fin: + # got partial frame payload + if opcode != WSMsgType.CONTINUATION: + self._opcode = opcode + self._partial.extend(payload) + if self._max_msg_size and len(self._partial) >= self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Message size {} exceeds limit {}".format( + len(self._partial), self._max_msg_size + ), + ) + else: + # previous frame was non finished + # we should get continuation opcode + if self._partial: + if opcode != WSMsgType.CONTINUATION: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "The opcode in non-fin frame is expected " + "to be zero, got {!r}".format(opcode), + ) + + if opcode == WSMsgType.CONTINUATION: + assert self._opcode is not None + opcode = self._opcode + self._opcode = None + + self._partial.extend(payload) + if self._max_msg_size and len(self._partial) >= self._max_msg_size: + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Message size {} exceeds limit {}".format( + len(self._partial), self._max_msg_size + ), + ) + + # Decompress process must to be done after all packets + # received. 
+ if compressed: + assert self._decompressobj is not None + self._partial.extend(_WS_DEFLATE_TRAILING) + payload_merged = self._decompressobj.decompress_sync( + self._partial, self._max_msg_size + ) + if self._decompressobj.unconsumed_tail: + left = len(self._decompressobj.unconsumed_tail) + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Decompressed message size {} exceeds limit {}".format( + self._max_msg_size + left, self._max_msg_size + ), + ) + else: + payload_merged = bytes(self._partial) + + self._partial.clear() + + if opcode == WSMsgType.TEXT: + try: + text = payload_merged.decode("utf-8") + self.queue.feed_data( + WSMessage(WSMsgType.TEXT, text, ""), len(text) + ) + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + else: + self.queue.feed_data( + WSMessage(WSMsgType.BINARY, payload_merged, ""), + len(payload_merged), + ) + + return False, b"" + + def parse_frame( + self, buf: bytes + ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]: + """Return the next frame from the socket.""" + frames = [] + if self._tail: + buf, self._tail = self._tail + buf, b"" + + start_pos = 0 + buf_length = len(buf) + + while True: + # read header + if self._state == WSParserState.READ_HEADER: + if buf_length - start_pos >= 2: + data = buf[start_pos : start_pos + 2] + start_pos += 2 + first_byte, second_byte = data + + fin = (first_byte >> 7) & 1 + rsv1 = (first_byte >> 6) & 1 + rsv2 = (first_byte >> 5) & 1 + rsv3 = (first_byte >> 4) & 1 + opcode = first_byte & 0xF + + # frame-fin = %x0 ; more frames of this message follow + # / %x1 ; final frame of this message + # frame-rsv1 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv2 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv3 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # + # Remove rsv1 from this test for deflate development + if rsv2 or rsv3 or (rsv1 and not self._compress): + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) + + if opcode > 0x7 and fin == 0: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received fragmented control frame", + ) + + has_mask = (second_byte >> 7) & 1 + length = second_byte & 0x7F + + # Control frames MUST have a payload + # length of 125 bytes or less + if opcode > 0x7 and length > 125: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Control frame payload cannot be " "larger than 125 bytes", + ) + + # Set compress status if last package is FIN + # OR set compress status if this is first fragment + # Raise error if not first fragment with rsv1 = 0x1 + if self._frame_fin or self._compressed is None: + self._compressed = True if rsv1 else False + elif rsv1: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) + + self._frame_fin = bool(fin) + self._frame_opcode = opcode + self._has_mask = bool(has_mask) + self._payload_length_flag = length + self._state = WSParserState.READ_PAYLOAD_LENGTH + else: + break + + # read payload length + if self._state == WSParserState.READ_PAYLOAD_LENGTH: + length = self._payload_length_flag + if length == 126: + if buf_length - start_pos >= 2: + data = buf[start_pos : start_pos + 2] + start_pos += 2 + length = UNPACK_LEN2(data)[0] + self._payload_length = length + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD + ) + else: + break + elif length > 126: + if 
buf_length - start_pos >= 8: + data = buf[start_pos : start_pos + 8] + start_pos += 8 + length = UNPACK_LEN3(data)[0] + self._payload_length = length + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD + ) + else: + break + else: + self._payload_length = length + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD + ) + + # read payload mask + if self._state == WSParserState.READ_PAYLOAD_MASK: + if buf_length - start_pos >= 4: + self._frame_mask = buf[start_pos : start_pos + 4] + start_pos += 4 + self._state = WSParserState.READ_PAYLOAD + else: + break + + if self._state == WSParserState.READ_PAYLOAD: + length = self._payload_length + payload = self._frame_payload + + chunk_len = buf_length - start_pos + if length >= chunk_len: + self._payload_length = length - chunk_len + payload.extend(buf[start_pos:]) + start_pos = buf_length + else: + self._payload_length = 0 + payload.extend(buf[start_pos : start_pos + length]) + start_pos = start_pos + length + + if self._payload_length == 0: + if self._has_mask: + assert self._frame_mask is not None + _websocket_mask(self._frame_mask, payload) + + frames.append( + (self._frame_fin, self._frame_opcode, payload, self._compressed) + ) + + self._frame_payload = bytearray() + self._state = WSParserState.READ_HEADER + else: + break + + self._tail = buf[start_pos:] + + return frames + + +class WebSocketWriter: + def __init__( + self, + protocol: BaseProtocol, + transport: asyncio.Transport, + *, + use_mask: bool = False, + limit: int = DEFAULT_LIMIT, + random: random.Random = random.Random(), + compress: int = 0, + notakeover: bool = False, + ) -> None: + self.protocol = protocol + self.transport = transport + self.use_mask = use_mask + self.randrange = random.randrange + self.compress = compress + self.notakeover = notakeover + self._closing = False + self._limit = limit + self._output_size = 0 + self._compressobj: Any = None # actually compressobj + + async def _send_frame( + self, message: bytes, opcode: int, compress: Optional[int] = None + ) -> None: + """Send a frame over the websocket with message as its payload.""" + if self._closing and not (opcode & WSMsgType.CLOSE): + raise ConnectionResetError("Cannot write to closing transport") + + rsv = 0 + + # Only compress larger packets (disabled) + # Does small packet needs to be compressed? + # if self.compress and opcode < 8 and len(message) > 124: + if (compress or self.compress) and opcode < 8: + if compress: + # Do not set self._compress if compressing is for this frame + compressobj = self._make_compress_obj(compress) + else: # self.compress + if not self._compressobj: + self._compressobj = self._make_compress_obj(self.compress) + compressobj = self._compressobj + + message = await compressobj.compress(message) + # Its critical that we do not return control to the event + # loop until we have finished sending all the compressed + # data. Otherwise we could end up mixing compressed frames + # if there are multiple coroutines compressing data. 
+ message += compressobj.flush( + zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH + ) + if message.endswith(_WS_DEFLATE_TRAILING): + message = message[:-4] + rsv = rsv | 0x40 + + msg_length = len(message) + + use_mask = self.use_mask + if use_mask: + mask_bit = 0x80 + else: + mask_bit = 0 + + if msg_length < 126: + header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit) + elif msg_length < (1 << 16): + header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length) + else: + header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) + if use_mask: + mask_int = self.randrange(0, 0xFFFFFFFF) + mask = mask_int.to_bytes(4, "big") + message = bytearray(message) + _websocket_mask(mask, message) + self._write(header + mask + message) + self._output_size += len(header) + len(mask) + msg_length + else: + if msg_length > MSG_SIZE: + self._write(header) + self._write(message) + else: + self._write(header + message) + + self._output_size += len(header) + msg_length + + # It is safe to return control to the event loop when using compression + # after this point as we have already sent or buffered all the data. + + if self._output_size > self._limit: + self._output_size = 0 + await self.protocol._drain_helper() + + def _make_compress_obj(self, compress: int) -> ZLibCompressor: + return ZLibCompressor( + level=zlib.Z_BEST_SPEED, + wbits=-compress, + max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, + ) + + def _write(self, data: bytes) -> None: + if self.transport is None or self.transport.is_closing(): + raise ConnectionResetError("Cannot write to closing transport") + self.transport.write(data) + + async def pong(self, message: Union[bytes, str] = b"") -> None: + """Send pong message.""" + if isinstance(message, str): + message = message.encode("utf-8") + await self._send_frame(message, WSMsgType.PONG) + + async def ping(self, message: Union[bytes, str] = b"") -> None: + """Send ping message.""" + if isinstance(message, str): + message = message.encode("utf-8") + await self._send_frame(message, WSMsgType.PING) + + async def send( + self, + message: Union[str, bytes], + binary: bool = False, + compress: Optional[int] = None, + ) -> None: + """Send a frame over the websocket with message as its payload.""" + if isinstance(message, str): + message = message.encode("utf-8") + if binary: + await self._send_frame(message, WSMsgType.BINARY, compress) + else: + await self._send_frame(message, WSMsgType.TEXT, compress) + + async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None: + """Close the websocket, sending the specified code and message.""" + if isinstance(message, str): + message = message.encode("utf-8") + try: + await self._send_frame( + PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE + ) + finally: + self._closing = True diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/http_writer.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/http_writer.py new file mode 100644 index 000000000..d6b02e6f5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/http_writer.py @@ -0,0 +1,198 @@ +"""Http related parsers and protocol.""" + +import asyncio +import zlib +from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union # noqa + +from multidict import CIMultiDict + +from .abc import AbstractStreamWriter +from .base_protocol import BaseProtocol +from .compression_utils import ZLibCompressor +from .helpers import NO_EXTENSIONS + +__all__ = ("StreamWriter", "HttpVersion", 
"HttpVersion10", "HttpVersion11") + + +class HttpVersion(NamedTuple): + major: int + minor: int + + +HttpVersion10 = HttpVersion(1, 0) +HttpVersion11 = HttpVersion(1, 1) + + +_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]] +_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]] + + +class StreamWriter(AbstractStreamWriter): + def __init__( + self, + protocol: BaseProtocol, + loop: asyncio.AbstractEventLoop, + on_chunk_sent: _T_OnChunkSent = None, + on_headers_sent: _T_OnHeadersSent = None, + ) -> None: + self._protocol = protocol + + self.loop = loop + self.length = None + self.chunked = False + self.buffer_size = 0 + self.output_size = 0 + + self._eof = False + self._compress: Optional[ZLibCompressor] = None + self._drain_waiter = None + + self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent + self._on_headers_sent: _T_OnHeadersSent = on_headers_sent + + @property + def transport(self) -> Optional[asyncio.Transport]: + return self._protocol.transport + + @property + def protocol(self) -> BaseProtocol: + return self._protocol + + def enable_chunking(self) -> None: + self.chunked = True + + def enable_compression( + self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY + ) -> None: + self._compress = ZLibCompressor(encoding=encoding, strategy=strategy) + + def _write(self, chunk: bytes) -> None: + size = len(chunk) + self.buffer_size += size + self.output_size += size + transport = self.transport + if not self._protocol.connected or transport is None or transport.is_closing(): + raise ConnectionResetError("Cannot write to closing transport") + transport.write(chunk) + + async def write( + self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000 + ) -> None: + """Writes chunk of data to a stream. + + write_eof() indicates end of stream. + writer can't be used after write_eof() method being called. + write() return drain future. 
+ """ + if self._on_chunk_sent is not None: + await self._on_chunk_sent(chunk) + + if isinstance(chunk, memoryview): + if chunk.nbytes != len(chunk): + # just reshape it + chunk = chunk.cast("c") + + if self._compress is not None: + chunk = await self._compress.compress(chunk) + if not chunk: + return + + if self.length is not None: + chunk_len = len(chunk) + if self.length >= chunk_len: + self.length = self.length - chunk_len + else: + chunk = chunk[: self.length] + self.length = 0 + if not chunk: + return + + if chunk: + if self.chunked: + chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii") + chunk = chunk_len_pre + chunk + b"\r\n" + + self._write(chunk) + + if self.buffer_size > LIMIT and drain: + self.buffer_size = 0 + await self.drain() + + async def write_headers( + self, status_line: str, headers: "CIMultiDict[str]" + ) -> None: + """Write request/response status and headers.""" + if self._on_headers_sent is not None: + await self._on_headers_sent(headers) + + # status + headers + buf = _serialize_headers(status_line, headers) + self._write(buf) + + async def write_eof(self, chunk: bytes = b"") -> None: + if self._eof: + return + + if chunk and self._on_chunk_sent is not None: + await self._on_chunk_sent(chunk) + + if self._compress: + if chunk: + chunk = await self._compress.compress(chunk) + + chunk += self._compress.flush() + if chunk and self.chunked: + chunk_len = ("%x\r\n" % len(chunk)).encode("ascii") + chunk = chunk_len + chunk + b"\r\n0\r\n\r\n" + else: + if self.chunked: + if chunk: + chunk_len = ("%x\r\n" % len(chunk)).encode("ascii") + chunk = chunk_len + chunk + b"\r\n0\r\n\r\n" + else: + chunk = b"0\r\n\r\n" + + if chunk: + self._write(chunk) + + await self.drain() + + self._eof = True + + async def drain(self) -> None: + """Flush the write buffer. + + The intended use is to write + + await w.write(data) + await w.drain() + """ + if self._protocol.transport is not None: + await self._protocol._drain_helper() + + +def _safe_header(string: str) -> str: + if "\r" in string or "\n" in string: + raise ValueError( + "Newline or carriage return detected in headers. " + "Potential header injection attack." + ) + return string + + +def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes: + headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items()) + line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n" + return line.encode("utf-8") + + +_serialize_headers = _py_serialize_headers + +try: + import aiohttp._http_writer as _http_writer # type: ignore[import-not-found] + + _c_serialize_headers = _http_writer._serialize_headers + if not NO_EXTENSIONS: + _serialize_headers = _c_serialize_headers +except ImportError: + pass diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/locks.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/locks.py new file mode 100644 index 000000000..de2dc83d0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/locks.py @@ -0,0 +1,41 @@ +import asyncio +import collections +from typing import Any, Deque, Optional + + +class EventResultOrError: + """Event asyncio lock helper class. + + Wraps the Event asyncio lock allowing either to awake the + locked Tasks without any error or raising an exception. + + thanks to @vorpalsmith for the simple design. 
+ """ + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._exc: Optional[BaseException] = None + self._event = asyncio.Event() + self._waiters: Deque[asyncio.Future[Any]] = collections.deque() + + def set(self, exc: Optional[BaseException] = None) -> None: + self._exc = exc + self._event.set() + + async def wait(self) -> Any: + waiter = self._loop.create_task(self._event.wait()) + self._waiters.append(waiter) + try: + val = await waiter + finally: + self._waiters.remove(waiter) + + if self._exc is not None: + raise self._exc + + return val + + def cancel(self) -> None: + """Cancel all waiters""" + for waiter in self._waiters: + waiter.cancel() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/log.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/log.py new file mode 100644 index 000000000..3cecea2ba --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/log.py @@ -0,0 +1,8 @@ +import logging + +access_logger = logging.getLogger("aiohttp.access") +client_logger = logging.getLogger("aiohttp.client") +internal_logger = logging.getLogger("aiohttp.internal") +server_logger = logging.getLogger("aiohttp.server") +web_logger = logging.getLogger("aiohttp.web") +ws_logger = logging.getLogger("aiohttp.websocket") diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/multipart.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/multipart.py new file mode 100644 index 000000000..71fc2654a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/multipart.py @@ -0,0 +1,1015 @@ +import base64 +import binascii +import json +import re +import uuid +import warnings +import zlib +from collections import deque +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + AsyncIterator, + Deque, + Dict, + Iterator, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +from urllib.parse import parse_qsl, unquote, urlencode + +from multidict import CIMultiDict, CIMultiDictProxy + +from .compression_utils import ZLibCompressor, ZLibDecompressor +from .hdrs import ( + CONTENT_DISPOSITION, + CONTENT_ENCODING, + CONTENT_LENGTH, + CONTENT_TRANSFER_ENCODING, + CONTENT_TYPE, +) +from .helpers import CHAR, TOKEN, parse_mimetype, reify +from .http import HeadersParser +from .payload import ( + JsonPayload, + LookupError, + Order, + Payload, + StringPayload, + get_payload, + payload_type, +) +from .streams import StreamReader + +__all__ = ( + "MultipartReader", + "MultipartWriter", + "BodyPartReader", + "BadContentDispositionHeader", + "BadContentDispositionParam", + "parse_content_disposition", + "content_disposition_filename", +) + + +if TYPE_CHECKING: + from .client_reqrep import ClientResponse + + +class BadContentDispositionHeader(RuntimeWarning): + pass + + +class BadContentDispositionParam(RuntimeWarning): + pass + + +def parse_content_disposition( + header: Optional[str], +) -> Tuple[Optional[str], Dict[str, str]]: + def is_token(string: str) -> bool: + return bool(string) and TOKEN >= set(string) + + def is_quoted(string: str) -> bool: + return string[0] == string[-1] == '"' + + def is_rfc5987(string: str) -> bool: + return is_token(string) and string.count("'") == 2 + + def is_extended_param(string: str) -> bool: + return string.endswith("*") + + def is_continuous_param(string: str) -> bool: + pos = string.find("*") + 1 + if not pos: + return False + substring = string[pos:-1] if string.endswith("*") else string[pos:] + 
return substring.isdigit() + + def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str: + return re.sub(f"\\\\([{chars}])", "\\1", text) + + if not header: + return None, {} + + disptype, *parts = header.split(";") + if not is_token(disptype): + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + params: Dict[str, str] = {} + while parts: + item = parts.pop(0) + + if "=" not in item: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + key, value = item.split("=", 1) + key = key.lower().strip() + value = value.lstrip() + + if key in params: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + if not is_token(key): + warnings.warn(BadContentDispositionParam(item)) + continue + + elif is_continuous_param(key): + if is_quoted(value): + value = unescape(value[1:-1]) + elif not is_token(value): + warnings.warn(BadContentDispositionParam(item)) + continue + + elif is_extended_param(key): + if is_rfc5987(value): + encoding, _, value = value.split("'", 2) + encoding = encoding or "utf-8" + else: + warnings.warn(BadContentDispositionParam(item)) + continue + + try: + value = unquote(value, encoding, "strict") + except UnicodeDecodeError: # pragma: nocover + warnings.warn(BadContentDispositionParam(item)) + continue + + else: + failed = True + if is_quoted(value): + failed = False + value = unescape(value[1:-1].lstrip("\\/")) + elif is_token(value): + failed = False + elif parts: + # maybe just ; in filename, in any case this is just + # one case fix, for proper fix we need to redesign parser + _value = f"{value};{parts[0]}" + if is_quoted(_value): + parts.pop(0) + value = unescape(_value[1:-1].lstrip("\\/")) + failed = False + + if failed: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + params[key] = value + + return disptype.lower(), params + + +def content_disposition_filename( + params: Mapping[str, str], name: str = "filename" +) -> Optional[str]: + name_suf = "%s*" % name + if not params: + return None + elif name_suf in params: + return params[name_suf] + elif name in params: + return params[name] + else: + parts = [] + fnparams = sorted( + (key, value) for key, value in params.items() if key.startswith(name_suf) + ) + for num, (key, value) in enumerate(fnparams): + _, tail = key.split("*", 1) + if tail.endswith("*"): + tail = tail[:-1] + if tail == str(num): + parts.append(value) + else: + break + if not parts: + return None + value = "".join(parts) + if "'" in value: + encoding, _, value = value.split("'", 2) + encoding = encoding or "utf-8" + return unquote(value, encoding, "strict") + return value + + +class MultipartResponseWrapper: + """Wrapper around the MultipartReader. + + It takes care about + underlying connection and close it when it needs in. 
+ """ + + def __init__( + self, + resp: "ClientResponse", + stream: "MultipartReader", + ) -> None: + self.resp = resp + self.stream = stream + + def __aiter__(self) -> "MultipartResponseWrapper": + return self + + async def __anext__( + self, + ) -> Union["MultipartReader", "BodyPartReader"]: + part = await self.next() + if part is None: + raise StopAsyncIteration + return part + + def at_eof(self) -> bool: + """Returns True when all response data had been read.""" + return self.resp.content.at_eof() + + async def next( + self, + ) -> Optional[Union["MultipartReader", "BodyPartReader"]]: + """Emits next multipart reader object.""" + item = await self.stream.next() + if self.stream.at_eof(): + await self.release() + return item + + async def release(self) -> None: + """Release the connection gracefully. + + All remaining content is read to the void. + """ + await self.resp.release() + + +class BodyPartReader: + """Multipart reader for single body part.""" + + chunk_size = 8192 + + def __init__( + self, + boundary: bytes, + headers: "CIMultiDictProxy[str]", + content: StreamReader, + *, + subtype: str = "mixed", + default_charset: Optional[str] = None, + ) -> None: + self.headers = headers + self._boundary = boundary + self._content = content + self._default_charset = default_charset + self._at_eof = False + self._is_form_data = subtype == "form-data" + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + length = None if self._is_form_data else self.headers.get(CONTENT_LENGTH, None) + self._length = int(length) if length is not None else None + self._read_bytes = 0 + self._unread: Deque[bytes] = deque() + self._prev_chunk: Optional[bytes] = None + self._content_eof = 0 + self._cache: Dict[str, Any] = {} + + def __aiter__(self) -> AsyncIterator["BodyPartReader"]: + return self # type: ignore[return-value] + + async def __anext__(self) -> bytes: + part = await self.next() + if part is None: + raise StopAsyncIteration + return part + + async def next(self) -> Optional[bytes]: + item = await self.read() + if not item: + return None + return item + + async def read(self, *, decode: bool = False) -> bytes: + """Reads body part data. + + decode: Decodes data following by encoding + method from Content-Encoding header. If it missed + data remains untouched + """ + if self._at_eof: + return b"" + data = bytearray() + while not self._at_eof: + data.extend(await self.read_chunk(self.chunk_size)) + if decode: + return self.decode(data) + return data + + async def read_chunk(self, size: int = chunk_size) -> bytes: + """Reads body part content chunk of the specified size. + + size: chunk size + """ + if self._at_eof: + return b"" + if self._length: + chunk = await self._read_chunk_from_length(size) + else: + chunk = await self._read_chunk_from_stream(size) + + self._read_bytes += len(chunk) + if self._read_bytes == self._length: + self._at_eof = True + if self._at_eof: + clrf = await self._content.readline() + assert ( + b"\r\n" == clrf + ), "reader did not read all the data or it is malformed" + return chunk + + async def _read_chunk_from_length(self, size: int) -> bytes: + # Reads body part content chunk of the specified size. + # The body part must has Content-Length header with proper value. 
+ assert self._length is not None, "Content-Length required for chunked read" + chunk_size = min(size, self._length - self._read_bytes) + chunk = await self._content.read(chunk_size) + if self._content.at_eof(): + self._at_eof = True + return chunk + + async def _read_chunk_from_stream(self, size: int) -> bytes: + # Reads content chunk of body part with unknown length. + # The Content-Length header for body part is not necessary. + assert ( + size >= len(self._boundary) + 2 + ), "Chunk size must be greater or equal than boundary length + 2" + first_chunk = self._prev_chunk is None + if first_chunk: + self._prev_chunk = await self._content.read(size) + + chunk = await self._content.read(size) + self._content_eof += int(self._content.at_eof()) + assert self._content_eof < 3, "Reading after EOF" + assert self._prev_chunk is not None + window = self._prev_chunk + chunk + sub = b"\r\n" + self._boundary + if first_chunk: + idx = window.find(sub) + else: + idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub))) + if idx >= 0: + # pushing boundary back to content + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + self._content.unread_data(window[idx:]) + if size > idx: + self._prev_chunk = self._prev_chunk[:idx] + chunk = window[len(self._prev_chunk) : idx] + if not chunk: + self._at_eof = True + result = self._prev_chunk + self._prev_chunk = chunk + return result + + async def readline(self) -> bytes: + """Reads body part by line by line.""" + if self._at_eof: + return b"" + + if self._unread: + line = self._unread.popleft() + else: + line = await self._content.readline() + + if line.startswith(self._boundary): + # the very last boundary may not come with \r\n, + # so set single rules for everyone + sline = line.rstrip(b"\r\n") + boundary = self._boundary + last_boundary = self._boundary + b"--" + # ensure that we read exactly the boundary, not something alike + if sline == boundary or sline == last_boundary: + self._at_eof = True + self._unread.append(line) + return b"" + else: + next_line = await self._content.readline() + if next_line.startswith(self._boundary): + line = line[:-2] # strip CRLF but only once + self._unread.append(next_line) + + return line + + async def release(self) -> None: + """Like read(), but reads all the data to the void.""" + if self._at_eof: + return + while not self._at_eof: + await self.read_chunk(self.chunk_size) + + async def text(self, *, encoding: Optional[str] = None) -> str: + """Like read(), but assumes that body part contains text data.""" + data = await self.read(decode=True) + # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm + # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send + encoding = encoding or self.get_charset(default="utf-8") + return data.decode(encoding) + + async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]: + """Like read(), but assumes that body parts contains JSON data.""" + data = await self.read(decode=True) + if not data: + return None + encoding = encoding or self.get_charset(default="utf-8") + return cast(Dict[str, Any], json.loads(data.decode(encoding))) + + async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]: + """Like read(), but assumes that body parts contain form urlencoded data.""" + data = await self.read(decode=True) + if not data: + return [] + if encoding is not None: + real_encoding = encoding + else: + real_encoding = 
self.get_charset(default="utf-8") + try: + decoded_data = data.rstrip().decode(real_encoding) + except UnicodeDecodeError: + raise ValueError("data cannot be decoded with %s encoding" % real_encoding) + + return parse_qsl( + decoded_data, + keep_blank_values=True, + encoding=real_encoding, + ) + + def at_eof(self) -> bool: + """Returns True if the boundary was reached or False otherwise.""" + return self._at_eof + + def decode(self, data: bytes) -> bytes: + """Decodes data. + + Decoding is done according the specified Content-Encoding + or Content-Transfer-Encoding headers value. + """ + if CONTENT_TRANSFER_ENCODING in self.headers: + data = self._decode_content_transfer(data) + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + if not self._is_form_data and CONTENT_ENCODING in self.headers: + return self._decode_content(data) + return data + + def _decode_content(self, data: bytes) -> bytes: + encoding = self.headers.get(CONTENT_ENCODING, "").lower() + if encoding == "identity": + return data + if encoding in {"deflate", "gzip"}: + return ZLibDecompressor( + encoding=encoding, + suppress_deflate_header=True, + ).decompress_sync(data) + + raise RuntimeError(f"unknown content encoding: {encoding}") + + def _decode_content_transfer(self, data: bytes) -> bytes: + encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower() + + if encoding == "base64": + return base64.b64decode(data) + elif encoding == "quoted-printable": + return binascii.a2b_qp(data) + elif encoding in ("binary", "8bit", "7bit"): + return data + else: + raise RuntimeError( + "unknown content transfer encoding: {}" "".format(encoding) + ) + + def get_charset(self, default: str) -> str: + """Returns charset parameter from Content-Type header or default.""" + ctype = self.headers.get(CONTENT_TYPE, "") + mimetype = parse_mimetype(ctype) + return mimetype.parameters.get("charset", self._default_charset or default) + + @reify + def name(self) -> Optional[str]: + """Returns name specified in Content-Disposition header. + + If the header is missing or malformed, returns None. + """ + _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION)) + return content_disposition_filename(params, "name") + + @reify + def filename(self) -> Optional[str]: + """Returns filename specified in Content-Disposition header. + + Returns None if the header is missing or malformed. + """ + _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION)) + return content_disposition_filename(params, "filename") + + +@payload_type(BodyPartReader, order=Order.try_first) +class BodyPartReaderPayload(Payload): + def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: + super().__init__(value, *args, **kwargs) + + params: Dict[str, str] = {} + if value.name is not None: + params["name"] = value.name + if value.filename is not None: + params["filename"] = value.filename + + if params: + self.set_content_disposition("attachment", True, **params) + + async def write(self, writer: Any) -> None: + field = self._value + chunk = await field.read_chunk(size=2**16) + while chunk: + await writer.write(field.decode(chunk)) + chunk = await field.read_chunk(size=2**16) + + +class MultipartReader: + """Multipart body reader.""" + + #: Response wrapper, used when multipart readers constructs from response. + response_wrapper_cls = MultipartResponseWrapper + #: Multipart reader class, used to handle multipart/* body parts. 
+ #: None points to type(self) + multipart_reader_cls = None + #: Body part reader class for non multipart/* content types. + part_reader_cls = BodyPartReader + + def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None: + self._mimetype = parse_mimetype(headers[CONTENT_TYPE]) + assert self._mimetype.type == "multipart", "multipart/* content type expected" + if "boundary" not in self._mimetype.parameters: + raise ValueError( + "boundary missed for Content-Type: %s" % headers[CONTENT_TYPE] + ) + + self.headers = headers + self._boundary = ("--" + self._get_boundary()).encode() + self._content = content + self._default_charset: Optional[str] = None + self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None + self._at_eof = False + self._at_bof = True + self._unread: List[bytes] = [] + + def __aiter__( + self, + ) -> AsyncIterator["BodyPartReader"]: + return self # type: ignore[return-value] + + async def __anext__( + self, + ) -> Optional[Union["MultipartReader", BodyPartReader]]: + part = await self.next() + if part is None: + raise StopAsyncIteration + return part + + @classmethod + def from_response( + cls, + response: "ClientResponse", + ) -> MultipartResponseWrapper: + """Constructs reader instance from HTTP response. + + :param response: :class:`~aiohttp.client.ClientResponse` instance + """ + obj = cls.response_wrapper_cls( + response, cls(response.headers, response.content) + ) + return obj + + def at_eof(self) -> bool: + """Returns True if the final boundary was reached, false otherwise.""" + return self._at_eof + + async def next( + self, + ) -> Optional[Union["MultipartReader", BodyPartReader]]: + """Emits the next multipart body part.""" + # So, if we're at BOF, we need to skip till the boundary. + if self._at_eof: + return None + await self._maybe_release_last_part() + if self._at_bof: + await self._read_until_first_boundary() + self._at_bof = False + else: + await self._read_boundary() + if self._at_eof: # we just read the last boundary, nothing to do there + return None + + part = await self.fetch_next_part() + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.6 + if ( + self._last_part is None + and self._mimetype.subtype == "form-data" + and isinstance(part, BodyPartReader) + ): + _, params = parse_content_disposition(part.headers.get(CONTENT_DISPOSITION)) + if params.get("name") == "_charset_": + # Longest encoding in https://encoding.spec.whatwg.org/encodings.json + # is 19 characters, so 32 should be more than enough for any valid encoding. + charset = await part.read_chunk(32) + if len(charset) > 31: + raise RuntimeError("Invalid default charset") + self._default_charset = charset.strip().decode() + part = await self.fetch_next_part() + self._last_part = part + return self._last_part + + async def release(self) -> None: + """Reads all the body parts to the void till the final boundary.""" + while not self._at_eof: + item = await self.next() + if item is None: + break + await item.release() + + async def fetch_next_part( + self, + ) -> Union["MultipartReader", BodyPartReader]: + """Returns the next body part reader.""" + headers = await self._read_headers() + return self._get_part_reader(headers) + + def _get_part_reader( + self, + headers: "CIMultiDictProxy[str]", + ) -> Union["MultipartReader", BodyPartReader]: + """Dispatches the response by the `Content-Type` header. + + Returns a suitable reader instance. 
+ + :param dict headers: Response headers + """ + ctype = headers.get(CONTENT_TYPE, "") + mimetype = parse_mimetype(ctype) + + if mimetype.type == "multipart": + if self.multipart_reader_cls is None: + return type(self)(headers, self._content) + return self.multipart_reader_cls(headers, self._content) + else: + return self.part_reader_cls( + self._boundary, + headers, + self._content, + subtype=self._mimetype.subtype, + default_charset=self._default_charset, + ) + + def _get_boundary(self) -> str: + boundary = self._mimetype.parameters["boundary"] + if len(boundary) > 70: + raise ValueError("boundary %r is too long (70 chars max)" % boundary) + + return boundary + + async def _readline(self) -> bytes: + if self._unread: + return self._unread.pop() + return await self._content.readline() + + async def _read_until_first_boundary(self) -> None: + while True: + chunk = await self._readline() + if chunk == b"": + raise ValueError( + "Could not find starting boundary %r" % (self._boundary) + ) + chunk = chunk.rstrip() + if chunk == self._boundary: + return + elif chunk == self._boundary + b"--": + self._at_eof = True + return + + async def _read_boundary(self) -> None: + chunk = (await self._readline()).rstrip() + if chunk == self._boundary: + pass + elif chunk == self._boundary + b"--": + self._at_eof = True + epilogue = await self._readline() + next_line = await self._readline() + + # the epilogue is expected and then either the end of input or the + # parent multipart boundary, if the parent boundary is found then + # it should be marked as unread and handed to the parent for + # processing + if next_line[:2] == b"--": + self._unread.append(next_line) + # otherwise the request is likely missing an epilogue and both + # lines should be passed to the parent for processing + # (this handles the old behavior gracefully) + else: + self._unread.extend([next_line, epilogue]) + else: + raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}") + + async def _read_headers(self) -> "CIMultiDictProxy[str]": + lines = [b""] + while True: + chunk = await self._content.readline() + chunk = chunk.strip() + lines.append(chunk) + if not chunk: + break + parser = HeadersParser() + headers, raw_headers = parser.parse_headers(lines) + return headers + + async def _maybe_release_last_part(self) -> None: + """Ensures that the last read body part is read completely.""" + if self._last_part is not None: + if not self._last_part.at_eof(): + await self._last_part.release() + self._unread.extend(self._last_part._unread) + self._last_part = None + + +_Part = Tuple[Payload, str, str] + + +class MultipartWriter(Payload): + """Multipart body writer.""" + + def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None: + boundary = boundary if boundary is not None else uuid.uuid4().hex + # The underlying Payload API demands a str (utf-8), not bytes, + # so we need to ensure we don't lose anything during conversion. + # As a result, require the boundary to be ASCII only. + # In both situations. 
+ + try: + self._boundary = boundary.encode("ascii") + except UnicodeEncodeError: + raise ValueError("boundary should contain ASCII only chars") from None + ctype = f"multipart/{subtype}; boundary={self._boundary_value}" + + super().__init__(None, content_type=ctype) + + self._parts: List[_Part] = [] + self._is_form_data = subtype == "form-data" + + def __enter__(self) -> "MultipartWriter": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + pass + + def __iter__(self) -> Iterator[_Part]: + return iter(self._parts) + + def __len__(self) -> int: + return len(self._parts) + + def __bool__(self) -> bool: + return True + + _valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z") + _invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]") + + @property + def _boundary_value(self) -> str: + """Wrap boundary parameter value in quotes, if necessary. + + Reads self.boundary and returns a unicode string. + """ + # Refer to RFCs 7231, 7230, 5234. + # + # parameter = token "=" ( token / quoted-string ) + # token = 1*tchar + # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE + # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text + # obs-text = %x80-FF + # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) + # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" + # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" + # / DIGIT / ALPHA + # ; any VCHAR, except delimiters + # VCHAR = %x21-7E + value = self._boundary + if re.match(self._valid_tchar_regex, value): + return value.decode("ascii") # cannot fail + + if re.search(self._invalid_qdtext_char_regex, value): + raise ValueError("boundary value contains invalid characters") + + # escape %x5C and %x22 + quoted_value_content = value.replace(b"\\", b"\\\\") + quoted_value_content = quoted_value_content.replace(b'"', b'\\"') + + return '"' + quoted_value_content.decode("ascii") + '"' + + @property + def boundary(self) -> str: + return self._boundary.decode("ascii") + + def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Payload: + if headers is None: + headers = CIMultiDict() + + if isinstance(obj, Payload): + obj.headers.update(headers) + return self.append_payload(obj) + else: + try: + payload = get_payload(obj, headers=headers) + except LookupError: + raise TypeError("Cannot create payload from %r" % obj) + else: + return self.append_payload(payload) + + def append_payload(self, payload: Payload) -> Payload: + """Adds a new body part to multipart writer.""" + encoding: Optional[str] = None + te_encoding: Optional[str] = None + if self._is_form_data: + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.7 + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + assert ( + not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING} + & payload.headers.keys() + ) + # Set default Content-Disposition in case user doesn't create one + if CONTENT_DISPOSITION not in payload.headers: + name = f"section-{len(self._parts)}" + payload.set_content_disposition("form-data", name=name) + else: + # compression + encoding = payload.headers.get(CONTENT_ENCODING, "").lower() + if encoding and encoding not in ("deflate", "gzip", "identity"): + raise RuntimeError(f"unknown content encoding: {encoding}") + if encoding == "identity": + encoding = None + + # te encoding + te_encoding = payload.headers.get(CONTENT_TRANSFER_ENCODING, "").lower() + if te_encoding not in ("", "base64", 
"quoted-printable", "binary"): + raise RuntimeError(f"unknown content transfer encoding: {te_encoding}") + if te_encoding == "binary": + te_encoding = None + + # size + size = payload.size + if size is not None and not (encoding or te_encoding): + payload.headers[CONTENT_LENGTH] = str(size) + + self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type] + return payload + + def append_json( + self, obj: Any, headers: Optional[Mapping[str, str]] = None + ) -> Payload: + """Helper to append JSON part.""" + if headers is None: + headers = CIMultiDict() + + return self.append_payload(JsonPayload(obj, headers=headers)) + + def append_form( + self, + obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], + headers: Optional[Mapping[str, str]] = None, + ) -> Payload: + """Helper to append form urlencoded part.""" + assert isinstance(obj, (Sequence, Mapping)) + + if headers is None: + headers = CIMultiDict() + + if isinstance(obj, Mapping): + obj = list(obj.items()) + data = urlencode(obj, doseq=True) + + return self.append_payload( + StringPayload( + data, headers=headers, content_type="application/x-www-form-urlencoded" + ) + ) + + @property + def size(self) -> Optional[int]: + """Size of the payload.""" + total = 0 + for part, encoding, te_encoding in self._parts: + if encoding or te_encoding or part.size is None: + return None + + total += int( + 2 + + len(self._boundary) + + 2 + + part.size # b'--'+self._boundary+b'\r\n' + + len(part._binary_headers) + + 2 # b'\r\n' + ) + + total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' + return total + + async def write(self, writer: Any, close_boundary: bool = True) -> None: + """Write body.""" + for part, encoding, te_encoding in self._parts: + if self._is_form_data: + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.2 + assert CONTENT_DISPOSITION in part.headers + assert "name=" in part.headers[CONTENT_DISPOSITION] + + await writer.write(b"--" + self._boundary + b"\r\n") + await writer.write(part._binary_headers) + + if encoding or te_encoding: + w = MultipartPayloadWriter(writer) + if encoding: + w.enable_compression(encoding) + if te_encoding: + w.enable_encoding(te_encoding) + await part.write(w) # type: ignore[arg-type] + await w.write_eof() + else: + await part.write(writer) + + await writer.write(b"\r\n") + + if close_boundary: + await writer.write(b"--" + self._boundary + b"--\r\n") + + +class MultipartPayloadWriter: + def __init__(self, writer: Any) -> None: + self._writer = writer + self._encoding: Optional[str] = None + self._compress: Optional[ZLibCompressor] = None + self._encoding_buffer: Optional[bytearray] = None + + def enable_encoding(self, encoding: str) -> None: + if encoding == "base64": + self._encoding = encoding + self._encoding_buffer = bytearray() + elif encoding == "quoted-printable": + self._encoding = "quoted-printable" + + def enable_compression( + self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY + ) -> None: + self._compress = ZLibCompressor( + encoding=encoding, + suppress_deflate_header=True, + strategy=strategy, + ) + + async def write_eof(self) -> None: + if self._compress is not None: + chunk = self._compress.flush() + if chunk: + self._compress = None + await self.write(chunk) + + if self._encoding == "base64": + if self._encoding_buffer: + await self._writer.write(base64.b64encode(self._encoding_buffer)) + + async def write(self, chunk: bytes) -> None: + if self._compress is not None: + if chunk: + chunk = await self._compress.compress(chunk) 
+ if not chunk: + return + + if self._encoding == "base64": + buf = self._encoding_buffer + assert buf is not None + buf.extend(chunk) + + if buf: + div, mod = divmod(len(buf), 3) + enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :]) + if enc_chunk: + b64chunk = base64.b64encode(enc_chunk) + await self._writer.write(b64chunk) + elif self._encoding == "quoted-printable": + await self._writer.write(binascii.b2a_qp(chunk)) + else: + await self._writer.write(chunk) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/payload.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/payload.py new file mode 100644 index 000000000..527139361 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/payload.py @@ -0,0 +1,464 @@ +import asyncio +import enum +import io +import json +import mimetypes +import os +import warnings +from abc import ABC, abstractmethod +from itertools import chain +from typing import ( + IO, + TYPE_CHECKING, + Any, + Dict, + Final, + Iterable, + Optional, + TextIO, + Tuple, + Type, + Union, +) + +from multidict import CIMultiDict + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import ( + _SENTINEL, + content_disposition_header, + guess_filename, + parse_mimetype, + sentinel, +) +from .streams import StreamReader +from .typedefs import JSONEncoder, _CIMultiDict + +__all__ = ( + "PAYLOAD_REGISTRY", + "get_payload", + "payload_type", + "Payload", + "BytesPayload", + "StringPayload", + "IOBasePayload", + "BytesIOPayload", + "BufferedReaderPayload", + "TextIOPayload", + "StringIOPayload", + "JsonPayload", + "AsyncIterablePayload", +) + +TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB + +if TYPE_CHECKING: + from typing import List + + +class LookupError(Exception): + pass + + +class Order(str, enum.Enum): + normal = "normal" + try_first = "try_first" + try_last = "try_last" + + +def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload": + return PAYLOAD_REGISTRY.get(data, *args, **kwargs) + + +def register_payload( + factory: Type["Payload"], type: Any, *, order: Order = Order.normal +) -> None: + PAYLOAD_REGISTRY.register(factory, type, order=order) + + +class payload_type: + def __init__(self, type: Any, *, order: Order = Order.normal) -> None: + self.type = type + self.order = order + + def __call__(self, factory: Type["Payload"]) -> Type["Payload"]: + register_payload(factory, self.type, order=self.order) + return factory + + +PayloadType = Type["Payload"] +_PayloadRegistryItem = Tuple[PayloadType, Any] + + +class PayloadRegistry: + """Payload registry. 
+ + note: we need zope.interface for more efficient adapter search + """ + + def __init__(self) -> None: + self._first: List[_PayloadRegistryItem] = [] + self._normal: List[_PayloadRegistryItem] = [] + self._last: List[_PayloadRegistryItem] = [] + + def get( + self, + data: Any, + *args: Any, + _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain, + **kwargs: Any, + ) -> "Payload": + if isinstance(data, Payload): + return data + for factory, type in _CHAIN(self._first, self._normal, self._last): + if isinstance(data, type): + return factory(data, *args, **kwargs) + + raise LookupError() + + def register( + self, factory: PayloadType, type: Any, *, order: Order = Order.normal + ) -> None: + if order is Order.try_first: + self._first.append((factory, type)) + elif order is Order.normal: + self._normal.append((factory, type)) + elif order is Order.try_last: + self._last.append((factory, type)) + else: + raise ValueError(f"Unsupported order {order!r}") + + +class Payload(ABC): + + _default_content_type: str = "application/octet-stream" + _size: Optional[int] = None + + def __init__( + self, + value: Any, + headers: Optional[ + Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]] + ] = None, + content_type: Union[str, None, _SENTINEL] = sentinel, + filename: Optional[str] = None, + encoding: Optional[str] = None, + **kwargs: Any, + ) -> None: + self._encoding = encoding + self._filename = filename + self._headers: _CIMultiDict = CIMultiDict() + self._value = value + if content_type is not sentinel and content_type is not None: + self._headers[hdrs.CONTENT_TYPE] = content_type + elif self._filename is not None: + content_type = mimetypes.guess_type(self._filename)[0] + if content_type is None: + content_type = self._default_content_type + self._headers[hdrs.CONTENT_TYPE] = content_type + else: + self._headers[hdrs.CONTENT_TYPE] = self._default_content_type + self._headers.update(headers or {}) + + @property + def size(self) -> Optional[int]: + """Size of the payload.""" + return self._size + + @property + def filename(self) -> Optional[str]: + """Filename of the payload.""" + return self._filename + + @property + def headers(self) -> _CIMultiDict: + """Custom item headers""" + return self._headers + + @property + def _binary_headers(self) -> bytes: + return ( + "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode( + "utf-8" + ) + + b"\r\n" + ) + + @property + def encoding(self) -> Optional[str]: + """Payload encoding""" + return self._encoding + + @property + def content_type(self) -> str: + """Content type""" + return self._headers[hdrs.CONTENT_TYPE] + + def set_content_disposition( + self, + disptype: str, + quote_fields: bool = True, + _charset: str = "utf-8", + **params: Any, + ) -> None: + """Sets ``Content-Disposition`` header.""" + self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header( + disptype, quote_fields=quote_fields, _charset=_charset, **params + ) + + @abstractmethod + async def write(self, writer: AbstractStreamWriter) -> None: + """Write payload. 
+ + writer is an AbstractStreamWriter instance: + """ + + +class BytesPayload(Payload): + def __init__( + self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any + ) -> None: + if not isinstance(value, (bytes, bytearray, memoryview)): + raise TypeError(f"value argument must be byte-ish, not {type(value)!r}") + + if "content_type" not in kwargs: + kwargs["content_type"] = "application/octet-stream" + + super().__init__(value, *args, **kwargs) + + if isinstance(value, memoryview): + self._size = value.nbytes + else: + self._size = len(value) + + if self._size > TOO_LARGE_BYTES_BODY: + kwargs = {"source": self} + warnings.warn( + "Sending a large body directly with raw bytes might" + " lock the event loop. You should probably pass an " + "io.BytesIO object instead", + ResourceWarning, + **kwargs, + ) + + async def write(self, writer: AbstractStreamWriter) -> None: + await writer.write(self._value) + + +class StringPayload(BytesPayload): + def __init__( + self, + value: str, + *args: Any, + encoding: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs: Any, + ) -> None: + + if encoding is None: + if content_type is None: + real_encoding = "utf-8" + content_type = "text/plain; charset=utf-8" + else: + mimetype = parse_mimetype(content_type) + real_encoding = mimetype.parameters.get("charset", "utf-8") + else: + if content_type is None: + content_type = "text/plain; charset=%s" % encoding + real_encoding = encoding + + super().__init__( + value.encode(real_encoding), + encoding=real_encoding, + content_type=content_type, + *args, + **kwargs, + ) + + +class StringIOPayload(StringPayload): + def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None: + super().__init__(value.read(), *args, **kwargs) + + +class IOBasePayload(Payload): + _value: IO[Any] + + def __init__( + self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any + ) -> None: + if "filename" not in kwargs: + kwargs["filename"] = guess_filename(value) + + super().__init__(value, *args, **kwargs) + + if self._filename is not None and disposition is not None: + if hdrs.CONTENT_DISPOSITION not in self.headers: + self.set_content_disposition(disposition, filename=self._filename) + + async def write(self, writer: AbstractStreamWriter) -> None: + loop = asyncio.get_event_loop() + try: + chunk = await loop.run_in_executor(None, self._value.read, 2**16) + while chunk: + await writer.write(chunk) + chunk = await loop.run_in_executor(None, self._value.read, 2**16) + finally: + await loop.run_in_executor(None, self._value.close) + + +class TextIOPayload(IOBasePayload): + _value: TextIO + + def __init__( + self, + value: TextIO, + *args: Any, + encoding: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs: Any, + ) -> None: + + if encoding is None: + if content_type is None: + encoding = "utf-8" + content_type = "text/plain; charset=utf-8" + else: + mimetype = parse_mimetype(content_type) + encoding = mimetype.parameters.get("charset", "utf-8") + else: + if content_type is None: + content_type = "text/plain; charset=%s" % encoding + + super().__init__( + value, + content_type=content_type, + encoding=encoding, + *args, + **kwargs, + ) + + @property + def size(self) -> Optional[int]: + try: + return os.fstat(self._value.fileno()).st_size - self._value.tell() + except OSError: + return None + + async def write(self, writer: AbstractStreamWriter) -> None: + loop = asyncio.get_event_loop() + try: + chunk = await loop.run_in_executor(None, self._value.read, 
2**16) + while chunk: + data = ( + chunk.encode(encoding=self._encoding) + if self._encoding + else chunk.encode() + ) + await writer.write(data) + chunk = await loop.run_in_executor(None, self._value.read, 2**16) + finally: + await loop.run_in_executor(None, self._value.close) + + +class BytesIOPayload(IOBasePayload): + @property + def size(self) -> int: + position = self._value.tell() + end = self._value.seek(0, os.SEEK_END) + self._value.seek(position) + return end - position + + +class BufferedReaderPayload(IOBasePayload): + @property + def size(self) -> Optional[int]: + try: + return os.fstat(self._value.fileno()).st_size - self._value.tell() + except OSError: + # data.fileno() is not supported, e.g. + # io.BufferedReader(io.BytesIO(b'data')) + return None + + +class JsonPayload(BytesPayload): + def __init__( + self, + value: Any, + encoding: str = "utf-8", + content_type: str = "application/json", + dumps: JSONEncoder = json.dumps, + *args: Any, + **kwargs: Any, + ) -> None: + + super().__init__( + dumps(value).encode(encoding), + content_type=content_type, + encoding=encoding, + *args, + **kwargs, + ) + + +if TYPE_CHECKING: + from typing import AsyncIterable, AsyncIterator + + _AsyncIterator = AsyncIterator[bytes] + _AsyncIterable = AsyncIterable[bytes] +else: + from collections.abc import AsyncIterable, AsyncIterator + + _AsyncIterator = AsyncIterator + _AsyncIterable = AsyncIterable + + +class AsyncIterablePayload(Payload): + + _iter: Optional[_AsyncIterator] = None + + def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None: + if not isinstance(value, AsyncIterable): + raise TypeError( + "value argument must support " + "collections.abc.AsyncIterable interface, " + "got {!r}".format(type(value)) + ) + + if "content_type" not in kwargs: + kwargs["content_type"] = "application/octet-stream" + + super().__init__(value, *args, **kwargs) + + self._iter = value.__aiter__() + + async def write(self, writer: AbstractStreamWriter) -> None: + if self._iter: + try: + # iter is not None check prevents rare cases + # when the case iterable is used twice + while True: + chunk = await self._iter.__anext__() + await writer.write(chunk) + except StopAsyncIteration: + self._iter = None + + +class StreamReaderPayload(AsyncIterablePayload): + def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None: + super().__init__(value.iter_any(), *args, **kwargs) + + +PAYLOAD_REGISTRY = PayloadRegistry() +PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview)) +PAYLOAD_REGISTRY.register(StringPayload, str) +PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO) +PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase) +PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO) +PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom)) +PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase) +PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader) +# try_last for giving a chance to more specialized async interables like +# multidict.BodyPartReaderPayload override the default +PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/payload_streamer.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/payload_streamer.py new file mode 100644 index 000000000..364f763ae --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/payload_streamer.py @@ -0,0 +1,75 @@ +""" +Payload implementation for coroutines 
as data provider. + +As a simple case, you can upload data from file:: + + @aiohttp.streamer + async def file_sender(writer, file_name=None): + with open(file_name, 'rb') as f: + chunk = f.read(2**16) + while chunk: + await writer.write(chunk) + + chunk = f.read(2**16) + +Then you can use `file_sender` like this: + + async with session.post('http://httpbin.org/post', + data=file_sender(file_name='huge_file')) as resp: + print(await resp.text()) + +..note:: Coroutine must accept `writer` as first argument + +""" + +import types +import warnings +from typing import Any, Awaitable, Callable, Dict, Tuple + +from .abc import AbstractStreamWriter +from .payload import Payload, payload_type + +__all__ = ("streamer",) + + +class _stream_wrapper: + def __init__( + self, + coro: Callable[..., Awaitable[None]], + args: Tuple[Any, ...], + kwargs: Dict[str, Any], + ) -> None: + self.coro = types.coroutine(coro) + self.args = args + self.kwargs = kwargs + + async def __call__(self, writer: AbstractStreamWriter) -> None: + await self.coro(writer, *self.args, **self.kwargs) + + +class streamer: + def __init__(self, coro: Callable[..., Awaitable[None]]) -> None: + warnings.warn( + "@streamer is deprecated, use async generators instead", + DeprecationWarning, + stacklevel=2, + ) + self.coro = coro + + def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper: + return _stream_wrapper(self.coro, args, kwargs) + + +@payload_type(_stream_wrapper) +class StreamWrapperPayload(Payload): + async def write(self, writer: AbstractStreamWriter) -> None: + await self._value(writer) + + +@payload_type(streamer) +class StreamPayload(StreamWrapperPayload): + def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None: + super().__init__(value(), *args, **kwargs) + + async def write(self, writer: AbstractStreamWriter) -> None: + await self._value(writer) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/py.typed b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/py.typed new file mode 100644 index 000000000..f5642f79f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/py.typed @@ -0,0 +1 @@ +Marker diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/pytest_plugin.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/pytest_plugin.py new file mode 100644 index 000000000..c862b4095 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/pytest_plugin.py @@ -0,0 +1,407 @@ +import asyncio +import contextlib +import inspect +import warnings +from typing import ( + Any, + Awaitable, + Callable, + Dict, + Iterator, + Optional, + Protocol, + Type, + Union, +) + +import pytest + +from aiohttp.web import Application + +from .test_utils import ( + BaseTestServer, + RawTestServer, + TestClient, + TestServer, + loop_context, + setup_test_loop, + teardown_test_loop, + unused_port as _unused_port, +) + +try: + import uvloop +except ImportError: # pragma: no cover + uvloop = None # type: ignore[assignment] + +AiohttpRawServer = Callable[[Application], Awaitable[RawTestServer]] + + +class AiohttpClient(Protocol): + def __call__( + self, + __param: Union[Application, BaseTestServer], + *, + server_kwargs: Optional[Dict[str, Any]] = None, + **kwargs: Any + ) -> Awaitable[TestClient]: ... + + +class AiohttpServer(Protocol): + def __call__( + self, app: Application, *, port: Optional[int] = None, **kwargs: Any + ) -> Awaitable[TestServer]: ... 
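+# A usage sketch for the fixtures declared in this plugin, shown here as a
+# comment so the module itself is unchanged. The handler and test names below
+# are illustrative assumptions, not part of aiohttp; the fixture call shape
+# (``await aiohttp_client(app)`` returning a started ``TestClient``) follows
+# the ``aiohttp_client`` fixture defined later in this file.
+#
+#     from aiohttp import web
+#
+#     async def handler(request: web.Request) -> web.Response:
+#         return web.Response(text="OK")
+#
+#     async def test_index(aiohttp_client) -> None:
+#         app = web.Application()
+#         app.router.add_get("/", handler)
+#         client = await aiohttp_client(app)
+#         resp = await client.get("/")
+#         assert resp.status == 200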
+ + +def pytest_addoption(parser): # type: ignore[no-untyped-def] + parser.addoption( + "--aiohttp-fast", + action="store_true", + default=False, + help="run tests faster by disabling extra checks", + ) + parser.addoption( + "--aiohttp-loop", + action="store", + default="pyloop", + help="run tests with specific loop: pyloop, uvloop or all", + ) + parser.addoption( + "--aiohttp-enable-loop-debug", + action="store_true", + default=False, + help="enable event loop debug mode", + ) + + +def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def] + """Set up pytest fixture. + + Allow fixtures to be coroutines. Run coroutine fixtures in an event loop. + """ + func = fixturedef.func + + if inspect.isasyncgenfunction(func): + # async generator fixture + is_async_gen = True + elif asyncio.iscoroutinefunction(func): + # regular async fixture + is_async_gen = False + else: + # not an async fixture, nothing to do + return + + strip_request = False + if "request" not in fixturedef.argnames: + fixturedef.argnames += ("request",) + strip_request = True + + def wrapper(*args, **kwargs): # type: ignore[no-untyped-def] + request = kwargs["request"] + if strip_request: + del kwargs["request"] + + # if neither the fixture nor the test use the 'loop' fixture, + # 'getfixturevalue' will fail because the test is not parameterized + # (this can be removed someday if 'loop' is no longer parameterized) + if "loop" not in request.fixturenames: + raise Exception( + "Asynchronous fixtures must depend on the 'loop' fixture or " + "be used in tests depending from it." + ) + + _loop = request.getfixturevalue("loop") + + if is_async_gen: + # for async generators, we need to advance the generator once, + # then advance it again in a finalizer + gen = func(*args, **kwargs) + + def finalizer(): # type: ignore[no-untyped-def] + try: + return _loop.run_until_complete(gen.__anext__()) + except StopAsyncIteration: + pass + + request.addfinalizer(finalizer) + return _loop.run_until_complete(gen.__anext__()) + else: + return _loop.run_until_complete(func(*args, **kwargs)) + + fixturedef.func = wrapper + + +@pytest.fixture +def fast(request): # type: ignore[no-untyped-def] + """--fast config option""" + return request.config.getoption("--aiohttp-fast") + + +@pytest.fixture +def loop_debug(request): # type: ignore[no-untyped-def] + """--enable-loop-debug config option""" + return request.config.getoption("--aiohttp-enable-loop-debug") + + +@contextlib.contextmanager +def _runtime_warning_context(): # type: ignore[no-untyped-def] + """Context manager which checks for RuntimeWarnings. + + This exists specifically to + avoid "coroutine 'X' was never awaited" warnings being missed. + + If RuntimeWarnings occur in the context a RuntimeError is raised. + """ + with warnings.catch_warnings(record=True) as _warnings: + yield + rw = [ + "{w.filename}:{w.lineno}:{w.message}".format(w=w) + for w in _warnings + if w.category == RuntimeWarning + ] + if rw: + raise RuntimeError( + "{} Runtime Warning{},\n{}".format( + len(rw), "" if len(rw) == 1 else "s", "\n".join(rw) + ) + ) + + +@contextlib.contextmanager +def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def] + """Passthrough loop context. + + Sets up and tears down a loop unless one is passed in via the loop + argument when it's passed straight through. 
+ """ + if loop: + # loop already exists, pass it straight through + yield loop + else: + # this shadows loop_context's standard behavior + loop = setup_test_loop() + yield loop + teardown_test_loop(loop, fast=fast) + + +def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def] + """Fix pytest collecting for coroutines.""" + if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj): + return list(collector._genfunctions(name, obj)) + + +def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def] + """Run coroutines in an event loop instead of a normal function call.""" + fast = pyfuncitem.config.getoption("--aiohttp-fast") + if asyncio.iscoroutinefunction(pyfuncitem.function): + existing_loop = pyfuncitem.funcargs.get( + "proactor_loop" + ) or pyfuncitem.funcargs.get("loop", None) + with _runtime_warning_context(): + with _passthrough_loop_context(existing_loop, fast=fast) as _loop: + testargs = { + arg: pyfuncitem.funcargs[arg] + for arg in pyfuncitem._fixtureinfo.argnames + } + _loop.run_until_complete(pyfuncitem.obj(**testargs)) + + return True + + +def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def] + if "loop_factory" not in metafunc.fixturenames: + return + + loops = metafunc.config.option.aiohttp_loop + avail_factories: Dict[str, Type[asyncio.AbstractEventLoopPolicy]] + avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy} + + if uvloop is not None: # pragma: no cover + avail_factories["uvloop"] = uvloop.EventLoopPolicy + + if loops == "all": + loops = "pyloop,uvloop?" + + factories = {} # type: ignore[var-annotated] + for name in loops.split(","): + required = not name.endswith("?") + name = name.strip(" ?") + if name not in avail_factories: # pragma: no cover + if required: + raise ValueError( + "Unknown loop '%s', available loops: %s" + % (name, list(factories.keys())) + ) + else: + continue + factories[name] = avail_factories[name] + metafunc.parametrize( + "loop_factory", list(factories.values()), ids=list(factories.keys()) + ) + + +@pytest.fixture +def loop(loop_factory, fast, loop_debug): # type: ignore[no-untyped-def] + """Return an instance of the event loop.""" + policy = loop_factory() + asyncio.set_event_loop_policy(policy) + with loop_context(fast=fast) as _loop: + if loop_debug: + _loop.set_debug(True) # pragma: no cover + asyncio.set_event_loop(_loop) + yield _loop + + +@pytest.fixture +def proactor_loop(): # type: ignore[no-untyped-def] + policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore[attr-defined] + asyncio.set_event_loop_policy(policy) + + with loop_context(policy.new_event_loop) as _loop: + asyncio.set_event_loop(_loop) + yield _loop + + +@pytest.fixture +def unused_port(aiohttp_unused_port: Callable[[], int]) -> Callable[[], int]: + warnings.warn( + "Deprecated, use aiohttp_unused_port fixture instead", + DeprecationWarning, + stacklevel=2, + ) + return aiohttp_unused_port + + +@pytest.fixture +def aiohttp_unused_port() -> Callable[[], int]: + """Return a port that is unused on the current host.""" + return _unused_port + + +@pytest.fixture +def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]: + """Factory to create a TestServer instance, given an app. 
+ + aiohttp_server(app, **kwargs) + """ + servers = [] + + async def go( + app: Application, *, port: Optional[int] = None, **kwargs: Any + ) -> TestServer: + server = TestServer(app, port=port) + await server.start_server(loop=loop, **kwargs) + servers.append(server) + return server + + yield go + + async def finalize() -> None: + while servers: + await servers.pop().close() + + loop.run_until_complete(finalize()) + + +@pytest.fixture +def test_server(aiohttp_server): # type: ignore[no-untyped-def] # pragma: no cover + warnings.warn( + "Deprecated, use aiohttp_server fixture instead", + DeprecationWarning, + stacklevel=2, + ) + return aiohttp_server + + +@pytest.fixture +def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]: + """Factory to create a RawTestServer instance, given a web handler. + + aiohttp_raw_server(handler, **kwargs) + """ + servers = [] + + async def go(handler, *, port=None, **kwargs): # type: ignore[no-untyped-def] + server = RawTestServer(handler, port=port) + await server.start_server(loop=loop, **kwargs) + servers.append(server) + return server + + yield go + + async def finalize() -> None: + while servers: + await servers.pop().close() + + loop.run_until_complete(finalize()) + + +@pytest.fixture +def raw_test_server( # type: ignore[no-untyped-def] # pragma: no cover + aiohttp_raw_server, +): + warnings.warn( + "Deprecated, use aiohttp_raw_server fixture instead", + DeprecationWarning, + stacklevel=2, + ) + return aiohttp_raw_server + + +@pytest.fixture +def aiohttp_client( + loop: asyncio.AbstractEventLoop, +) -> Iterator[AiohttpClient]: + """Factory to create a TestClient instance. + + aiohttp_client(app, **kwargs) + aiohttp_client(server, **kwargs) + aiohttp_client(raw_server, **kwargs) + """ + clients = [] + + async def go( + __param: Union[Application, BaseTestServer], + *args: Any, + server_kwargs: Optional[Dict[str, Any]] = None, + **kwargs: Any + ) -> TestClient: + + if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type] + __param, (Application, BaseTestServer) + ): + __param = __param(loop, *args, **kwargs) + kwargs = {} + else: + assert not args, "args should be empty" + + if isinstance(__param, Application): + server_kwargs = server_kwargs or {} + server = TestServer(__param, loop=loop, **server_kwargs) + client = TestClient(server, loop=loop, **kwargs) + elif isinstance(__param, BaseTestServer): + client = TestClient(__param, loop=loop, **kwargs) + else: + raise ValueError("Unknown argument type: %r" % type(__param)) + + await client.start_server() + clients.append(client) + return client + + yield go + + async def finalize() -> None: + while clients: + await clients.pop().close() + + loop.run_until_complete(finalize()) + + +@pytest.fixture +def test_client(aiohttp_client): # type: ignore[no-untyped-def] # pragma: no cover + warnings.warn( + "Deprecated, use aiohttp_client fixture instead", + DeprecationWarning, + stacklevel=2, + ) + return aiohttp_client diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/resolver.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/resolver.py new file mode 100644 index 000000000..10e36266a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/resolver.py @@ -0,0 +1,188 @@ +import asyncio +import socket +import sys +from typing import Any, Dict, List, Optional, Tuple, Type, Union + +from .abc import AbstractResolver, ResolveResult + +__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver") + + +try: + 
import aiodns + + aiodns_default = hasattr(aiodns.DNSResolver, "getaddrinfo") +except ImportError: # pragma: no cover + aiodns = None # type: ignore[assignment] + aiodns_default = False + + +_NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV +_SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) + + +class ThreadedResolver(AbstractResolver): + """Threaded resolver. + + Uses an Executor for synchronous getaddrinfo() calls. + concurrent.futures.ThreadPoolExecutor is used by default. + """ + + def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + self._loop = loop or asyncio.get_running_loop() + + async def resolve( + self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET + ) -> List[ResolveResult]: + infos = await self._loop.getaddrinfo( + host, + port, + type=socket.SOCK_STREAM, + family=family, + flags=socket.AI_ADDRCONFIG, + ) + + hosts: List[ResolveResult] = [] + for family, _, proto, _, address in infos: + if family == socket.AF_INET6: + if len(address) < 3: + # IPv6 is not supported by Python build, + # or IPv6 is not enabled in the host + continue + if address[3] and _SUPPORTS_SCOPE_ID: + # This is essential for link-local IPv6 addresses. + # LL IPv6 is a VERY rare case. Strictly speaking, we should use + # getnameinfo() unconditionally, but performance makes sense. + resolved_host, _port = await self._loop.getnameinfo( + address, _NUMERIC_SOCKET_FLAGS + ) + port = int(_port) + else: + resolved_host, port = address[:2] + else: # IPv4 + assert family == socket.AF_INET + resolved_host, port = address # type: ignore[misc] + hosts.append( + ResolveResult( + hostname=host, + host=resolved_host, + port=port, + family=family, + proto=proto, + flags=_NUMERIC_SOCKET_FLAGS, + ) + ) + + return hosts + + async def close(self) -> None: + pass + + +class AsyncResolver(AbstractResolver): + """Use the `aiodns` package to make asynchronous DNS lookups""" + + def __init__( + self, + loop: Optional[asyncio.AbstractEventLoop] = None, + *args: Any, + **kwargs: Any + ) -> None: + if aiodns is None: + raise RuntimeError("Resolver requires aiodns library") + + self._resolver = aiodns.DNSResolver(*args, **kwargs) + + if not hasattr(self._resolver, "gethostbyname"): + # aiodns 1.1 is not available, fallback to DNSResolver.query + self.resolve = self._resolve_with_query # type: ignore + + async def resolve( + self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET + ) -> List[ResolveResult]: + try: + resp = await self._resolver.getaddrinfo( + host, + port=port, + type=socket.SOCK_STREAM, + family=family, + flags=socket.AI_ADDRCONFIG, + ) + except aiodns.error.DNSError as exc: + msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" + raise OSError(msg) from exc + hosts: List[ResolveResult] = [] + for node in resp.nodes: + address: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] = node.addr + family = node.family + if family == socket.AF_INET6: + if len(address) > 3 and address[3] and _SUPPORTS_SCOPE_ID: + # This is essential for link-local IPv6 addresses. + # LL IPv6 is a VERY rare case. Strictly speaking, we should use + # getnameinfo() unconditionally, but performance makes sense. 
+ result = await self._resolver.getnameinfo( + (address[0].decode("ascii"), *address[1:]), + _NUMERIC_SOCKET_FLAGS, + ) + resolved_host = result.node + else: + resolved_host = address[0].decode("ascii") + port = address[1] + else: # IPv4 + assert family == socket.AF_INET + resolved_host = address[0].decode("ascii") + port = address[1] + hosts.append( + ResolveResult( + hostname=host, + host=resolved_host, + port=port, + family=family, + proto=0, + flags=_NUMERIC_SOCKET_FLAGS, + ) + ) + + if not hosts: + raise OSError("DNS lookup failed") + + return hosts + + async def _resolve_with_query( + self, host: str, port: int = 0, family: int = socket.AF_INET + ) -> List[Dict[str, Any]]: + if family == socket.AF_INET6: + qtype = "AAAA" + else: + qtype = "A" + + try: + resp = await self._resolver.query(host, qtype) + except aiodns.error.DNSError as exc: + msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" + raise OSError(msg) from exc + + hosts = [] + for rr in resp: + hosts.append( + { + "hostname": host, + "host": rr.host, + "port": port, + "family": family, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + ) + + if not hosts: + raise OSError("DNS lookup failed") + + return hosts + + async def close(self) -> None: + self._resolver.cancel() + + +_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]] +DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/streams.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/streams.py new file mode 100644 index 000000000..b9b9c3fd9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/streams.py @@ -0,0 +1,684 @@ +import asyncio +import collections +import warnings +from typing import ( + Awaitable, + Callable, + Deque, + Final, + Generic, + List, + Optional, + Tuple, + TypeVar, +) + +from .base_protocol import BaseProtocol +from .helpers import ( + _EXC_SENTINEL, + BaseTimerContext, + TimerNoop, + set_exception, + set_result, +) +from .log import internal_logger + +__all__ = ( + "EMPTY_PAYLOAD", + "EofStream", + "StreamReader", + "DataQueue", + "FlowControlDataQueue", +) + +_T = TypeVar("_T") + + +class EofStream(Exception): + """eof stream indication.""" + + +class AsyncStreamIterator(Generic[_T]): + def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None: + self.read_func = read_func + + def __aiter__(self) -> "AsyncStreamIterator[_T]": + return self + + async def __anext__(self) -> _T: + try: + rv = await self.read_func() + except EofStream: + raise StopAsyncIteration + if rv == b"": + raise StopAsyncIteration + return rv + + +class ChunkTupleAsyncStreamIterator: + def __init__(self, stream: "StreamReader") -> None: + self._stream = stream + + def __aiter__(self) -> "ChunkTupleAsyncStreamIterator": + return self + + async def __anext__(self) -> Tuple[bytes, bool]: + rv = await self._stream.readchunk() + if rv == (b"", False): + raise StopAsyncIteration + return rv + + +class AsyncStreamReaderMixin: + def __aiter__(self) -> AsyncStreamIterator[bytes]: + return AsyncStreamIterator(self.readline) # type: ignore[attr-defined] + + def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]: + """Returns an asynchronous iterator that yields chunks of size n.""" + return AsyncStreamIterator(lambda: self.read(n)) # type: ignore[attr-defined] + + def iter_any(self) -> AsyncStreamIterator[bytes]: + """Yield all available data as soon as it is received.""" + return 
AsyncStreamIterator(self.readany) # type: ignore[attr-defined] + + def iter_chunks(self) -> ChunkTupleAsyncStreamIterator: + """Yield chunks of data as they are received by the server. + + The yielded objects are tuples + of (bytes, bool) as returned by the StreamReader.readchunk method. + """ + return ChunkTupleAsyncStreamIterator(self) # type: ignore[arg-type] + + +class StreamReader(AsyncStreamReaderMixin): + """An enhancement of asyncio.StreamReader. + + Supports asynchronous iteration by line, chunk or as available:: + + async for line in reader: + ... + async for chunk in reader.iter_chunked(1024): + ... + async for slice in reader.iter_any(): + ... + + """ + + total_bytes = 0 + + def __init__( + self, + protocol: BaseProtocol, + limit: int, + *, + timer: Optional[BaseTimerContext] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + self._protocol = protocol + self._low_water = limit + self._high_water = limit * 2 + if loop is None: + loop = asyncio.get_event_loop() + self._loop = loop + self._size = 0 + self._cursor = 0 + self._http_chunk_splits: Optional[List[int]] = None + self._buffer: Deque[bytes] = collections.deque() + self._buffer_offset = 0 + self._eof = False + self._waiter: Optional[asyncio.Future[None]] = None + self._eof_waiter: Optional[asyncio.Future[None]] = None + self._exception: Optional[BaseException] = None + self._timer = TimerNoop() if timer is None else timer + self._eof_callbacks: List[Callable[[], None]] = [] + + def __repr__(self) -> str: + info = [self.__class__.__name__] + if self._size: + info.append("%d bytes" % self._size) + if self._eof: + info.append("eof") + if self._low_water != 2**16: # default limit + info.append("low=%d high=%d" % (self._low_water, self._high_water)) + if self._waiter: + info.append("w=%r" % self._waiter) + if self._exception: + info.append("e=%r" % self._exception) + return "<%s>" % " ".join(info) + + def get_read_buffer_limits(self) -> Tuple[int, int]: + return (self._low_water, self._high_water) + + def exception(self) -> Optional[BaseException]: + return self._exception + + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: + self._exception = exc + self._eof_callbacks.clear() + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_exception(waiter, exc, exc_cause) + + waiter = self._eof_waiter + if waiter is not None: + self._eof_waiter = None + set_exception(waiter, exc, exc_cause) + + def on_eof(self, callback: Callable[[], None]) -> None: + if self._eof: + try: + callback() + except Exception: + internal_logger.exception("Exception in eof callback") + else: + self._eof_callbacks.append(callback) + + def feed_eof(self) -> None: + self._eof = True + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, None) + + waiter = self._eof_waiter + if waiter is not None: + self._eof_waiter = None + set_result(waiter, None) + + for cb in self._eof_callbacks: + try: + cb() + except Exception: + internal_logger.exception("Exception in eof callback") + + self._eof_callbacks.clear() + + def is_eof(self) -> bool: + """Return True if 'feed_eof' was called.""" + return self._eof + + def at_eof(self) -> bool: + """Return True if the buffer is empty and 'feed_eof' was called.""" + return self._eof and not self._buffer + + async def wait_eof(self) -> None: + if self._eof: + return + + assert self._eof_waiter is None + self._eof_waiter = self._loop.create_future() + try: + await self._eof_waiter + finally: 
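+            # Drop the waiter reference even if the await above was cancelled,
+            # so a later wait_eof() call can install a fresh future.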
+ self._eof_waiter = None + + def unread_data(self, data: bytes) -> None: + """rollback reading some data from stream, inserting it to buffer head.""" + warnings.warn( + "unread_data() is deprecated " + "and will be removed in future releases (#3260)", + DeprecationWarning, + stacklevel=2, + ) + if not data: + return + + if self._buffer_offset: + self._buffer[0] = self._buffer[0][self._buffer_offset :] + self._buffer_offset = 0 + self._size += len(data) + self._cursor -= len(data) + self._buffer.appendleft(data) + self._eof_counter = 0 + + # TODO: size is ignored, remove the param later + def feed_data(self, data: bytes, size: int = 0) -> None: + assert not self._eof, "feed_data after feed_eof" + + if not data: + return + + self._size += len(data) + self._buffer.append(data) + self.total_bytes += len(data) + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, None) + + if self._size > self._high_water and not self._protocol._reading_paused: + self._protocol.pause_reading() + + def begin_http_chunk_receiving(self) -> None: + if self._http_chunk_splits is None: + if self.total_bytes: + raise RuntimeError( + "Called begin_http_chunk_receiving when" "some data was already fed" + ) + self._http_chunk_splits = [] + + def end_http_chunk_receiving(self) -> None: + if self._http_chunk_splits is None: + raise RuntimeError( + "Called end_chunk_receiving without calling " + "begin_chunk_receiving first" + ) + + # self._http_chunk_splits contains logical byte offsets from start of + # the body transfer. Each offset is the offset of the end of a chunk. + # "Logical" means bytes, accessible for a user. + # If no chunks containing logical data were received, current position + # is difinitely zero. + pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0 + + if self.total_bytes == pos: + # We should not add empty chunks here. So we check for that. + # Note, when chunked + gzip is used, we can receive a chunk + # of compressed data, but that data may not be enough for gzip FSM + # to yield any uncompressed data. That's why current position may + # not change after receiving a chunk. + return + + self._http_chunk_splits.append(self.total_bytes) + + # wake up readchunk when end of http chunk received + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, None) + + async def _wait(self, func_name: str) -> None: + # StreamReader uses a future to link the protocol feed_data() method + # to a read coroutine. Running two read coroutines at the same time + # would have an unexpected behaviour. It would not possible to know + # which coroutine would get the next data. + if self._waiter is not None: + raise RuntimeError( + "%s() called while another coroutine is " + "already waiting for incoming data" % func_name + ) + + waiter = self._waiter = self._loop.create_future() + try: + with self._timer: + await waiter + finally: + self._waiter = None + + async def readline(self) -> bytes: + return await self.readuntil() + + async def readuntil(self, separator: bytes = b"\n") -> bytes: + seplen = len(separator) + if seplen == 0: + raise ValueError("Separator should be at least one-byte string") + + if self._exception is not None: + raise self._exception + + chunk = b"" + chunk_size = 0 + not_enough = True + + while not_enough: + while self._buffer and not_enough: + offset = self._buffer_offset + ichar = self._buffer[0].find(separator, offset) + 1 + # Read from current offset to found separator or to the end. 
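+                # ichar is find() + 1, so 0 means "separator not found" and a
+                # positive value is one past the match start.  When found, the
+                # requested length (ichar - offset + seplen - 1) covers the data
+                # from the current offset through the end of the separator;
+                # otherwise -1 consumes the whole first buffered chunk.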
+ data = self._read_nowait_chunk( + ichar - offset + seplen - 1 if ichar else -1 + ) + chunk += data + chunk_size += len(data) + if ichar: + not_enough = False + + if chunk_size > self._high_water: + raise ValueError("Chunk too big") + + if self._eof: + break + + if not_enough: + await self._wait("readuntil") + + return chunk + + async def read(self, n: int = -1) -> bytes: + if self._exception is not None: + raise self._exception + + # migration problem; with DataQueue you have to catch + # EofStream exception, so common way is to run payload.read() inside + # infinite loop. what can cause real infinite loop with StreamReader + # lets keep this code one major release. + if __debug__: + if self._eof and not self._buffer: + self._eof_counter = getattr(self, "_eof_counter", 0) + 1 + if self._eof_counter > 5: + internal_logger.warning( + "Multiple access to StreamReader in eof state, " + "might be infinite loop.", + stack_info=True, + ) + + if not n: + return b"" + + if n < 0: + # This used to just loop creating a new waiter hoping to + # collect everything in self._buffer, but that would + # deadlock if the subprocess sends more than self.limit + # bytes. So just call self.readany() until EOF. + blocks = [] + while True: + block = await self.readany() + if not block: + break + blocks.append(block) + return b"".join(blocks) + + # TODO: should be `if` instead of `while` + # because waiter maybe triggered on chunk end, + # without feeding any data + while not self._buffer and not self._eof: + await self._wait("read") + + return self._read_nowait(n) + + async def readany(self) -> bytes: + if self._exception is not None: + raise self._exception + + # TODO: should be `if` instead of `while` + # because waiter maybe triggered on chunk end, + # without feeding any data + while not self._buffer and not self._eof: + await self._wait("readany") + + return self._read_nowait(-1) + + async def readchunk(self) -> Tuple[bytes, bool]: + """Returns a tuple of (data, end_of_http_chunk). + + When chunked transfer + encoding is used, end_of_http_chunk is a boolean indicating if the end + of the data corresponds to the end of a HTTP chunk , otherwise it is + always False. + """ + while True: + if self._exception is not None: + raise self._exception + + while self._http_chunk_splits: + pos = self._http_chunk_splits.pop(0) + if pos == self._cursor: + return (b"", True) + if pos > self._cursor: + return (self._read_nowait(pos - self._cursor), True) + internal_logger.warning( + "Skipping HTTP chunk end due to data " + "consumption beyond chunk boundary" + ) + + if self._buffer: + return (self._read_nowait_chunk(-1), False) + # return (self._read_nowait(-1), False) + + if self._eof: + # Special case for signifying EOF. + # (b'', True) is not a final return value actually. + return (b"", False) + + await self._wait("readchunk") + + async def readexactly(self, n: int) -> bytes: + if self._exception is not None: + raise self._exception + + blocks: List[bytes] = [] + while n > 0: + block = await self.read(n) + if not block: + partial = b"".join(blocks) + raise asyncio.IncompleteReadError(partial, len(partial) + n) + blocks.append(block) + n -= len(block) + + return b"".join(blocks) + + def read_nowait(self, n: int = -1) -> bytes: + # default was changed to be consistent with .read(-1) + # + # I believe the most users don't know about the method and + # they are not affected. 
+ if self._exception is not None: + raise self._exception + + if self._waiter and not self._waiter.done(): + raise RuntimeError( + "Called while some coroutine is waiting for incoming data." + ) + + return self._read_nowait(n) + + def _read_nowait_chunk(self, n: int) -> bytes: + first_buffer = self._buffer[0] + offset = self._buffer_offset + if n != -1 and len(first_buffer) - offset > n: + data = first_buffer[offset : offset + n] + self._buffer_offset += n + + elif offset: + self._buffer.popleft() + data = first_buffer[offset:] + self._buffer_offset = 0 + + else: + data = self._buffer.popleft() + + self._size -= len(data) + self._cursor += len(data) + + chunk_splits = self._http_chunk_splits + # Prevent memory leak: drop useless chunk splits + while chunk_splits and chunk_splits[0] < self._cursor: + chunk_splits.pop(0) + + if self._size < self._low_water and self._protocol._reading_paused: + self._protocol.resume_reading() + return data + + def _read_nowait(self, n: int) -> bytes: + """Read not more than n bytes, or whole buffer if n == -1""" + self._timer.assert_timeout() + + chunks = [] + while self._buffer: + chunk = self._read_nowait_chunk(n) + chunks.append(chunk) + if n != -1: + n -= len(chunk) + if n == 0: + break + + return b"".join(chunks) if chunks else b"" + + +class EmptyStreamReader(StreamReader): # lgtm [py/missing-call-to-init] + def __init__(self) -> None: + self._read_eof_chunk = False + + def __repr__(self) -> str: + return "<%s>" % self.__class__.__name__ + + def exception(self) -> Optional[BaseException]: + return None + + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: + pass + + def on_eof(self, callback: Callable[[], None]) -> None: + try: + callback() + except Exception: + internal_logger.exception("Exception in eof callback") + + def feed_eof(self) -> None: + pass + + def is_eof(self) -> bool: + return True + + def at_eof(self) -> bool: + return True + + async def wait_eof(self) -> None: + return + + def feed_data(self, data: bytes, n: int = 0) -> None: + pass + + async def readline(self) -> bytes: + return b"" + + async def read(self, n: int = -1) -> bytes: + return b"" + + # TODO add async def readuntil + + async def readany(self) -> bytes: + return b"" + + async def readchunk(self) -> Tuple[bytes, bool]: + if not self._read_eof_chunk: + self._read_eof_chunk = True + return (b"", False) + + return (b"", True) + + async def readexactly(self, n: int) -> bytes: + raise asyncio.IncompleteReadError(b"", n) + + def read_nowait(self, n: int = -1) -> bytes: + return b"" + + +EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader() + + +class DataQueue(Generic[_T]): + """DataQueue is a general-purpose blocking queue with one reader.""" + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._eof = False + self._waiter: Optional[asyncio.Future[None]] = None + self._exception: Optional[BaseException] = None + self._size = 0 + self._buffer: Deque[Tuple[_T, int]] = collections.deque() + + def __len__(self) -> int: + return len(self._buffer) + + def is_eof(self) -> bool: + return self._eof + + def at_eof(self) -> bool: + return self._eof and not self._buffer + + def exception(self) -> Optional[BaseException]: + return self._exception + + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: + self._eof = True + self._exception = exc + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_exception(waiter, exc, 
exc_cause) + + def feed_data(self, data: _T, size: int = 0) -> None: + self._size += size + self._buffer.append((data, size)) + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, None) + + def feed_eof(self) -> None: + self._eof = True + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, None) + + async def read(self) -> _T: + if not self._buffer and not self._eof: + assert not self._waiter + self._waiter = self._loop.create_future() + try: + await self._waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._waiter = None + raise + + if self._buffer: + data, size = self._buffer.popleft() + self._size -= size + return data + else: + if self._exception is not None: + raise self._exception + else: + raise EofStream + + def __aiter__(self) -> AsyncStreamIterator[_T]: + return AsyncStreamIterator(self.read) + + +class FlowControlDataQueue(DataQueue[_T]): + """FlowControlDataQueue resumes and pauses an underlying stream. + + It is a destination for parsed data. + """ + + def __init__( + self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop + ) -> None: + super().__init__(loop=loop) + + self._protocol = protocol + self._limit = limit * 2 + + def feed_data(self, data: _T, size: int = 0) -> None: + super().feed_data(data, size) + + if self._size > self._limit and not self._protocol._reading_paused: + self._protocol.pause_reading() + + async def read(self) -> _T: + try: + return await super().read() + finally: + if self._size < self._limit and self._protocol._reading_paused: + self._protocol.resume_reading() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/tcp_helpers.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/tcp_helpers.py new file mode 100644 index 000000000..88b244223 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/tcp_helpers.py @@ -0,0 +1,37 @@ +"""Helper methods to tune a TCP connection""" + +import asyncio +import socket +from contextlib import suppress +from typing import Optional # noqa + +__all__ = ("tcp_keepalive", "tcp_nodelay") + + +if hasattr(socket, "SO_KEEPALIVE"): + + def tcp_keepalive(transport: asyncio.Transport) -> None: + sock = transport.get_extra_info("socket") + if sock is not None: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) + +else: + + def tcp_keepalive(transport: asyncio.Transport) -> None: # pragma: no cover + pass + + +def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None: + sock = transport.get_extra_info("socket") + + if sock is None: + return + + if sock.family not in (socket.AF_INET, socket.AF_INET6): + return + + value = bool(value) + + # socket may be closed already, on windows OSError get raised + with suppress(OSError): + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/test_utils.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/test_utils.py new file mode 100644 index 000000000..97c1469dd --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/test_utils.py @@ -0,0 +1,731 @@ +"""Utilities shared by tests.""" + +import asyncio +import contextlib +import gc +import inspect +import ipaddress +import os +import socket +import sys +import warnings +from abc import ABC, abstractmethod +from types import TracebackType +from typing import TYPE_CHECKING, Any, Callable, Iterator, List, Optional, Type, cast +from unittest import 
IsolatedAsyncioTestCase, mock + +from aiosignal import Signal +from multidict import CIMultiDict, CIMultiDictProxy +from yarl import URL + +import aiohttp +from aiohttp.client import ( + _RequestContextManager, + _RequestOptions, + _WSRequestContextManager, +) + +from . import ClientSession, hdrs +from .abc import AbstractCookieJar +from .client_reqrep import ClientResponse +from .client_ws import ClientWebSocketResponse +from .helpers import sentinel +from .http import HttpVersion, RawRequestMessage +from .typedefs import StrOrURL +from .web import ( + Application, + AppRunner, + BaseRunner, + Request, + Server, + ServerRunner, + SockSite, + UrlMappingMatchInfo, +) +from .web_protocol import _RequestHandler + +if TYPE_CHECKING: + from ssl import SSLContext +else: + SSLContext = None + +if sys.version_info >= (3, 11) and TYPE_CHECKING: + from typing import Unpack + +REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin" + + +def get_unused_port_socket( + host: str, family: socket.AddressFamily = socket.AF_INET +) -> socket.socket: + return get_port_socket(host, 0, family) + + +def get_port_socket( + host: str, port: int, family: socket.AddressFamily +) -> socket.socket: + s = socket.socket(family, socket.SOCK_STREAM) + if REUSE_ADDRESS: + # Windows has different semantics for SO_REUSEADDR, + # so don't set it. Ref: + # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse + s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + s.bind((host, port)) + return s + + +def unused_port() -> int: + """Return a port that is unused on the current host.""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind(("127.0.0.1", 0)) + return cast(int, s.getsockname()[1]) + + +class BaseTestServer(ABC): + __test__ = False + + def __init__( + self, + *, + scheme: str = "", + loop: Optional[asyncio.AbstractEventLoop] = None, + host: str = "127.0.0.1", + port: Optional[int] = None, + skip_url_asserts: bool = False, + socket_factory: Callable[ + [str, int, socket.AddressFamily], socket.socket + ] = get_port_socket, + **kwargs: Any, + ) -> None: + self._loop = loop + self.runner: Optional[BaseRunner] = None + self._root: Optional[URL] = None + self.host = host + self.port = port + self._closed = False + self.scheme = scheme + self.skip_url_asserts = skip_url_asserts + self.socket_factory = socket_factory + + async def start_server( + self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any + ) -> None: + if self.runner: + return + self._loop = loop + self._ssl = kwargs.pop("ssl", None) + self.runner = await self._make_runner(handler_cancellation=True, **kwargs) + await self.runner.setup() + if not self.port: + self.port = 0 + try: + version = ipaddress.ip_address(self.host).version + except ValueError: + version = 4 + family = socket.AF_INET6 if version == 6 else socket.AF_INET + _sock = self.socket_factory(self.host, self.port, family) + self.host, self.port = _sock.getsockname()[:2] + site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl) + await site.start() + server = site._server + assert server is not None + sockets = server.sockets # type: ignore[attr-defined] + assert sockets is not None + self.port = sockets[0].getsockname()[1] + if not self.scheme: + self.scheme = "https" if self._ssl else "http" + self._root = URL(f"{self.scheme}://{self.host}:{self.port}") + + @abstractmethod # pragma: no cover + async def _make_runner(self, **kwargs: Any) -> BaseRunner: + pass + + def make_url(self, path: StrOrURL) -> 
URL: + assert self._root is not None + url = URL(path) + if not self.skip_url_asserts: + assert not url.is_absolute() + return self._root.join(url) + else: + return URL(str(self._root) + str(path)) + + @property + def started(self) -> bool: + return self.runner is not None + + @property + def closed(self) -> bool: + return self._closed + + @property + def handler(self) -> Server: + # for backward compatibility + # web.Server instance + runner = self.runner + assert runner is not None + assert runner.server is not None + return runner.server + + async def close(self) -> None: + """Close all fixtures created by the test client. + + After that point, the TestClient is no longer usable. + + This is an idempotent function: running close multiple times + will not have any additional effects. + + close is also run when the object is garbage collected, and on + exit when used as a context manager. + + """ + if self.started and not self.closed: + assert self.runner is not None + await self.runner.cleanup() + self._root = None + self.port = None + self._closed = True + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> "BaseTestServer": + await self.start_server(loop=self._loop) + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + await self.close() + + +class TestServer(BaseTestServer): + def __init__( + self, + app: Application, + *, + scheme: str = "", + host: str = "127.0.0.1", + port: Optional[int] = None, + **kwargs: Any, + ): + self.app = app + super().__init__(scheme=scheme, host=host, port=port, **kwargs) + + async def _make_runner(self, **kwargs: Any) -> BaseRunner: + return AppRunner(self.app, **kwargs) + + +class RawTestServer(BaseTestServer): + def __init__( + self, + handler: _RequestHandler, + *, + scheme: str = "", + host: str = "127.0.0.1", + port: Optional[int] = None, + **kwargs: Any, + ) -> None: + self._handler = handler + super().__init__(scheme=scheme, host=host, port=port, **kwargs) + + async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner: + srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs) + return ServerRunner(srv, debug=debug, **kwargs) + + +class TestClient: + """ + A test client implementation. + + To write functional tests for aiohttp based servers. 
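+    A rough usage sketch (not taken from the test suite): wrap a TestServer
+    and drive it as an async context manager::
+
+        app = web.Application()
+        async with TestClient(TestServer(app)) as client:
+            resp = await client.get("/")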
+ + """ + + __test__ = False + + def __init__( + self, + server: BaseTestServer, + *, + cookie_jar: Optional[AbstractCookieJar] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + **kwargs: Any, + ) -> None: + if not isinstance(server, BaseTestServer): + raise TypeError( + "server must be TestServer " "instance, found type: %r" % type(server) + ) + self._server = server + self._loop = loop + if cookie_jar is None: + cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop) + self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs) + self._closed = False + self._responses: List[ClientResponse] = [] + self._websockets: List[ClientWebSocketResponse] = [] + + async def start_server(self) -> None: + await self._server.start_server(loop=self._loop) + + @property + def host(self) -> str: + return self._server.host + + @property + def port(self) -> Optional[int]: + return self._server.port + + @property + def server(self) -> BaseTestServer: + return self._server + + @property + def app(self) -> Optional[Application]: + return cast(Optional[Application], getattr(self._server, "app", None)) + + @property + def session(self) -> ClientSession: + """An internal aiohttp.ClientSession. + + Unlike the methods on the TestClient, client session requests + do not automatically include the host in the url queried, and + will require an absolute path to the resource. + + """ + return self._session + + def make_url(self, path: StrOrURL) -> URL: + return self._server.make_url(path) + + async def _request( + self, method: str, path: StrOrURL, **kwargs: Any + ) -> ClientResponse: + resp = await self._session.request(method, self.make_url(path), **kwargs) + # save it to close later + self._responses.append(resp) + return resp + + if sys.version_info >= (3, 11) and TYPE_CHECKING: + + def request( + self, method: str, path: StrOrURL, **kwargs: Unpack[_RequestOptions] + ) -> _RequestContextManager: ... + + def get( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def options( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def head( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def post( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def put( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def patch( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + def delete( + self, + path: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> _RequestContextManager: ... + + else: + + def request( + self, method: str, path: StrOrURL, **kwargs: Any + ) -> _RequestContextManager: + """Routes a request to tested http server. + + The interface is identical to aiohttp.ClientSession.request, + except the loop kwarg is overridden by the instance used by the + test server. 
+ + """ + return _RequestContextManager(self._request(method, path, **kwargs)) + + def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP GET request.""" + return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs)) + + def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP POST request.""" + return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs)) + + def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP OPTIONS request.""" + return _RequestContextManager( + self._request(hdrs.METH_OPTIONS, path, **kwargs) + ) + + def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP HEAD request.""" + return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs)) + + def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PUT request.""" + return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs)) + + def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_PATCH, path, **kwargs) + ) + + def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_DELETE, path, **kwargs) + ) + + def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager: + """Initiate websocket connection. + + The api corresponds to aiohttp.ClientSession.ws_connect. + + """ + return _WSRequestContextManager(self._ws_connect(path, **kwargs)) + + async def _ws_connect( + self, path: StrOrURL, **kwargs: Any + ) -> ClientWebSocketResponse: + ws = await self._session.ws_connect(self.make_url(path), **kwargs) + self._websockets.append(ws) + return ws + + async def close(self) -> None: + """Close all fixtures created by the test client. + + After that point, the TestClient is no longer usable. + + This is an idempotent function: running close multiple times + will not have any additional effects. + + close is also run on exit when used as a(n) (asynchronous) + context manager. + + """ + if not self._closed: + for resp in self._responses: + resp.close() + for ws in self._websockets: + await ws.close() + await self._session.close() + await self._server.close() + self._closed = True + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> "TestClient": + await self.start_server() + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + await self.close() + + +class AioHTTPTestCase(IsolatedAsyncioTestCase): + """A base class to allow for unittest web applications using aiohttp. + + Provides the following: + + * self.client (aiohttp.test_utils.TestClient): an aiohttp test client. + * self.loop (asyncio.BaseEventLoop): the event loop in which the + application and server are running. 
+ * self.app (aiohttp.web.Application): the application returned by + self.get_application() + + Note that the TestClient's methods are asynchronous: you have to + execute function on the test client using asynchronous methods. + """ + + async def get_application(self) -> Application: + """Get application. + + This method should be overridden + to return the aiohttp.web.Application + object to test. + """ + return self.get_app() + + def get_app(self) -> Application: + """Obsolete method used to constructing web application. + + Use .get_application() coroutine instead. + """ + raise RuntimeError("Did you forget to define get_application()?") + + async def asyncSetUp(self) -> None: + self.loop = asyncio.get_running_loop() + return await self.setUpAsync() + + async def setUpAsync(self) -> None: + self.app = await self.get_application() + self.server = await self.get_server(self.app) + self.client = await self.get_client(self.server) + + await self.client.start_server() + + async def asyncTearDown(self) -> None: + return await self.tearDownAsync() + + async def tearDownAsync(self) -> None: + await self.client.close() + + async def get_server(self, app: Application) -> TestServer: + """Return a TestServer instance.""" + return TestServer(app, loop=self.loop) + + async def get_client(self, server: TestServer) -> TestClient: + """Return a TestClient instance.""" + return TestClient(server, loop=self.loop) + + +def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any: + """ + A decorator dedicated to use with asynchronous AioHTTPTestCase test methods. + + In 3.8+, this does nothing. + """ + warnings.warn( + "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+", + DeprecationWarning, + stacklevel=2, + ) + return func + + +_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop] + + +@contextlib.contextmanager +def loop_context( + loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False +) -> Iterator[asyncio.AbstractEventLoop]: + """A contextmanager that creates an event_loop, for test purposes. + + Handles the creation and cleanup of a test loop. + """ + loop = setup_test_loop(loop_factory) + yield loop + teardown_test_loop(loop, fast=fast) + + +def setup_test_loop( + loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, +) -> asyncio.AbstractEventLoop: + """Create and return an asyncio.BaseEventLoop instance. + + The caller should also call teardown_test_loop, + once they are done with the loop. 
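+    loop_context() above wraps this setup/teardown pair; a rough usage sketch
+    (some_coroutine is a placeholder)::
+
+        with loop_context() as loop:
+            loop.run_until_complete(some_coroutine())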
+ """ + loop = loop_factory() + asyncio.set_event_loop(loop) + return loop + + +def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None: + """Teardown and cleanup an event_loop created by setup_test_loop.""" + closed = loop.is_closed() + if not closed: + loop.call_soon(loop.stop) + loop.run_forever() + loop.close() + + if not fast: + gc.collect() + + asyncio.set_event_loop(None) + + +def _create_app_mock() -> mock.MagicMock: + def get_dict(app: Any, key: str) -> Any: + return app.__app_dict[key] + + def set_dict(app: Any, key: str, value: Any) -> None: + app.__app_dict[key] = value + + app = mock.MagicMock(spec=Application) + app.__app_dict = {} + app.__getitem__ = get_dict + app.__setitem__ = set_dict + + app._debug = False + app.on_response_prepare = Signal(app) + app.on_response_prepare.freeze() + return app + + +def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock: + transport = mock.Mock() + + def get_extra_info(key: str) -> Optional[SSLContext]: + if key == "sslcontext": + return sslcontext + else: + return None + + transport.get_extra_info.side_effect = get_extra_info + return transport + + +def make_mocked_request( + method: str, + path: str, + headers: Any = None, + *, + match_info: Any = sentinel, + version: HttpVersion = HttpVersion(1, 1), + closing: bool = False, + app: Any = None, + writer: Any = sentinel, + protocol: Any = sentinel, + transport: Any = sentinel, + payload: Any = sentinel, + sslcontext: Optional[SSLContext] = None, + client_max_size: int = 1024**2, + loop: Any = ..., +) -> Request: + """Creates mocked web.Request testing purposes. + + Useful in unit tests, when spinning full web server is overkill or + specific conditions and errors are hard to trigger. + """ + task = mock.Mock() + if loop is ...: + # no loop passed, try to get the current one if + # its is running as we need a real loop to create + # executor jobs to be able to do testing + # with a real executor + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = mock.Mock() + loop.create_future.return_value = () + + if version < HttpVersion(1, 1): + closing = True + + if headers: + headers = CIMultiDictProxy(CIMultiDict(headers)) + raw_hdrs = tuple( + (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items() + ) + else: + headers = CIMultiDictProxy(CIMultiDict()) + raw_hdrs = () + + chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower() + + message = RawRequestMessage( + method, + path, + version, + headers, + raw_hdrs, + closing, + None, + False, + chunked, + URL(path), + ) + if app is None: + app = _create_app_mock() + + if transport is sentinel: + transport = _create_transport(sslcontext) + + if protocol is sentinel: + protocol = mock.Mock() + protocol.transport = transport + + if writer is sentinel: + writer = mock.Mock() + writer.write_headers = make_mocked_coro(None) + writer.write = make_mocked_coro(None) + writer.write_eof = make_mocked_coro(None) + writer.drain = make_mocked_coro(None) + writer.transport = transport + + protocol.transport = transport + protocol.writer = writer + + if payload is sentinel: + payload = mock.Mock() + + req = Request( + message, payload, protocol, writer, task, loop, client_max_size=client_max_size + ) + + match_info = UrlMappingMatchInfo( + {} if match_info is sentinel else match_info, mock.Mock() + ) + match_info.add_app(app) + req._match_info = match_info + + return req + + +def make_mocked_coro( + return_value: Any = sentinel, raise_exception: Any = sentinel +) -> Any: 
+ """Creates a coroutine mock.""" + + async def mock_coro(*args: Any, **kwargs: Any) -> Any: + if raise_exception is not sentinel: + raise raise_exception + if not inspect.isawaitable(return_value): + return return_value + await return_value + + return mock.Mock(wraps=mock_coro) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/tracing.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/tracing.py new file mode 100644 index 000000000..012ed7bda --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/tracing.py @@ -0,0 +1,470 @@ +from types import SimpleNamespace +from typing import TYPE_CHECKING, Awaitable, Mapping, Optional, Protocol, Type, TypeVar + +import attr +from aiosignal import Signal +from multidict import CIMultiDict +from yarl import URL + +from .client_reqrep import ClientResponse + +if TYPE_CHECKING: + from .client import ClientSession + + _ParamT_contra = TypeVar("_ParamT_contra", contravariant=True) + + class _SignalCallback(Protocol[_ParamT_contra]): + def __call__( + self, + __client_session: ClientSession, + __trace_config_ctx: SimpleNamespace, + __params: _ParamT_contra, + ) -> Awaitable[None]: ... + + +__all__ = ( + "TraceConfig", + "TraceRequestStartParams", + "TraceRequestEndParams", + "TraceRequestExceptionParams", + "TraceConnectionQueuedStartParams", + "TraceConnectionQueuedEndParams", + "TraceConnectionCreateStartParams", + "TraceConnectionCreateEndParams", + "TraceConnectionReuseconnParams", + "TraceDnsResolveHostStartParams", + "TraceDnsResolveHostEndParams", + "TraceDnsCacheHitParams", + "TraceDnsCacheMissParams", + "TraceRequestRedirectParams", + "TraceRequestChunkSentParams", + "TraceResponseChunkReceivedParams", + "TraceRequestHeadersSentParams", +) + + +class TraceConfig: + """First-class used to trace requests launched via ClientSession objects.""" + + def __init__( + self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace + ) -> None: + self._on_request_start: Signal[_SignalCallback[TraceRequestStartParams]] = ( + Signal(self) + ) + self._on_request_chunk_sent: Signal[ + _SignalCallback[TraceRequestChunkSentParams] + ] = Signal(self) + self._on_response_chunk_received: Signal[ + _SignalCallback[TraceResponseChunkReceivedParams] + ] = Signal(self) + self._on_request_end: Signal[_SignalCallback[TraceRequestEndParams]] = Signal( + self + ) + self._on_request_exception: Signal[ + _SignalCallback[TraceRequestExceptionParams] + ] = Signal(self) + self._on_request_redirect: Signal[ + _SignalCallback[TraceRequestRedirectParams] + ] = Signal(self) + self._on_connection_queued_start: Signal[ + _SignalCallback[TraceConnectionQueuedStartParams] + ] = Signal(self) + self._on_connection_queued_end: Signal[ + _SignalCallback[TraceConnectionQueuedEndParams] + ] = Signal(self) + self._on_connection_create_start: Signal[ + _SignalCallback[TraceConnectionCreateStartParams] + ] = Signal(self) + self._on_connection_create_end: Signal[ + _SignalCallback[TraceConnectionCreateEndParams] + ] = Signal(self) + self._on_connection_reuseconn: Signal[ + _SignalCallback[TraceConnectionReuseconnParams] + ] = Signal(self) + self._on_dns_resolvehost_start: Signal[ + _SignalCallback[TraceDnsResolveHostStartParams] + ] = Signal(self) + self._on_dns_resolvehost_end: Signal[ + _SignalCallback[TraceDnsResolveHostEndParams] + ] = Signal(self) + self._on_dns_cache_hit: Signal[_SignalCallback[TraceDnsCacheHitParams]] = ( + Signal(self) + ) + self._on_dns_cache_miss: Signal[_SignalCallback[TraceDnsCacheMissParams]] = ( + 
Signal(self) + ) + self._on_request_headers_sent: Signal[ + _SignalCallback[TraceRequestHeadersSentParams] + ] = Signal(self) + + self._trace_config_ctx_factory = trace_config_ctx_factory + + def trace_config_ctx( + self, trace_request_ctx: Optional[Mapping[str, str]] = None + ) -> SimpleNamespace: + """Return a new trace_config_ctx instance""" + return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx) + + def freeze(self) -> None: + self._on_request_start.freeze() + self._on_request_chunk_sent.freeze() + self._on_response_chunk_received.freeze() + self._on_request_end.freeze() + self._on_request_exception.freeze() + self._on_request_redirect.freeze() + self._on_connection_queued_start.freeze() + self._on_connection_queued_end.freeze() + self._on_connection_create_start.freeze() + self._on_connection_create_end.freeze() + self._on_connection_reuseconn.freeze() + self._on_dns_resolvehost_start.freeze() + self._on_dns_resolvehost_end.freeze() + self._on_dns_cache_hit.freeze() + self._on_dns_cache_miss.freeze() + self._on_request_headers_sent.freeze() + + @property + def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]": + return self._on_request_start + + @property + def on_request_chunk_sent( + self, + ) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]": + return self._on_request_chunk_sent + + @property + def on_response_chunk_received( + self, + ) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]": + return self._on_response_chunk_received + + @property + def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]": + return self._on_request_end + + @property + def on_request_exception( + self, + ) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]": + return self._on_request_exception + + @property + def on_request_redirect( + self, + ) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]": + return self._on_request_redirect + + @property + def on_connection_queued_start( + self, + ) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]": + return self._on_connection_queued_start + + @property + def on_connection_queued_end( + self, + ) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]": + return self._on_connection_queued_end + + @property + def on_connection_create_start( + self, + ) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]": + return self._on_connection_create_start + + @property + def on_connection_create_end( + self, + ) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]": + return self._on_connection_create_end + + @property + def on_connection_reuseconn( + self, + ) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]": + return self._on_connection_reuseconn + + @property + def on_dns_resolvehost_start( + self, + ) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]": + return self._on_dns_resolvehost_start + + @property + def on_dns_resolvehost_end( + self, + ) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]": + return self._on_dns_resolvehost_end + + @property + def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]": + return self._on_dns_cache_hit + + @property + def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]": + return self._on_dns_cache_miss + + @property + def on_request_headers_sent( + self, + ) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]": + return self._on_request_headers_sent + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class 
TraceRequestStartParams: + """Parameters sent by the `on_request_start` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestChunkSentParams: + """Parameters sent by the `on_request_chunk_sent` signal""" + + method: str + url: URL + chunk: bytes + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceResponseChunkReceivedParams: + """Parameters sent by the `on_response_chunk_received` signal""" + + method: str + url: URL + chunk: bytes + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestEndParams: + """Parameters sent by the `on_request_end` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + response: ClientResponse + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestExceptionParams: + """Parameters sent by the `on_request_exception` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + exception: BaseException + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestRedirectParams: + """Parameters sent by the `on_request_redirect` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + response: ClientResponse + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionQueuedStartParams: + """Parameters sent by the `on_connection_queued_start` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionQueuedEndParams: + """Parameters sent by the `on_connection_queued_end` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionCreateStartParams: + """Parameters sent by the `on_connection_create_start` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionCreateEndParams: + """Parameters sent by the `on_connection_create_end` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionReuseconnParams: + """Parameters sent by the `on_connection_reuseconn` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceDnsResolveHostStartParams: + """Parameters sent by the `on_dns_resolvehost_start` signal""" + + host: str + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceDnsResolveHostEndParams: + """Parameters sent by the `on_dns_resolvehost_end` signal""" + + host: str + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceDnsCacheHitParams: + """Parameters sent by the `on_dns_cache_hit` signal""" + + host: str + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceDnsCacheMissParams: + """Parameters sent by the `on_dns_cache_miss` signal""" + + host: str + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestHeadersSentParams: + """Parameters sent by the `on_request_headers_sent` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + + +class Trace: + """Internal dependency holder class. + + Used to keep together the main dependencies used + at the moment of send a signal. 
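+    Client code normally interacts with TraceConfig rather than with this
+    class; a minimal sketch (on_request_start is a user-defined coroutine)::
+
+        trace_config = TraceConfig()
+
+        async def on_request_start(session, ctx, params):
+            print("Requesting %s" % params.url)
+
+        trace_config.on_request_start.append(on_request_start)
+        client_session = ClientSession(trace_configs=[trace_config])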
+ """ + + def __init__( + self, + session: "ClientSession", + trace_config: TraceConfig, + trace_config_ctx: SimpleNamespace, + ) -> None: + self._trace_config = trace_config + self._trace_config_ctx = trace_config_ctx + self._session = session + + async def send_request_start( + self, method: str, url: URL, headers: "CIMultiDict[str]" + ) -> None: + return await self._trace_config.on_request_start.send( + self._session, + self._trace_config_ctx, + TraceRequestStartParams(method, url, headers), + ) + + async def send_request_chunk_sent( + self, method: str, url: URL, chunk: bytes + ) -> None: + return await self._trace_config.on_request_chunk_sent.send( + self._session, + self._trace_config_ctx, + TraceRequestChunkSentParams(method, url, chunk), + ) + + async def send_response_chunk_received( + self, method: str, url: URL, chunk: bytes + ) -> None: + return await self._trace_config.on_response_chunk_received.send( + self._session, + self._trace_config_ctx, + TraceResponseChunkReceivedParams(method, url, chunk), + ) + + async def send_request_end( + self, + method: str, + url: URL, + headers: "CIMultiDict[str]", + response: ClientResponse, + ) -> None: + return await self._trace_config.on_request_end.send( + self._session, + self._trace_config_ctx, + TraceRequestEndParams(method, url, headers, response), + ) + + async def send_request_exception( + self, + method: str, + url: URL, + headers: "CIMultiDict[str]", + exception: BaseException, + ) -> None: + return await self._trace_config.on_request_exception.send( + self._session, + self._trace_config_ctx, + TraceRequestExceptionParams(method, url, headers, exception), + ) + + async def send_request_redirect( + self, + method: str, + url: URL, + headers: "CIMultiDict[str]", + response: ClientResponse, + ) -> None: + return await self._trace_config._on_request_redirect.send( + self._session, + self._trace_config_ctx, + TraceRequestRedirectParams(method, url, headers, response), + ) + + async def send_connection_queued_start(self) -> None: + return await self._trace_config.on_connection_queued_start.send( + self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams() + ) + + async def send_connection_queued_end(self) -> None: + return await self._trace_config.on_connection_queued_end.send( + self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams() + ) + + async def send_connection_create_start(self) -> None: + return await self._trace_config.on_connection_create_start.send( + self._session, self._trace_config_ctx, TraceConnectionCreateStartParams() + ) + + async def send_connection_create_end(self) -> None: + return await self._trace_config.on_connection_create_end.send( + self._session, self._trace_config_ctx, TraceConnectionCreateEndParams() + ) + + async def send_connection_reuseconn(self) -> None: + return await self._trace_config.on_connection_reuseconn.send( + self._session, self._trace_config_ctx, TraceConnectionReuseconnParams() + ) + + async def send_dns_resolvehost_start(self, host: str) -> None: + return await self._trace_config.on_dns_resolvehost_start.send( + self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host) + ) + + async def send_dns_resolvehost_end(self, host: str) -> None: + return await self._trace_config.on_dns_resolvehost_end.send( + self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host) + ) + + async def send_dns_cache_hit(self, host: str) -> None: + return await self._trace_config.on_dns_cache_hit.send( + self._session, self._trace_config_ctx, 
TraceDnsCacheHitParams(host) + ) + + async def send_dns_cache_miss(self, host: str) -> None: + return await self._trace_config.on_dns_cache_miss.send( + self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host) + ) + + async def send_request_headers( + self, method: str, url: URL, headers: "CIMultiDict[str]" + ) -> None: + return await self._trace_config._on_request_headers_sent.send( + self._session, + self._trace_config_ctx, + TraceRequestHeadersSentParams(method, url, headers), + ) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/typedefs.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/typedefs.py new file mode 100644 index 000000000..9fb21c15f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/typedefs.py @@ -0,0 +1,67 @@ +import json +import os +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Iterable, + Mapping, + Protocol, + Tuple, + Union, +) + +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr +from yarl import URL + +DEFAULT_JSON_ENCODER = json.dumps +DEFAULT_JSON_DECODER = json.loads + +if TYPE_CHECKING: + _CIMultiDict = CIMultiDict[str] + _CIMultiDictProxy = CIMultiDictProxy[str] + _MultiDict = MultiDict[str] + _MultiDictProxy = MultiDictProxy[str] + from http.cookies import BaseCookie, Morsel + + from .web import Request, StreamResponse +else: + _CIMultiDict = CIMultiDict + _CIMultiDictProxy = CIMultiDictProxy + _MultiDict = MultiDict + _MultiDictProxy = MultiDictProxy + +Byteish = Union[bytes, bytearray, memoryview] +JSONEncoder = Callable[[Any], str] +JSONDecoder = Callable[[str], Any] +LooseHeaders = Union[ + Mapping[str, str], + Mapping[istr, str], + _CIMultiDict, + _CIMultiDictProxy, + Iterable[Tuple[Union[str, istr], str]], +] +RawHeaders = Tuple[Tuple[bytes, bytes], ...] +StrOrURL = Union[str, URL] + +LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] +LooseCookiesIterables = Iterable[ + Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] +] +LooseCookies = Union[ + LooseCookiesMappings, + LooseCookiesIterables, + "BaseCookie[str]", +] + +Handler = Callable[["Request"], Awaitable["StreamResponse"]] + + +class Middleware(Protocol): + def __call__( + self, request: "Request", handler: Handler + ) -> Awaitable["StreamResponse"]: ... 
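+
+
+# A rough illustration (not part of the library API) of what satisfies these
+# aliases: a Handler is any coroutine taking a Request and returning a
+# StreamResponse, and a Middleware is a callable receiving (request, handler).
+# Kept as a comment because this module must not import aiohttp.web at runtime.
+#
+#     async def hello(request: "Request") -> "StreamResponse":
+#         return web.Response(text="ok")
+#
+#     @web.middleware
+#     async def noop_middleware(request: "Request", handler: Handler) -> "StreamResponse":
+#         return await handler(request)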
+ + +PathLike = Union[str, "os.PathLike[str]"] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web.py new file mode 100644 index 000000000..8708f1fcb --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web.py @@ -0,0 +1,590 @@ +import asyncio +import logging +import os +import socket +import sys +import warnings +from argparse import ArgumentParser +from collections.abc import Iterable +from importlib import import_module +from typing import ( + Any, + Awaitable, + Callable, + Iterable as TypingIterable, + List, + Optional, + Set, + Type, + Union, + cast, +) + +from .abc import AbstractAccessLogger +from .helpers import AppKey as AppKey +from .log import access_logger +from .typedefs import PathLike +from .web_app import Application as Application, CleanupError as CleanupError +from .web_exceptions import ( + HTTPAccepted as HTTPAccepted, + HTTPBadGateway as HTTPBadGateway, + HTTPBadRequest as HTTPBadRequest, + HTTPClientError as HTTPClientError, + HTTPConflict as HTTPConflict, + HTTPCreated as HTTPCreated, + HTTPError as HTTPError, + HTTPException as HTTPException, + HTTPExpectationFailed as HTTPExpectationFailed, + HTTPFailedDependency as HTTPFailedDependency, + HTTPForbidden as HTTPForbidden, + HTTPFound as HTTPFound, + HTTPGatewayTimeout as HTTPGatewayTimeout, + HTTPGone as HTTPGone, + HTTPInsufficientStorage as HTTPInsufficientStorage, + HTTPInternalServerError as HTTPInternalServerError, + HTTPLengthRequired as HTTPLengthRequired, + HTTPMethodNotAllowed as HTTPMethodNotAllowed, + HTTPMisdirectedRequest as HTTPMisdirectedRequest, + HTTPMove as HTTPMove, + HTTPMovedPermanently as HTTPMovedPermanently, + HTTPMultipleChoices as HTTPMultipleChoices, + HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired, + HTTPNoContent as HTTPNoContent, + HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation, + HTTPNotAcceptable as HTTPNotAcceptable, + HTTPNotExtended as HTTPNotExtended, + HTTPNotFound as HTTPNotFound, + HTTPNotImplemented as HTTPNotImplemented, + HTTPNotModified as HTTPNotModified, + HTTPOk as HTTPOk, + HTTPPartialContent as HTTPPartialContent, + HTTPPaymentRequired as HTTPPaymentRequired, + HTTPPermanentRedirect as HTTPPermanentRedirect, + HTTPPreconditionFailed as HTTPPreconditionFailed, + HTTPPreconditionRequired as HTTPPreconditionRequired, + HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired, + HTTPRedirection as HTTPRedirection, + HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge, + HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge, + HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable, + HTTPRequestTimeout as HTTPRequestTimeout, + HTTPRequestURITooLong as HTTPRequestURITooLong, + HTTPResetContent as HTTPResetContent, + HTTPSeeOther as HTTPSeeOther, + HTTPServerError as HTTPServerError, + HTTPServiceUnavailable as HTTPServiceUnavailable, + HTTPSuccessful as HTTPSuccessful, + HTTPTemporaryRedirect as HTTPTemporaryRedirect, + HTTPTooManyRequests as HTTPTooManyRequests, + HTTPUnauthorized as HTTPUnauthorized, + HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons, + HTTPUnprocessableEntity as HTTPUnprocessableEntity, + HTTPUnsupportedMediaType as HTTPUnsupportedMediaType, + HTTPUpgradeRequired as HTTPUpgradeRequired, + HTTPUseProxy as HTTPUseProxy, + HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates, + HTTPVersionNotSupported as HTTPVersionNotSupported, + NotAppKeyWarning 
as NotAppKeyWarning, +) +from .web_fileresponse import FileResponse as FileResponse +from .web_log import AccessLogger +from .web_middlewares import ( + middleware as middleware, + normalize_path_middleware as normalize_path_middleware, +) +from .web_protocol import ( + PayloadAccessError as PayloadAccessError, + RequestHandler as RequestHandler, + RequestPayloadError as RequestPayloadError, +) +from .web_request import ( + BaseRequest as BaseRequest, + FileField as FileField, + Request as Request, +) +from .web_response import ( + ContentCoding as ContentCoding, + Response as Response, + StreamResponse as StreamResponse, + json_response as json_response, +) +from .web_routedef import ( + AbstractRouteDef as AbstractRouteDef, + RouteDef as RouteDef, + RouteTableDef as RouteTableDef, + StaticDef as StaticDef, + delete as delete, + get as get, + head as head, + options as options, + patch as patch, + post as post, + put as put, + route as route, + static as static, + view as view, +) +from .web_runner import ( + AppRunner as AppRunner, + BaseRunner as BaseRunner, + BaseSite as BaseSite, + GracefulExit as GracefulExit, + NamedPipeSite as NamedPipeSite, + ServerRunner as ServerRunner, + SockSite as SockSite, + TCPSite as TCPSite, + UnixSite as UnixSite, +) +from .web_server import Server as Server +from .web_urldispatcher import ( + AbstractResource as AbstractResource, + AbstractRoute as AbstractRoute, + DynamicResource as DynamicResource, + PlainResource as PlainResource, + PrefixedSubAppResource as PrefixedSubAppResource, + Resource as Resource, + ResourceRoute as ResourceRoute, + StaticResource as StaticResource, + UrlDispatcher as UrlDispatcher, + UrlMappingMatchInfo as UrlMappingMatchInfo, + View as View, +) +from .web_ws import ( + WebSocketReady as WebSocketReady, + WebSocketResponse as WebSocketResponse, + WSMsgType as WSMsgType, +) + +__all__ = ( + # web_app + "AppKey", + "Application", + "CleanupError", + # web_exceptions + "NotAppKeyWarning", + "HTTPAccepted", + "HTTPBadGateway", + "HTTPBadRequest", + "HTTPClientError", + "HTTPConflict", + "HTTPCreated", + "HTTPError", + "HTTPException", + "HTTPExpectationFailed", + "HTTPFailedDependency", + "HTTPForbidden", + "HTTPFound", + "HTTPGatewayTimeout", + "HTTPGone", + "HTTPInsufficientStorage", + "HTTPInternalServerError", + "HTTPLengthRequired", + "HTTPMethodNotAllowed", + "HTTPMisdirectedRequest", + "HTTPMove", + "HTTPMovedPermanently", + "HTTPMultipleChoices", + "HTTPNetworkAuthenticationRequired", + "HTTPNoContent", + "HTTPNonAuthoritativeInformation", + "HTTPNotAcceptable", + "HTTPNotExtended", + "HTTPNotFound", + "HTTPNotImplemented", + "HTTPNotModified", + "HTTPOk", + "HTTPPartialContent", + "HTTPPaymentRequired", + "HTTPPermanentRedirect", + "HTTPPreconditionFailed", + "HTTPPreconditionRequired", + "HTTPProxyAuthenticationRequired", + "HTTPRedirection", + "HTTPRequestEntityTooLarge", + "HTTPRequestHeaderFieldsTooLarge", + "HTTPRequestRangeNotSatisfiable", + "HTTPRequestTimeout", + "HTTPRequestURITooLong", + "HTTPResetContent", + "HTTPSeeOther", + "HTTPServerError", + "HTTPServiceUnavailable", + "HTTPSuccessful", + "HTTPTemporaryRedirect", + "HTTPTooManyRequests", + "HTTPUnauthorized", + "HTTPUnavailableForLegalReasons", + "HTTPUnprocessableEntity", + "HTTPUnsupportedMediaType", + "HTTPUpgradeRequired", + "HTTPUseProxy", + "HTTPVariantAlsoNegotiates", + "HTTPVersionNotSupported", + # web_fileresponse + "FileResponse", + # web_middlewares + "middleware", + "normalize_path_middleware", + # web_protocol + "PayloadAccessError", + 
"RequestHandler", + "RequestPayloadError", + # web_request + "BaseRequest", + "FileField", + "Request", + # web_response + "ContentCoding", + "Response", + "StreamResponse", + "json_response", + # web_routedef + "AbstractRouteDef", + "RouteDef", + "RouteTableDef", + "StaticDef", + "delete", + "get", + "head", + "options", + "patch", + "post", + "put", + "route", + "static", + "view", + # web_runner + "AppRunner", + "BaseRunner", + "BaseSite", + "GracefulExit", + "ServerRunner", + "SockSite", + "TCPSite", + "UnixSite", + "NamedPipeSite", + # web_server + "Server", + # web_urldispatcher + "AbstractResource", + "AbstractRoute", + "DynamicResource", + "PlainResource", + "PrefixedSubAppResource", + "Resource", + "ResourceRoute", + "StaticResource", + "UrlDispatcher", + "UrlMappingMatchInfo", + "View", + # web_ws + "WebSocketReady", + "WebSocketResponse", + "WSMsgType", + # web + "run_app", +) + + +try: + from ssl import SSLContext +except ImportError: # pragma: no cover + SSLContext = Any # type: ignore[misc,assignment] + +# Only display warning when using -Wdefault, -We, -X dev or similar. +warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True) + +HostSequence = TypingIterable[str] + + +async def _run_app( + app: Union[Application, Awaitable[Application]], + *, + host: Optional[Union[str, HostSequence]] = None, + port: Optional[int] = None, + path: Union[PathLike, TypingIterable[PathLike], None] = None, + sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, + shutdown_timeout: float = 60.0, + keepalive_timeout: float = 75.0, + ssl_context: Optional[SSLContext] = None, + print: Optional[Callable[..., None]] = print, + backlog: int = 128, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log_format: str = AccessLogger.LOG_FORMAT, + access_log: Optional[logging.Logger] = access_logger, + handle_signals: bool = True, + reuse_address: Optional[bool] = None, + reuse_port: Optional[bool] = None, + handler_cancellation: bool = False, +) -> None: + # An internal function to actually do all dirty job for application running + if asyncio.iscoroutine(app): + app = await app + + app = cast(Application, app) + + runner = AppRunner( + app, + handle_signals=handle_signals, + access_log_class=access_log_class, + access_log_format=access_log_format, + access_log=access_log, + keepalive_timeout=keepalive_timeout, + shutdown_timeout=shutdown_timeout, + handler_cancellation=handler_cancellation, + ) + + await runner.setup() + + sites: List[BaseSite] = [] + + try: + if host is not None: + if isinstance(host, (str, bytes, bytearray, memoryview)): + sites.append( + TCPSite( + runner, + host, + port, + ssl_context=ssl_context, + backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + ) + else: + for h in host: + sites.append( + TCPSite( + runner, + h, + port, + ssl_context=ssl_context, + backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + ) + elif path is None and sock is None or port is not None: + sites.append( + TCPSite( + runner, + port=port, + ssl_context=ssl_context, + backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + ) + + if path is not None: + if isinstance(path, (str, os.PathLike)): + sites.append( + UnixSite( + runner, + path, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + else: + for p in path: + sites.append( + UnixSite( + runner, + p, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + + if sock is not None: + if not isinstance(sock, Iterable): + 
sites.append( + SockSite( + runner, + sock, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + else: + for s in sock: + sites.append( + SockSite( + runner, + s, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + for site in sites: + await site.start() + + if print: # pragma: no branch + names = sorted(str(s.name) for s in runner.sites) + print( + "======== Running on {} ========\n" + "(Press CTRL+C to quit)".format(", ".join(names)) + ) + + # sleep forever by 1 hour intervals, + while True: + await asyncio.sleep(3600) + finally: + await runner.cleanup() + + +def _cancel_tasks( + to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop +) -> None: + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during asyncio.run() shutdown", + "exception": task.exception(), + "task": task, + } + ) + + +def run_app( + app: Union[Application, Awaitable[Application]], + *, + host: Optional[Union[str, HostSequence]] = None, + port: Optional[int] = None, + path: Union[PathLike, TypingIterable[PathLike], None] = None, + sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, + shutdown_timeout: float = 60.0, + keepalive_timeout: float = 75.0, + ssl_context: Optional[SSLContext] = None, + print: Optional[Callable[..., None]] = print, + backlog: int = 128, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log_format: str = AccessLogger.LOG_FORMAT, + access_log: Optional[logging.Logger] = access_logger, + handle_signals: bool = True, + reuse_address: Optional[bool] = None, + reuse_port: Optional[bool] = None, + handler_cancellation: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, +) -> None: + """Run an app locally""" + if loop is None: + loop = asyncio.new_event_loop() + + # Configure if and only if in debugging mode and using the default logger + if loop.get_debug() and access_log and access_log.name == "aiohttp.access": + if access_log.level == logging.NOTSET: + access_log.setLevel(logging.DEBUG) + if not access_log.hasHandlers(): + access_log.addHandler(logging.StreamHandler()) + + main_task = loop.create_task( + _run_app( + app, + host=host, + port=port, + path=path, + sock=sock, + shutdown_timeout=shutdown_timeout, + keepalive_timeout=keepalive_timeout, + ssl_context=ssl_context, + print=print, + backlog=backlog, + access_log_class=access_log_class, + access_log_format=access_log_format, + access_log=access_log, + handle_signals=handle_signals, + reuse_address=reuse_address, + reuse_port=reuse_port, + handler_cancellation=handler_cancellation, + ) + ) + + try: + asyncio.set_event_loop(loop) + loop.run_until_complete(main_task) + except (GracefulExit, KeyboardInterrupt): # pragma: no cover + pass + finally: + _cancel_tasks({main_task}, loop) + _cancel_tasks(asyncio.all_tasks(loop), loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + loop.close() + + +def main(argv: List[str]) -> None: + arg_parser = ArgumentParser( + description="aiohttp.web Application server", prog="aiohttp.web" + ) + arg_parser.add_argument( + "entry_func", + help=( + "Callable returning the `aiohttp.web.Application` instance to " + "run. Should be specified in the 'module:function' syntax." 
+ ), + metavar="entry-func", + ) + arg_parser.add_argument( + "-H", + "--hostname", + help="TCP/IP hostname to serve on (default: %(default)r)", + default="localhost", + ) + arg_parser.add_argument( + "-P", + "--port", + help="TCP/IP port to serve on (default: %(default)r)", + type=int, + default="8080", + ) + arg_parser.add_argument( + "-U", + "--path", + help="Unix file system path to serve on. Specifying a path will cause " + "hostname and port arguments to be ignored.", + ) + args, extra_argv = arg_parser.parse_known_args(argv) + + # Import logic + mod_str, _, func_str = args.entry_func.partition(":") + if not func_str or not mod_str: + arg_parser.error("'entry-func' not in 'module:function' syntax") + if mod_str.startswith("."): + arg_parser.error("relative module names not supported") + try: + module = import_module(mod_str) + except ImportError as ex: + arg_parser.error(f"unable to import {mod_str}: {ex}") + try: + func = getattr(module, func_str) + except AttributeError: + arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}") + + # Compatibility logic + if args.path is not None and not hasattr(socket, "AF_UNIX"): + arg_parser.error( + "file system paths not supported by your operating" " environment" + ) + + logging.basicConfig(level=logging.DEBUG) + + app = func(extra_argv) + run_app(app, host=args.hostname, port=args.port, path=args.path) + arg_parser.exit(message="Stopped\n") + + +if __name__ == "__main__": # pragma: no branch + main(sys.argv[1:]) # pragma: no cover diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_app.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_app.py new file mode 100644 index 000000000..3b4b6489e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_app.py @@ -0,0 +1,590 @@ +import asyncio +import logging +import warnings +from functools import partial, update_wrapper +from typing import ( + TYPE_CHECKING, + Any, + AsyncIterator, + Awaitable, + Callable, + Dict, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + Optional, + Sequence, + Tuple, + Type, + TypeVar, + Union, + cast, + overload, +) + +from aiosignal import Signal +from frozenlist import FrozenList + +from . 
import hdrs +from .abc import ( + AbstractAccessLogger, + AbstractMatchInfo, + AbstractRouter, + AbstractStreamWriter, +) +from .helpers import DEBUG, AppKey +from .http_parser import RawRequestMessage +from .log import web_logger +from .streams import StreamReader +from .typedefs import Middleware +from .web_exceptions import NotAppKeyWarning +from .web_log import AccessLogger +from .web_middlewares import _fix_request_current_app +from .web_protocol import RequestHandler +from .web_request import Request +from .web_response import StreamResponse +from .web_routedef import AbstractRouteDef +from .web_server import Server +from .web_urldispatcher import ( + AbstractResource, + AbstractRoute, + Domain, + MaskDomain, + MatchedSubAppResource, + PrefixedSubAppResource, + UrlDispatcher, +) + +__all__ = ("Application", "CleanupError") + + +if TYPE_CHECKING: + _AppSignal = Signal[Callable[["Application"], Awaitable[None]]] + _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]] + _Middlewares = FrozenList[Middleware] + _MiddlewaresHandlers = Optional[Sequence[Tuple[Middleware, bool]]] + _Subapps = List["Application"] +else: + # No type checker mode, skip types + _AppSignal = Signal + _RespPrepareSignal = Signal + _Middlewares = FrozenList + _MiddlewaresHandlers = Optional[Sequence] + _Subapps = List + +_T = TypeVar("_T") +_U = TypeVar("_U") +_Resource = TypeVar("_Resource", bound=AbstractResource) + + +class Application(MutableMapping[Union[str, AppKey[Any]], Any]): + ATTRS = frozenset( + [ + "logger", + "_debug", + "_router", + "_loop", + "_handler_args", + "_middlewares", + "_middlewares_handlers", + "_run_middlewares", + "_state", + "_frozen", + "_pre_frozen", + "_subapps", + "_on_response_prepare", + "_on_startup", + "_on_shutdown", + "_on_cleanup", + "_client_max_size", + "_cleanup_ctx", + ] + ) + + def __init__( + self, + *, + logger: logging.Logger = web_logger, + router: Optional[UrlDispatcher] = None, + middlewares: Iterable[Middleware] = (), + handler_args: Optional[Mapping[str, Any]] = None, + client_max_size: int = 1024**2, + loop: Optional[asyncio.AbstractEventLoop] = None, + debug: Any = ..., # mypy doesn't support ellipsis + ) -> None: + if router is None: + router = UrlDispatcher() + else: + warnings.warn( + "router argument is deprecated", DeprecationWarning, stacklevel=2 + ) + assert isinstance(router, AbstractRouter), router + + if loop is not None: + warnings.warn( + "loop argument is deprecated", DeprecationWarning, stacklevel=2 + ) + + if debug is not ...: + warnings.warn( + "debug argument is deprecated", DeprecationWarning, stacklevel=2 + ) + self._debug = debug + self._router: UrlDispatcher = router + self._loop = loop + self._handler_args = handler_args + self.logger = logger + + self._middlewares: _Middlewares = FrozenList(middlewares) + + # initialized on freezing + self._middlewares_handlers: _MiddlewaresHandlers = None + # initialized on freezing + self._run_middlewares: Optional[bool] = None + + self._state: Dict[Union[AppKey[Any], str], object] = {} + self._frozen = False + self._pre_frozen = False + self._subapps: _Subapps = [] + + self._on_response_prepare: _RespPrepareSignal = Signal(self) + self._on_startup: _AppSignal = Signal(self) + self._on_shutdown: _AppSignal = Signal(self) + self._on_cleanup: _AppSignal = Signal(self) + self._cleanup_ctx = CleanupContext() + self._on_startup.append(self._cleanup_ctx._on_startup) + self._on_cleanup.append(self._cleanup_ctx._on_cleanup) + self._client_max_size = client_max_size + + def 
__init_subclass__(cls: Type["Application"]) -> None: + warnings.warn( + "Inheritance class {} from web.Application " + "is discouraged".format(cls.__name__), + DeprecationWarning, + stacklevel=3, + ) + + if DEBUG: # pragma: no cover + + def __setattr__(self, name: str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn( + "Setting custom web.Application.{} attribute " + "is discouraged".format(name), + DeprecationWarning, + stacklevel=2, + ) + super().__setattr__(name, val) + + # MutableMapping API + + def __eq__(self, other: object) -> bool: + return self is other + + @overload # type: ignore[override] + def __getitem__(self, key: AppKey[_T]) -> _T: ... + + @overload + def __getitem__(self, key: str) -> Any: ... + + def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: + return self._state[key] + + def _check_frozen(self) -> None: + if self._frozen: + warnings.warn( + "Changing state of started or joined " "application is deprecated", + DeprecationWarning, + stacklevel=3, + ) + + @overload # type: ignore[override] + def __setitem__(self, key: AppKey[_T], value: _T) -> None: ... + + @overload + def __setitem__(self, key: str, value: Any) -> None: ... + + def __setitem__(self, key: Union[str, AppKey[_T]], value: Any) -> None: + self._check_frozen() + if not isinstance(key, AppKey): + warnings.warn( + "It is recommended to use web.AppKey instances for keys.\n" + + "https://docs.aiohttp.org/en/stable/web_advanced.html" + + "#application-s-config", + category=NotAppKeyWarning, + stacklevel=2, + ) + self._state[key] = value + + def __delitem__(self, key: Union[str, AppKey[_T]]) -> None: + self._check_frozen() + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]: + return iter(self._state) + + @overload # type: ignore[override] + def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ... + + @overload + def get(self, key: AppKey[_T], default: _U) -> Union[_T, _U]: ... + + @overload + def get(self, key: str, default: Any = ...) -> Any: ... 
+ + def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any: + return self._state.get(key, default) + + ######## + @property + def loop(self) -> asyncio.AbstractEventLoop: + # Technically the loop can be None + # but we mask it by explicit type cast + # to provide more convenient type annotation + warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2) + return cast(asyncio.AbstractEventLoop, self._loop) + + def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None: + if loop is None: + loop = asyncio.get_event_loop() + if self._loop is not None and self._loop is not loop: + raise RuntimeError( + "web.Application instance initialized with different loop" + ) + + self._loop = loop + + # set loop debug + if self._debug is ...: + self._debug = loop.get_debug() + + # set loop to sub applications + for subapp in self._subapps: + subapp._set_loop(loop) + + @property + def pre_frozen(self) -> bool: + return self._pre_frozen + + def pre_freeze(self) -> None: + if self._pre_frozen: + return + + self._pre_frozen = True + self._middlewares.freeze() + self._router.freeze() + self._on_response_prepare.freeze() + self._cleanup_ctx.freeze() + self._on_startup.freeze() + self._on_shutdown.freeze() + self._on_cleanup.freeze() + self._middlewares_handlers = tuple(self._prepare_middleware()) + + # If current app and any subapp do not have middlewares avoid run all + # of the code footprint that it implies, which have a middleware + # hardcoded per app that sets up the current_app attribute. If no + # middlewares are configured the handler will receive the proper + # current_app without needing all of this code. + self._run_middlewares = True if self.middlewares else False + + for subapp in self._subapps: + subapp.pre_freeze() + self._run_middlewares = self._run_middlewares or subapp._run_middlewares + + @property + def frozen(self) -> bool: + return self._frozen + + def freeze(self) -> None: + if self._frozen: + return + + self.pre_freeze() + self._frozen = True + for subapp in self._subapps: + subapp.freeze() + + @property + def debug(self) -> bool: + warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2) + return self._debug # type: ignore[no-any-return] + + def _reg_subapp_signals(self, subapp: "Application") -> None: + def reg_handler(signame: str) -> None: + subsig = getattr(subapp, signame) + + async def handler(app: "Application") -> None: + await subsig.send(subapp) + + appsig = getattr(self, signame) + appsig.append(handler) + + reg_handler("on_startup") + reg_handler("on_shutdown") + reg_handler("on_cleanup") + + def add_subapp(self, prefix: str, subapp: "Application") -> PrefixedSubAppResource: + if not isinstance(prefix, str): + raise TypeError("Prefix must be str") + prefix = prefix.rstrip("/") + if not prefix: + raise ValueError("Prefix cannot be empty") + factory = partial(PrefixedSubAppResource, prefix, subapp) + return self._add_subapp(factory, subapp) + + def _add_subapp( + self, resource_factory: Callable[[], _Resource], subapp: "Application" + ) -> _Resource: + if self.frozen: + raise RuntimeError("Cannot add sub application to frozen application") + if subapp.frozen: + raise RuntimeError("Cannot add frozen application") + resource = resource_factory() + self.router.register_resource(resource) + self._reg_subapp_signals(subapp) + self._subapps.append(subapp) + subapp.pre_freeze() + if self._loop is not None: + subapp._set_loop(self._loop) + return resource + + def add_domain(self, domain: str, subapp: 
"Application") -> MatchedSubAppResource: + if not isinstance(domain, str): + raise TypeError("Domain must be str") + elif "*" in domain: + rule: Domain = MaskDomain(domain) + else: + rule = Domain(domain) + factory = partial(MatchedSubAppResource, rule, subapp) + return self._add_subapp(factory, subapp) + + def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: + return self.router.add_routes(routes) + + @property + def on_response_prepare(self) -> _RespPrepareSignal: + return self._on_response_prepare + + @property + def on_startup(self) -> _AppSignal: + return self._on_startup + + @property + def on_shutdown(self) -> _AppSignal: + return self._on_shutdown + + @property + def on_cleanup(self) -> _AppSignal: + return self._on_cleanup + + @property + def cleanup_ctx(self) -> "CleanupContext": + return self._cleanup_ctx + + @property + def router(self) -> UrlDispatcher: + return self._router + + @property + def middlewares(self) -> _Middlewares: + return self._middlewares + + def _make_handler( + self, + *, + loop: Optional[asyncio.AbstractEventLoop] = None, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + **kwargs: Any, + ) -> Server: + + if not issubclass(access_log_class, AbstractAccessLogger): + raise TypeError( + "access_log_class must be subclass of " + "aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class) + ) + + self._set_loop(loop) + self.freeze() + + kwargs["debug"] = self._debug + kwargs["access_log_class"] = access_log_class + if self._handler_args: + for k, v in self._handler_args.items(): + kwargs[k] = v + + return Server( + self._handle, # type: ignore[arg-type] + request_factory=self._make_request, + loop=self._loop, + **kwargs, + ) + + def make_handler( + self, + *, + loop: Optional[asyncio.AbstractEventLoop] = None, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + **kwargs: Any, + ) -> Server: + + warnings.warn( + "Application.make_handler(...) is deprecated, " "use AppRunner API instead", + DeprecationWarning, + stacklevel=2, + ) + + return self._make_handler( + loop=loop, access_log_class=access_log_class, **kwargs + ) + + async def startup(self) -> None: + """Causes on_startup signal + + Should be called in the event loop along with the request handler. + """ + await self.on_startup.send(self) + + async def shutdown(self) -> None: + """Causes on_shutdown signal + + Should be called before cleanup() + """ + await self.on_shutdown.send(self) + + async def cleanup(self) -> None: + """Causes on_cleanup signal + + Should be called after shutdown() + """ + if self.on_cleanup.frozen: + await self.on_cleanup.send(self) + else: + # If an exception occurs in startup, ensure cleanup contexts are completed. 
+ await self._cleanup_ctx._on_cleanup(self) + + def _make_request( + self, + message: RawRequestMessage, + payload: StreamReader, + protocol: RequestHandler, + writer: AbstractStreamWriter, + task: "asyncio.Task[None]", + _cls: Type[Request] = Request, + ) -> Request: + return _cls( + message, + payload, + protocol, + writer, + task, + self._loop, + client_max_size=self._client_max_size, + ) + + def _prepare_middleware(self) -> Iterator[Tuple[Middleware, bool]]: + for m in reversed(self._middlewares): + if getattr(m, "__middleware_version__", None) == 1: + yield m, True + else: + warnings.warn( + 'old-style middleware "{!r}" deprecated, ' "see #2252".format(m), + DeprecationWarning, + stacklevel=2, + ) + yield m, False + + yield _fix_request_current_app(self), True + + async def _handle(self, request: Request) -> StreamResponse: + loop = asyncio.get_event_loop() + debug = loop.get_debug() + match_info = await self._router.resolve(request) + if debug: # pragma: no cover + if not isinstance(match_info, AbstractMatchInfo): + raise TypeError( + "match_info should be AbstractMatchInfo " + "instance, not {!r}".format(match_info) + ) + match_info.add_app(self) + + match_info.freeze() + + resp = None + request._match_info = match_info + expect = request.headers.get(hdrs.EXPECT) + if expect: + resp = await match_info.expect_handler(request) + await request.writer.drain() + + if resp is None: + handler = match_info.handler + + if self._run_middlewares: + for app in match_info.apps[::-1]: + for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] + if new_style: + handler = update_wrapper( + partial(m, handler=handler), handler # type: ignore[misc] + ) + else: + handler = await m(app, handler) # type: ignore[arg-type,assignment] + + resp = await handler(request) + + return resp + + def __call__(self) -> "Application": + """gunicorn compatibility""" + return self + + def __repr__(self) -> str: + return f"" + + def __bool__(self) -> bool: + return True + + +class CleanupError(RuntimeError): + @property + def exceptions(self) -> List[BaseException]: + return cast(List[BaseException], self.args[1]) + + +if TYPE_CHECKING: + _CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]] +else: + _CleanupContextBase = FrozenList + + +class CleanupContext(_CleanupContextBase): + def __init__(self) -> None: + super().__init__() + self._exits: List[AsyncIterator[None]] = [] + + async def _on_startup(self, app: Application) -> None: + for cb in self: + it = cb(app).__aiter__() + await it.__anext__() + self._exits.append(it) + + async def _on_cleanup(self, app: Application) -> None: + errors = [] + for it in reversed(self._exits): + try: + await it.__anext__() + except StopAsyncIteration: + pass + except Exception as exc: + errors.append(exc) + else: + errors.append(RuntimeError(f"{it!r} has more than one 'yield'")) + if errors: + if len(errors) == 1: + raise errors[0] + else: + raise CleanupError("Multiple errors on cleanup stage", errors) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_exceptions.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_exceptions.py new file mode 100644 index 000000000..ee2c1e72d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_exceptions.py @@ -0,0 +1,452 @@ +import warnings +from typing import Any, Dict, Iterable, List, Optional, Set # noqa + +from yarl import URL + +from .typedefs import LooseHeaders, StrOrURL +from .web_response import Response + +__all__ = ( + 
"HTTPException", + "HTTPError", + "HTTPRedirection", + "HTTPSuccessful", + "HTTPOk", + "HTTPCreated", + "HTTPAccepted", + "HTTPNonAuthoritativeInformation", + "HTTPNoContent", + "HTTPResetContent", + "HTTPPartialContent", + "HTTPMove", + "HTTPMultipleChoices", + "HTTPMovedPermanently", + "HTTPFound", + "HTTPSeeOther", + "HTTPNotModified", + "HTTPUseProxy", + "HTTPTemporaryRedirect", + "HTTPPermanentRedirect", + "HTTPClientError", + "HTTPBadRequest", + "HTTPUnauthorized", + "HTTPPaymentRequired", + "HTTPForbidden", + "HTTPNotFound", + "HTTPMethodNotAllowed", + "HTTPNotAcceptable", + "HTTPProxyAuthenticationRequired", + "HTTPRequestTimeout", + "HTTPConflict", + "HTTPGone", + "HTTPLengthRequired", + "HTTPPreconditionFailed", + "HTTPRequestEntityTooLarge", + "HTTPRequestURITooLong", + "HTTPUnsupportedMediaType", + "HTTPRequestRangeNotSatisfiable", + "HTTPExpectationFailed", + "HTTPMisdirectedRequest", + "HTTPUnprocessableEntity", + "HTTPFailedDependency", + "HTTPUpgradeRequired", + "HTTPPreconditionRequired", + "HTTPTooManyRequests", + "HTTPRequestHeaderFieldsTooLarge", + "HTTPUnavailableForLegalReasons", + "HTTPServerError", + "HTTPInternalServerError", + "HTTPNotImplemented", + "HTTPBadGateway", + "HTTPServiceUnavailable", + "HTTPGatewayTimeout", + "HTTPVersionNotSupported", + "HTTPVariantAlsoNegotiates", + "HTTPInsufficientStorage", + "HTTPNotExtended", + "HTTPNetworkAuthenticationRequired", +) + + +class NotAppKeyWarning(UserWarning): + """Warning when not using AppKey in Application.""" + + +############################################################ +# HTTP Exceptions +############################################################ + + +class HTTPException(Response, Exception): + + # You should set in subclasses: + # status = 200 + + status_code = -1 + empty_body = False + + __http_exception__ = True + + def __init__( + self, + *, + headers: Optional[LooseHeaders] = None, + reason: Optional[str] = None, + body: Any = None, + text: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: + if body is not None: + warnings.warn( + "body argument is deprecated for http web exceptions", + DeprecationWarning, + ) + Response.__init__( + self, + status=self.status_code, + headers=headers, + reason=reason, + body=body, + text=text, + content_type=content_type, + ) + Exception.__init__(self, self.reason) + if self.body is None and not self.empty_body: + self.text = f"{self.status}: {self.reason}" + + def __bool__(self) -> bool: + return True + + +class HTTPError(HTTPException): + """Base class for exceptions with status codes in the 400s and 500s.""" + + +class HTTPRedirection(HTTPException): + """Base class for exceptions with status codes in the 300s.""" + + +class HTTPSuccessful(HTTPException): + """Base class for exceptions with status codes in the 200s.""" + + +class HTTPOk(HTTPSuccessful): + status_code = 200 + + +class HTTPCreated(HTTPSuccessful): + status_code = 201 + + +class HTTPAccepted(HTTPSuccessful): + status_code = 202 + + +class HTTPNonAuthoritativeInformation(HTTPSuccessful): + status_code = 203 + + +class HTTPNoContent(HTTPSuccessful): + status_code = 204 + empty_body = True + + +class HTTPResetContent(HTTPSuccessful): + status_code = 205 + empty_body = True + + +class HTTPPartialContent(HTTPSuccessful): + status_code = 206 + + +############################################################ +# 3xx redirection +############################################################ + + +class HTTPMove(HTTPRedirection): + def __init__( + self, + location: StrOrURL, + *, + headers: 
Optional[LooseHeaders] = None, + reason: Optional[str] = None, + body: Any = None, + text: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: + if not location: + raise ValueError("HTTP redirects need a location to redirect to.") + super().__init__( + headers=headers, + reason=reason, + body=body, + text=text, + content_type=content_type, + ) + self.headers["Location"] = str(URL(location)) + self.location = location + + +class HTTPMultipleChoices(HTTPMove): + status_code = 300 + + +class HTTPMovedPermanently(HTTPMove): + status_code = 301 + + +class HTTPFound(HTTPMove): + status_code = 302 + + +# This one is safe after a POST (the redirected location will be +# retrieved with GET): +class HTTPSeeOther(HTTPMove): + status_code = 303 + + +class HTTPNotModified(HTTPRedirection): + # FIXME: this should include a date or etag header + status_code = 304 + empty_body = True + + +class HTTPUseProxy(HTTPMove): + # Not a move, but looks a little like one + status_code = 305 + + +class HTTPTemporaryRedirect(HTTPMove): + status_code = 307 + + +class HTTPPermanentRedirect(HTTPMove): + status_code = 308 + + +############################################################ +# 4xx client error +############################################################ + + +class HTTPClientError(HTTPError): + pass + + +class HTTPBadRequest(HTTPClientError): + status_code = 400 + + +class HTTPUnauthorized(HTTPClientError): + status_code = 401 + + +class HTTPPaymentRequired(HTTPClientError): + status_code = 402 + + +class HTTPForbidden(HTTPClientError): + status_code = 403 + + +class HTTPNotFound(HTTPClientError): + status_code = 404 + + +class HTTPMethodNotAllowed(HTTPClientError): + status_code = 405 + + def __init__( + self, + method: str, + allowed_methods: Iterable[str], + *, + headers: Optional[LooseHeaders] = None, + reason: Optional[str] = None, + body: Any = None, + text: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: + allow = ",".join(sorted(allowed_methods)) + super().__init__( + headers=headers, + reason=reason, + body=body, + text=text, + content_type=content_type, + ) + self.headers["Allow"] = allow + self.allowed_methods: Set[str] = set(allowed_methods) + self.method = method.upper() + + +class HTTPNotAcceptable(HTTPClientError): + status_code = 406 + + +class HTTPProxyAuthenticationRequired(HTTPClientError): + status_code = 407 + + +class HTTPRequestTimeout(HTTPClientError): + status_code = 408 + + +class HTTPConflict(HTTPClientError): + status_code = 409 + + +class HTTPGone(HTTPClientError): + status_code = 410 + + +class HTTPLengthRequired(HTTPClientError): + status_code = 411 + + +class HTTPPreconditionFailed(HTTPClientError): + status_code = 412 + + +class HTTPRequestEntityTooLarge(HTTPClientError): + status_code = 413 + + def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None: + kwargs.setdefault( + "text", + "Maximum request body size {} exceeded, " + "actual body size {}".format(max_size, actual_size), + ) + super().__init__(**kwargs) + + +class HTTPRequestURITooLong(HTTPClientError): + status_code = 414 + + +class HTTPUnsupportedMediaType(HTTPClientError): + status_code = 415 + + +class HTTPRequestRangeNotSatisfiable(HTTPClientError): + status_code = 416 + + +class HTTPExpectationFailed(HTTPClientError): + status_code = 417 + + +class HTTPMisdirectedRequest(HTTPClientError): + status_code = 421 + + +class HTTPUnprocessableEntity(HTTPClientError): + status_code = 422 + + +class HTTPFailedDependency(HTTPClientError): + status_code = 
424 + + +class HTTPUpgradeRequired(HTTPClientError): + status_code = 426 + + +class HTTPPreconditionRequired(HTTPClientError): + status_code = 428 + + +class HTTPTooManyRequests(HTTPClientError): + status_code = 429 + + +class HTTPRequestHeaderFieldsTooLarge(HTTPClientError): + status_code = 431 + + +class HTTPUnavailableForLegalReasons(HTTPClientError): + status_code = 451 + + def __init__( + self, + link: Optional[StrOrURL], + *, + headers: Optional[LooseHeaders] = None, + reason: Optional[str] = None, + body: Any = None, + text: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: + super().__init__( + headers=headers, + reason=reason, + body=body, + text=text, + content_type=content_type, + ) + self._link = None + if link: + self._link = URL(link) + self.headers["Link"] = f'<{str(self._link)}>; rel="blocked-by"' + + @property + def link(self) -> Optional[URL]: + return self._link + + +############################################################ +# 5xx Server Error +############################################################ +# Response status codes beginning with the digit "5" indicate cases in +# which the server is aware that it has erred or is incapable of +# performing the request. Except when responding to a HEAD request, the +# server SHOULD include an entity containing an explanation of the error +# situation, and whether it is a temporary or permanent condition. User +# agents SHOULD display any included entity to the user. These response +# codes are applicable to any request method. + + +class HTTPServerError(HTTPError): + pass + + +class HTTPInternalServerError(HTTPServerError): + status_code = 500 + + +class HTTPNotImplemented(HTTPServerError): + status_code = 501 + + +class HTTPBadGateway(HTTPServerError): + status_code = 502 + + +class HTTPServiceUnavailable(HTTPServerError): + status_code = 503 + + +class HTTPGatewayTimeout(HTTPServerError): + status_code = 504 + + +class HTTPVersionNotSupported(HTTPServerError): + status_code = 505 + + +class HTTPVariantAlsoNegotiates(HTTPServerError): + status_code = 506 + + +class HTTPInsufficientStorage(HTTPServerError): + status_code = 507 + + +class HTTPNotExtended(HTTPServerError): + status_code = 510 + + +class HTTPNetworkAuthenticationRequired(HTTPServerError): + status_code = 511 diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_fileresponse.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_fileresponse.py new file mode 100644 index 000000000..0c23e375d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_fileresponse.py @@ -0,0 +1,357 @@ +import asyncio +import os +import pathlib +import sys +from contextlib import suppress +from mimetypes import MimeTypes +from stat import S_ISREG +from types import MappingProxyType +from typing import ( # noqa + IO, + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Final, + Iterator, + List, + Optional, + Tuple, + Union, + cast, +) + +from . 
import hdrs +from .abc import AbstractStreamWriter +from .helpers import ETAG_ANY, ETag, must_be_empty_body +from .typedefs import LooseHeaders, PathLike +from .web_exceptions import ( + HTTPForbidden, + HTTPNotFound, + HTTPNotModified, + HTTPPartialContent, + HTTPPreconditionFailed, + HTTPRequestRangeNotSatisfiable, +) +from .web_response import StreamResponse + +__all__ = ("FileResponse",) + +if TYPE_CHECKING: + from .web_request import BaseRequest + + +_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]] + + +NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE")) + +CONTENT_TYPES: Final[MimeTypes] = MimeTypes() + +if sys.version_info < (3, 9): + CONTENT_TYPES.encodings_map[".br"] = "br" + +# File extension to IANA encodings map that will be checked in the order defined. +ENCODING_EXTENSIONS = MappingProxyType( + {ext: CONTENT_TYPES.encodings_map[ext] for ext in (".br", ".gz")} +) + +FALLBACK_CONTENT_TYPE = "application/octet-stream" + +# Provide additional MIME type/extension pairs to be recognized. +# https://en.wikipedia.org/wiki/List_of_archive_formats#Compression_only +ADDITIONAL_CONTENT_TYPES = MappingProxyType( + { + "application/gzip": ".gz", + "application/x-brotli": ".br", + "application/x-bzip2": ".bz2", + "application/x-compress": ".Z", + "application/x-xz": ".xz", + } +) + +# Add custom pairs and clear the encodings map so guess_type ignores them. +CONTENT_TYPES.encodings_map.clear() +for content_type, extension in ADDITIONAL_CONTENT_TYPES.items(): + CONTENT_TYPES.add_type(content_type, extension) # type: ignore[attr-defined] + + +class FileResponse(StreamResponse): + """A response object can be used to send files.""" + + def __init__( + self, + path: PathLike, + chunk_size: int = 256 * 1024, + status: int = 200, + reason: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + ) -> None: + super().__init__(status=status, reason=reason, headers=headers) + + self._path = pathlib.Path(path) + self._chunk_size = chunk_size + + async def _sendfile_fallback( + self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int + ) -> AbstractStreamWriter: + # To keep memory usage low,fobj is transferred in chunks + # controlled by the constructor's chunk_size argument. 
+ + chunk_size = self._chunk_size + loop = asyncio.get_event_loop() + + await loop.run_in_executor(None, fobj.seek, offset) + + chunk = await loop.run_in_executor(None, fobj.read, chunk_size) + while chunk: + await writer.write(chunk) + count = count - chunk_size + if count <= 0: + break + chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count)) + + await writer.drain() + return writer + + async def _sendfile( + self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int + ) -> AbstractStreamWriter: + writer = await super().prepare(request) + assert writer is not None + + if NOSENDFILE or self.compression: + return await self._sendfile_fallback(writer, fobj, offset, count) + + loop = request._loop + transport = request.transport + assert transport is not None + + try: + await loop.sendfile(transport, fobj, offset, count) + except NotImplementedError: + return await self._sendfile_fallback(writer, fobj, offset, count) + + await super().write_eof() + return writer + + @staticmethod + def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool: + if len(etags) == 1 and etags[0].value == ETAG_ANY: + return True + return any(etag.value == etag_value for etag in etags if not etag.is_weak) + + async def _not_modified( + self, request: "BaseRequest", etag_value: str, last_modified: float + ) -> Optional[AbstractStreamWriter]: + self.set_status(HTTPNotModified.status_code) + self._length_check = False + self.etag = etag_value # type: ignore[assignment] + self.last_modified = last_modified # type: ignore[assignment] + # Delete any Content-Length headers provided by user. HTTP 304 + # should always have empty response body + return await super().prepare(request) + + async def _precondition_failed( + self, request: "BaseRequest" + ) -> Optional[AbstractStreamWriter]: + self.set_status(HTTPPreconditionFailed.status_code) + self.content_length = 0 + return await super().prepare(request) + + def _get_file_path_stat_encoding( + self, accept_encoding: str + ) -> Tuple[pathlib.Path, os.stat_result, Optional[str]]: + """Return the file path, stat result, and encoding. + + If an uncompressed file is returned, the encoding is set to + :py:data:`None`. + + This method should be called from a thread executor + since it calls os.stat which may block. + """ + file_path = self._path + for file_extension, file_encoding in ENCODING_EXTENSIONS.items(): + if file_encoding not in accept_encoding: + continue + + compressed_path = file_path.with_suffix(file_path.suffix + file_extension) + with suppress(OSError): + # Do not follow symlinks and ignore any non-regular files. + st = compressed_path.lstat() + if S_ISREG(st.st_mode): + return compressed_path, st, file_encoding + + # Fallback to the uncompressed file + return file_path, file_path.stat(), None + + async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: + loop = asyncio.get_running_loop() + # Encoding comparisons should be case-insensitive + # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 + accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() + try: + file_path, st, file_encoding = await loop.run_in_executor( + None, self._get_file_path_stat_encoding, accept_encoding + ) + except OSError: + # Most likely to be FileNotFoundError or OSError for circular + # symlinks in python >= 3.13, so respond with 404. + self.set_status(HTTPNotFound.status_code) + return await super().prepare(request) + + # Forbid special files like sockets, pipes, devices, etc. 
+ if not S_ISREG(st.st_mode): + self.set_status(HTTPForbidden.status_code) + return await super().prepare(request) + + etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" + last_modified = st.st_mtime + + # https://tools.ietf.org/html/rfc7232#section-6 + ifmatch = request.if_match + if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch): + return await self._precondition_failed(request) + + unmodsince = request.if_unmodified_since + if ( + unmodsince is not None + and ifmatch is None + and st.st_mtime > unmodsince.timestamp() + ): + return await self._precondition_failed(request) + + ifnonematch = request.if_none_match + if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch): + return await self._not_modified(request, etag_value, last_modified) + + modsince = request.if_modified_since + if ( + modsince is not None + and ifnonematch is None + and st.st_mtime <= modsince.timestamp() + ): + return await self._not_modified(request, etag_value, last_modified) + + status = self._status + file_size = st.st_size + count = file_size + + start = None + + ifrange = request.if_range + if ifrange is None or st.st_mtime <= ifrange.timestamp(): + # If-Range header check: + # condition = cached date >= last modification date + # return 206 if True else 200. + # if False: + # Range header would not be processed, return 200 + # if True but Range header missing + # return 200 + try: + rng = request.http_range + start = rng.start + end = rng.stop + except ValueError: + # https://tools.ietf.org/html/rfc7233: + # A server generating a 416 (Range Not Satisfiable) response to + # a byte-range request SHOULD send a Content-Range header field + # with an unsatisfied-range value. + # The complete-length in a 416 response indicates the current + # length of the selected representation. + # + # Will do the same below. Many servers ignore this and do not + # send a Content-Range header with HTTP 416 + self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" + self.set_status(HTTPRequestRangeNotSatisfiable.status_code) + return await super().prepare(request) + + # If a range request has been made, convert start, end slice + # notation into file pointer offset and count + if start is not None or end is not None: + if start < 0 and end is None: # return tail of file + start += file_size + if start < 0: + # if Range:bytes=-1000 in request header but file size + # is only 200, there would be trouble without this + start = 0 + count = file_size - start + else: + # rfc7233:If the last-byte-pos value is + # absent, or if the value is greater than or equal to + # the current length of the representation data, + # the byte range is interpreted as the remainder + # of the representation (i.e., the server replaces the + # value of last-byte-pos with a value that is one less than + # the current length of the selected representation). + count = ( + min(end if end is not None else file_size, file_size) - start + ) + + if start >= file_size: + # HTTP 416 should be returned in this case. + # + # According to https://tools.ietf.org/html/rfc7233: + # If a valid byte-range-set includes at least one + # byte-range-spec with a first-byte-pos that is less than + # the current length of the representation, or at least one + # suffix-byte-range-spec with a non-zero suffix-length, + # then the byte-range-set is satisfiable. Otherwise, the + # byte-range-set is unsatisfiable. 
+ self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" + self.set_status(HTTPRequestRangeNotSatisfiable.status_code) + return await super().prepare(request) + + status = HTTPPartialContent.status_code + # Even though you are sending the whole file, you should still + # return a HTTP 206 for a Range request. + self.set_status(status) + + # If the Content-Type header is not already set, guess it based on the + # extension of the request path. The encoding returned by guess_type + # can be ignored since the map was cleared above. + if hdrs.CONTENT_TYPE not in self.headers: + self.content_type = ( + CONTENT_TYPES.guess_type(self._path)[0] or FALLBACK_CONTENT_TYPE + ) + + if file_encoding: + self.headers[hdrs.CONTENT_ENCODING] = file_encoding + self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING + # Disable compression if we are already sending + # a compressed file since we don't want to double + # compress. + self._compression = False + + self.etag = etag_value # type: ignore[assignment] + self.last_modified = st.st_mtime # type: ignore[assignment] + self.content_length = count + + self.headers[hdrs.ACCEPT_RANGES] = "bytes" + + real_start = cast(int, start) + + if status == HTTPPartialContent.status_code: + self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format( + real_start, real_start + count - 1, file_size + ) + + # If we are sending 0 bytes calling sendfile() will throw a ValueError + if count == 0 or must_be_empty_body(request.method, self.status): + return await super().prepare(request) + + try: + fobj = await loop.run_in_executor(None, file_path.open, "rb") + except PermissionError: + self.set_status(HTTPForbidden.status_code) + return await super().prepare(request) + + if start: # be aware that start could be None or int=0 here. + offset = start + else: + offset = 0 + + try: + return await self._sendfile(request, fobj, offset, count) + finally: + await asyncio.shield(loop.run_in_executor(None, fobj.close)) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_log.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_log.py new file mode 100644 index 000000000..633e9e3ae --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_log.py @@ -0,0 +1,213 @@ +import datetime +import functools +import logging +import os +import re +import time as time_mod +from collections import namedtuple +from typing import Any, Callable, Dict, Iterable, List, Tuple # noqa + +from .abc import AbstractAccessLogger +from .web_request import BaseRequest +from .web_response import StreamResponse + +KeyMethod = namedtuple("KeyMethod", "key method") + + +class AccessLogger(AbstractAccessLogger): + """Helper object to log access. 
+ + Usage: + log = logging.getLogger("spam") + log_format = "%a %{User-Agent}i" + access_logger = AccessLogger(log, log_format) + access_logger.log(request, response, time) + + Format: + %% The percent sign + %a Remote IP-address (IP-address of proxy if using reverse proxy) + %t Time when the request was started to process + %P The process ID of the child that serviced the request + %r First line of request + %s Response status code + %b Size of response in bytes, including HTTP headers + %T Time taken to serve the request, in seconds + %Tf Time taken to serve the request, in seconds with floating fraction + in .06f format + %D Time taken to serve the request, in microseconds + %{FOO}i request.headers['FOO'] + %{FOO}o response.headers['FOO'] + %{FOO}e os.environ['FOO'] + + """ + + LOG_FORMAT_MAP = { + "a": "remote_address", + "t": "request_start_time", + "P": "process_id", + "r": "first_request_line", + "s": "response_status", + "b": "response_size", + "T": "request_time", + "Tf": "request_time_frac", + "D": "request_time_micro", + "i": "request_header", + "o": "response_header", + } + + LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"' + FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)") + CLEANUP_RE = re.compile(r"(%[^s])") + _FORMAT_CACHE: Dict[str, Tuple[str, List[KeyMethod]]] = {} + + def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None: + """Initialise the logger. + + logger is a logger object to be used for logging. + log_format is a string with apache compatible log format description. + + """ + super().__init__(logger, log_format=log_format) + + _compiled_format = AccessLogger._FORMAT_CACHE.get(log_format) + if not _compiled_format: + _compiled_format = self.compile_format(log_format) + AccessLogger._FORMAT_CACHE[log_format] = _compiled_format + + self._log_format, self._methods = _compiled_format + + def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]: + """Translate log_format into form usable by modulo formatting + + All known atoms will be replaced with %s + Also methods for formatting of those atoms will be added to + _methods in appropriate order + + For example we have log_format = "%a %t" + This format will be translated to "%s %s" + Also contents of _methods will be + [self._format_a, self._format_t] + These method will be called and results will be passed + to translated string format. 
+ + Each _format_* method receive 'args' which is list of arguments + given to self.log + + Exceptions are _format_e, _format_i and _format_o methods which + also receive key name (by functools.partial) + + """ + # list of (key, method) tuples, we don't use an OrderedDict as users + # can repeat the same key more than once + methods = list() + + for atom in self.FORMAT_RE.findall(log_format): + if atom[1] == "": + format_key1 = self.LOG_FORMAT_MAP[atom[0]] + m = getattr(AccessLogger, "_format_%s" % atom[0]) + key_method = KeyMethod(format_key1, m) + else: + format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1]) + m = getattr(AccessLogger, "_format_%s" % atom[2]) + key_method = KeyMethod(format_key2, functools.partial(m, atom[1])) + + methods.append(key_method) + + log_format = self.FORMAT_RE.sub(r"%s", log_format) + log_format = self.CLEANUP_RE.sub(r"%\1", log_format) + return log_format, methods + + @staticmethod + def _format_i( + key: str, request: BaseRequest, response: StreamResponse, time: float + ) -> str: + if request is None: + return "(no headers)" + + # suboptimal, make istr(key) once + return request.headers.get(key, "-") + + @staticmethod + def _format_o( + key: str, request: BaseRequest, response: StreamResponse, time: float + ) -> str: + # suboptimal, make istr(key) once + return response.headers.get(key, "-") + + @staticmethod + def _format_a(request: BaseRequest, response: StreamResponse, time: float) -> str: + if request is None: + return "-" + ip = request.remote + return ip if ip is not None else "-" + + @staticmethod + def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str: + tz = datetime.timezone(datetime.timedelta(seconds=-time_mod.timezone)) + now = datetime.datetime.now(tz) + start_time = now - datetime.timedelta(seconds=time) + return start_time.strftime("[%d/%b/%Y:%H:%M:%S %z]") + + @staticmethod + def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str: + return "<%s>" % os.getpid() + + @staticmethod + def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str: + if request is None: + return "-" + return "{} {} HTTP/{}.{}".format( + request.method, + request.path_qs, + request.version.major, + request.version.minor, + ) + + @staticmethod + def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int: + return response.status + + @staticmethod + def _format_b(request: BaseRequest, response: StreamResponse, time: float) -> int: + return response.body_length + + @staticmethod + def _format_T(request: BaseRequest, response: StreamResponse, time: float) -> str: + return str(round(time)) + + @staticmethod + def _format_Tf(request: BaseRequest, response: StreamResponse, time: float) -> str: + return "%06f" % time + + @staticmethod + def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> str: + return str(round(time * 1000000)) + + def _format_line( + self, request: BaseRequest, response: StreamResponse, time: float + ) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]: + return [(key, method(request, response, time)) for key, method in self._methods] + + def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: + if not self.logger.isEnabledFor(logging.INFO): + # Avoid formatting the log line if it will not be emitted. 
+ return + try: + fmt_info = self._format_line(request, response, time) + + values = list() + extra = dict() + for key, value in fmt_info: + values.append(value) + + if key.__class__ is str: + extra[key] = value + else: + k1, k2 = key # type: ignore[misc] + dct = extra.get(k1, {}) # type: ignore[var-annotated,has-type] + dct[k2] = value # type: ignore[index,has-type] + extra[k1] = dct # type: ignore[has-type,assignment] + + self.logger.info(self._log_format % tuple(values), extra=extra) + except Exception: + self.logger.exception("Error in logging") diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_middlewares.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_middlewares.py new file mode 100644 index 000000000..5da1533c0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_middlewares.py @@ -0,0 +1,116 @@ +import re +from typing import TYPE_CHECKING, Tuple, Type, TypeVar + +from .typedefs import Handler, Middleware +from .web_exceptions import HTTPMove, HTTPPermanentRedirect +from .web_request import Request +from .web_response import StreamResponse +from .web_urldispatcher import SystemRoute + +__all__ = ( + "middleware", + "normalize_path_middleware", +) + +if TYPE_CHECKING: + from .web_app import Application + +_Func = TypeVar("_Func") + + +async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]: + alt_request = request.clone(rel_url=path) + + match_info = await request.app.router.resolve(alt_request) + alt_request._match_info = match_info + + if match_info.http_exception is None: + return True, alt_request + + return False, request + + +def middleware(f: _Func) -> _Func: + f.__middleware_version__ = 1 # type: ignore[attr-defined] + return f + + +def normalize_path_middleware( + *, + append_slash: bool = True, + remove_slash: bool = False, + merge_slashes: bool = True, + redirect_class: Type[HTTPMove] = HTTPPermanentRedirect, +) -> Middleware: + """Factory for producing a middleware that normalizes the path of a request. + + Normalizing means: + - Add or remove a trailing slash to the path. + - Double slashes are replaced by one. + + The middleware returns as soon as it finds a path that resolves + correctly. The order if both merge and append/remove are enabled is + 1) merge slashes + 2) append/remove slash + 3) both merge slashes and append/remove slash. + If the path resolves with at least one of those conditions, it will + redirect to the new path. + + Only one of `append_slash` and `remove_slash` can be enabled. If both + are `True` the factory will raise an assertion error + + If `append_slash` is `True` the middleware will append a slash when + needed. If a resource is defined with trailing slash and the request + comes without it, it will append it automatically. + + If `remove_slash` is `True`, `append_slash` must be `False`. When enabled + the middleware will remove trailing slashes and redirect if the resource + is defined + + If merge_slashes is True, merge multiple consecutive slashes in the + path into one. + """ + correct_configuration = not (append_slash and remove_slash) + assert correct_configuration, "Cannot both remove and append slash" + + @middleware + async def impl(request: Request, handler: Handler) -> StreamResponse: + if isinstance(request.match_info.route, SystemRoute): + paths_to_check = [] + if "?" in request.raw_path: + path, query = request.raw_path.split("?", 1) + query = "?" 
+ query + else: + query = "" + path = request.raw_path + + if merge_slashes: + paths_to_check.append(re.sub("//+", "/", path)) + if append_slash and not request.path.endswith("/"): + paths_to_check.append(path + "/") + if remove_slash and request.path.endswith("/"): + paths_to_check.append(path[:-1]) + if merge_slashes and append_slash: + paths_to_check.append(re.sub("//+", "/", path + "/")) + if merge_slashes and remove_slash: + merged_slashes = re.sub("//+", "/", path) + paths_to_check.append(merged_slashes[:-1]) + + for path in paths_to_check: + path = re.sub("^//+", "/", path) # SECURITY: GHSA-v6wp-4m6f-gcjg + resolves, request = await _check_request_resolves(request, path) + if resolves: + raise redirect_class(request.raw_path + query) + + return await handler(request) + + return impl + + +def _fix_request_current_app(app: "Application") -> Middleware: + @middleware + async def impl(request: Request, handler: Handler) -> StreamResponse: + with request.match_info.set_current_app(app): + return await handler(request) + + return impl diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_protocol.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_protocol.py new file mode 100644 index 000000000..9ba05a08e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_protocol.py @@ -0,0 +1,708 @@ +import asyncio +import asyncio.streams +import traceback +import warnings +from collections import deque +from contextlib import suppress +from html import escape as html_escape +from http import HTTPStatus +from logging import Logger +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Deque, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import attr +import yarl + +from .abc import AbstractAccessLogger, AbstractStreamWriter +from .base_protocol import BaseProtocol +from .helpers import ceil_timeout, set_exception +from .http import ( + HttpProcessingError, + HttpRequestParser, + HttpVersion10, + RawRequestMessage, + StreamWriter, +) +from .log import access_logger, server_logger +from .streams import EMPTY_PAYLOAD, StreamReader +from .tcp_helpers import tcp_keepalive +from .web_exceptions import HTTPException +from .web_log import AccessLogger +from .web_request import BaseRequest +from .web_response import Response, StreamResponse + +__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError") + +if TYPE_CHECKING: + from .web_server import Server + + +_RequestFactory = Callable[ + [ + RawRequestMessage, + StreamReader, + "RequestHandler", + AbstractStreamWriter, + "asyncio.Task[None]", + ], + BaseRequest, +] + +_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]] + +ERROR = RawRequestMessage( + "UNKNOWN", + "/", + HttpVersion10, + {}, # type: ignore[arg-type] + {}, # type: ignore[arg-type] + True, + None, + False, + False, + yarl.URL("/"), +) + + +class RequestPayloadError(Exception): + """Payload parsing error.""" + + +class PayloadAccessError(Exception): + """Payload was accessed after response was sent.""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class _ErrInfo: + status: int + exc: BaseException + message: str + + +_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader] + + +class RequestHandler(BaseProtocol): + """HTTP protocol implementation. + + RequestHandler handles incoming HTTP request. It reads request line, + request headers and request payload and calls handle_request() method. + By default it always returns with 404 response. 
+ + RequestHandler handles errors in incoming request, like bad + status line, bad headers or incomplete payload. If any error occurs, + connection gets closed. + + keepalive_timeout -- number of seconds before closing + keep-alive connection + + tcp_keepalive -- TCP keep-alive is on, default is on + + debug -- enable debug mode + + logger -- custom logger object + + access_log_class -- custom class for access_logger + + access_log -- custom logging object + + access_log_format -- access log format string + + loop -- Optional event loop + + max_line_size -- Optional maximum header line size + + max_field_size -- Optional maximum header field size + + max_headers -- Optional maximum header size + + timeout_ceil_threshold -- Optional value to specify + threshold to ceil() timeout + values + + """ + + KEEPALIVE_RESCHEDULE_DELAY = 1 + + __slots__ = ( + "_request_count", + "_keepalive", + "_manager", + "_request_handler", + "_request_factory", + "_tcp_keepalive", + "_keepalive_time", + "_keepalive_handle", + "_keepalive_timeout", + "_lingering_time", + "_messages", + "_message_tail", + "_handler_waiter", + "_waiter", + "_task_handler", + "_upgrade", + "_payload_parser", + "_request_parser", + "_reading_paused", + "logger", + "debug", + "access_log", + "access_logger", + "_close", + "_force_close", + "_current_request", + "_timeout_ceil_threshold", + ) + + def __init__( + self, + manager: "Server", + *, + loop: asyncio.AbstractEventLoop, + keepalive_timeout: float = 75.0, # NGINX default is 75 secs + tcp_keepalive: bool = True, + logger: Logger = server_logger, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log: Logger = access_logger, + access_log_format: str = AccessLogger.LOG_FORMAT, + debug: bool = False, + max_line_size: int = 8190, + max_headers: int = 32768, + max_field_size: int = 8190, + lingering_time: float = 10.0, + read_bufsize: int = 2**16, + auto_decompress: bool = True, + timeout_ceil_threshold: float = 5, + ): + super().__init__(loop) + + self._request_count = 0 + self._keepalive = False + self._current_request: Optional[BaseRequest] = None + self._manager: Optional[Server] = manager + self._request_handler: Optional[_RequestHandler] = manager.request_handler + self._request_factory: Optional[_RequestFactory] = manager.request_factory + + self._tcp_keepalive = tcp_keepalive + # placeholder to be replaced on keepalive timeout setup + self._keepalive_time = 0.0 + self._keepalive_handle: Optional[asyncio.Handle] = None + self._keepalive_timeout = keepalive_timeout + self._lingering_time = float(lingering_time) + + self._messages: Deque[_MsgType] = deque() + self._message_tail = b"" + + self._waiter: Optional[asyncio.Future[None]] = None + self._handler_waiter: Optional[asyncio.Future[None]] = None + self._task_handler: Optional[asyncio.Task[None]] = None + + self._upgrade = False + self._payload_parser: Any = None + self._request_parser: Optional[HttpRequestParser] = HttpRequestParser( + self, + loop, + read_bufsize, + max_line_size=max_line_size, + max_field_size=max_field_size, + max_headers=max_headers, + payload_exception=RequestPayloadError, + auto_decompress=auto_decompress, + ) + + self._timeout_ceil_threshold: float = 5 + try: + self._timeout_ceil_threshold = float(timeout_ceil_threshold) + except (TypeError, ValueError): + pass + + self.logger = logger + self.debug = debug + self.access_log = access_log + if access_log: + self.access_logger: Optional[AbstractAccessLogger] = access_log_class( + access_log, access_log_format + ) + else: + 
self.access_logger = None + + self._close = False + self._force_close = False + + def __repr__(self) -> str: + return "<{} {}>".format( + self.__class__.__name__, + "connected" if self.transport is not None else "disconnected", + ) + + @property + def keepalive_timeout(self) -> float: + return self._keepalive_timeout + + async def shutdown(self, timeout: Optional[float] = 15.0) -> None: + """Do worker process exit preparations. + + We need to clean up everything and stop accepting requests. + It is especially important for keep-alive connections. + """ + self._force_close = True + + if self._keepalive_handle is not None: + self._keepalive_handle.cancel() + + if self._waiter: + self._waiter.cancel() + + # Wait for graceful handler completion + if self._handler_waiter is not None: + with suppress(asyncio.CancelledError, asyncio.TimeoutError): + async with ceil_timeout(timeout): + await self._handler_waiter + # Then cancel handler and wait + with suppress(asyncio.CancelledError, asyncio.TimeoutError): + async with ceil_timeout(timeout): + if self._current_request is not None: + self._current_request._cancel(asyncio.CancelledError()) + + if self._task_handler is not None and not self._task_handler.done(): + await self._task_handler + + # force-close non-idle handler + if self._task_handler is not None: + self._task_handler.cancel() + + if self.transport is not None: + self.transport.close() + self.transport = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + super().connection_made(transport) + + real_transport = cast(asyncio.Transport, transport) + if self._tcp_keepalive: + tcp_keepalive(real_transport) + + self._task_handler = self._loop.create_task(self.start()) + assert self._manager is not None + self._manager.connection_made(self, real_transport) + + def connection_lost(self, exc: Optional[BaseException]) -> None: + if self._manager is None: + return + self._manager.connection_lost(self, exc) + + super().connection_lost(exc) + + # Grab value before setting _manager to None. 
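+ # `handler_cancellation` has to be read now: `self._manager` is set to
+ # None a few lines below, before the cancellation check that uses it.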
+ handler_cancellation = self._manager.handler_cancellation + + self._manager = None + self._force_close = True + self._request_factory = None + self._request_handler = None + self._request_parser = None + + if self._keepalive_handle is not None: + self._keepalive_handle.cancel() + + if self._current_request is not None: + if exc is None: + exc = ConnectionResetError("Connection lost") + self._current_request._cancel(exc) + + if self._waiter is not None: + self._waiter.cancel() + + if handler_cancellation and self._task_handler is not None: + self._task_handler.cancel() + + self._task_handler = None + + if self._payload_parser is not None: + self._payload_parser.feed_eof() + self._payload_parser = None + + def set_parser(self, parser: Any) -> None: + # Actual type is WebReader + assert self._payload_parser is None + + self._payload_parser = parser + + if self._message_tail: + self._payload_parser.feed_data(self._message_tail) + self._message_tail = b"" + + def eof_received(self) -> None: + pass + + def data_received(self, data: bytes) -> None: + if self._force_close or self._close: + return + # parse http messages + messages: Sequence[_MsgType] + if self._payload_parser is None and not self._upgrade: + assert self._request_parser is not None + try: + messages, upgraded, tail = self._request_parser.feed_data(data) + except HttpProcessingError as exc: + messages = [ + (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD) + ] + upgraded = False + tail = b"" + + for msg, payload in messages or (): + self._request_count += 1 + self._messages.append((msg, payload)) + + waiter = self._waiter + if messages and waiter is not None and not waiter.done(): + # don't set result twice + waiter.set_result(None) + + self._upgrade = upgraded + if upgraded and tail: + self._message_tail = tail + + # no parser, just store + elif self._payload_parser is None and self._upgrade and data: + self._message_tail += data + + # feed payload + elif data: + eof, tail = self._payload_parser.feed_data(data) + if eof: + self.close() + + def keep_alive(self, val: bool) -> None: + """Set keep-alive connection mode. + + :param bool val: new state. + """ + self._keepalive = val + if self._keepalive_handle: + self._keepalive_handle.cancel() + self._keepalive_handle = None + + def close(self) -> None: + """Close connection. + + Stop accepting new pipelining messages and close + connection when handlers done processing messages. 
+ """ + self._close = True + if self._waiter: + self._waiter.cancel() + + def force_close(self) -> None: + """Forcefully close connection.""" + self._force_close = True + if self._waiter: + self._waiter.cancel() + if self.transport is not None: + self.transport.close() + self.transport = None + + def log_access( + self, request: BaseRequest, response: StreamResponse, time: float + ) -> None: + if self.access_logger is not None: + self.access_logger.log(request, response, self._loop.time() - time) + + def log_debug(self, *args: Any, **kw: Any) -> None: + if self.debug: + self.logger.debug(*args, **kw) + + def log_exception(self, *args: Any, **kw: Any) -> None: + self.logger.exception(*args, **kw) + + def _process_keepalive(self) -> None: + if self._force_close or not self._keepalive: + return + + next = self._keepalive_time + self._keepalive_timeout + + # handler in idle state + if self._waiter: + if self._loop.time() > next: + self.force_close() + return + + # not all request handlers are done, + # reschedule itself to next second + self._keepalive_handle = self._loop.call_later( + self.KEEPALIVE_RESCHEDULE_DELAY, + self._process_keepalive, + ) + + async def _handle_request( + self, + request: BaseRequest, + start_time: float, + request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]], + ) -> Tuple[StreamResponse, bool]: + self._handler_waiter = self._loop.create_future() + try: + try: + self._current_request = request + resp = await request_handler(request) + finally: + self._current_request = None + except HTTPException as exc: + resp = exc + reset = await self.finish_response(request, resp, start_time) + except asyncio.CancelledError: + raise + except asyncio.TimeoutError as exc: + self.log_debug("Request handler timed out.", exc_info=exc) + resp = self.handle_error(request, 504) + reset = await self.finish_response(request, resp, start_time) + except Exception as exc: + resp = self.handle_error(request, 500, exc) + reset = await self.finish_response(request, resp, start_time) + else: + # Deprecation warning (See #2415) + if getattr(resp, "__http_exception__", False): + warnings.warn( + "returning HTTPException object is deprecated " + "(#2415) and will be removed, " + "please raise the exception instead", + DeprecationWarning, + ) + + reset = await self.finish_response(request, resp, start_time) + finally: + self._handler_waiter.set_result(None) + + return resp, reset + + async def start(self) -> None: + """Process incoming request. + + It reads request line, request headers and request payload, then + calls handle_request() method. Subclass has to override + handle_request(). start() handles various exceptions in request + or response handling. Connection is being closed always unless + keep_alive(True) specified. 
+ """ + loop = self._loop + handler = self._task_handler + assert handler is not None + manager = self._manager + assert manager is not None + keepalive_timeout = self._keepalive_timeout + resp = None + assert self._request_factory is not None + assert self._request_handler is not None + + while not self._force_close: + if not self._messages: + try: + # wait for next request + self._waiter = loop.create_future() + await self._waiter + except asyncio.CancelledError: + break + finally: + self._waiter = None + + message, payload = self._messages.popleft() + + start = loop.time() + + manager.requests_count += 1 + writer = StreamWriter(self, loop) + if isinstance(message, _ErrInfo): + # make request_factory work + request_handler = self._make_error_handler(message) + message = ERROR + else: + request_handler = self._request_handler + + request = self._request_factory(message, payload, self, writer, handler) + try: + # a new task is used for copy context vars (#3406) + task = self._loop.create_task( + self._handle_request(request, start, request_handler) + ) + try: + resp, reset = await task + except (asyncio.CancelledError, ConnectionError): + self.log_debug("Ignored premature client disconnection") + break + + # Drop the processed task from asyncio.Task.all_tasks() early + del task + if reset: + self.log_debug("Ignored premature client disconnection 2") + break + + # notify server about keep-alive + self._keepalive = bool(resp.keep_alive) + + # check payload + if not payload.is_eof(): + lingering_time = self._lingering_time + if not self._force_close and lingering_time: + self.log_debug( + "Start lingering close timer for %s sec.", lingering_time + ) + + now = loop.time() + end_t = now + lingering_time + + with suppress(asyncio.TimeoutError, asyncio.CancelledError): + while not payload.is_eof() and now < end_t: + async with ceil_timeout(end_t - now): + # read and ignore + await payload.readany() + now = loop.time() + + # if payload still uncompleted + if not payload.is_eof() and not self._force_close: + self.log_debug("Uncompleted request.") + self.close() + + set_exception(payload, PayloadAccessError()) + + except asyncio.CancelledError: + self.log_debug("Ignored premature client disconnection ") + break + except RuntimeError as exc: + if self.debug: + self.log_exception("Unhandled runtime exception", exc_info=exc) + self.force_close() + except Exception as exc: + self.log_exception("Unhandled exception", exc_info=exc) + self.force_close() + finally: + if self.transport is None and resp is not None: + self.log_debug("Ignored premature client disconnection.") + elif not self._force_close: + if self._keepalive and not self._close: + # start keep-alive timer + if keepalive_timeout is not None: + now = self._loop.time() + self._keepalive_time = now + if self._keepalive_handle is None: + self._keepalive_handle = loop.call_at( + now + keepalive_timeout, self._process_keepalive + ) + else: + break + + # remove handler, close transport if no handlers left + if not self._force_close: + self._task_handler = None + if self.transport is not None: + self.transport.close() + + async def finish_response( + self, request: BaseRequest, resp: StreamResponse, start_time: float + ) -> bool: + """Prepare the response and write_eof, then log access. + + This has to + be called within the context of any exception so the access logger + can get exception information. Returns True if the client disconnects + prematurely. 
+ """ + request._finish() + if self._request_parser is not None: + self._request_parser.set_upgraded(False) + self._upgrade = False + if self._message_tail: + self._request_parser.feed_data(self._message_tail) + self._message_tail = b"" + try: + prepare_meth = resp.prepare + except AttributeError: + if resp is None: + raise RuntimeError("Missing return " "statement on request handler") + else: + raise RuntimeError( + "Web-handler should return " + "a response instance, " + "got {!r}".format(resp) + ) + try: + await prepare_meth(request) + await resp.write_eof() + except ConnectionError: + self.log_access(request, resp, start_time) + return True + else: + self.log_access(request, resp, start_time) + return False + + def handle_error( + self, + request: BaseRequest, + status: int = 500, + exc: Optional[BaseException] = None, + message: Optional[str] = None, + ) -> StreamResponse: + """Handle errors. + + Returns HTTP response with specific status code. Logs additional + information. It always closes current connection. + """ + self.log_exception("Error handling request", exc_info=exc) + + # some data already got sent, connection is broken + if request.writer.output_size > 0: + raise ConnectionError( + "Response is sent already, cannot send another response " + "with the error message" + ) + + ct = "text/plain" + if status == HTTPStatus.INTERNAL_SERVER_ERROR: + title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR) + msg = HTTPStatus.INTERNAL_SERVER_ERROR.description + tb = None + if self.debug: + with suppress(Exception): + tb = traceback.format_exc() + + if "text/html" in request.headers.get("Accept", ""): + if tb: + tb = html_escape(tb) + msg = f"
<h2>Traceback:</h2>\n<pre>{tb}</pre>" + message = ( + "<html><head>" + "<title>{title}</title>" + "</head><body>\n<h1>{title}</h1>
" + "\n{msg}\n\n" + ).format(title=title, msg=msg) + ct = "text/html" + else: + if tb: + msg = tb + message = title + "\n\n" + msg + + resp = Response(status=status, text=message, content_type=ct) + resp.force_close() + + return resp + + def _make_error_handler( + self, err_info: _ErrInfo + ) -> Callable[[BaseRequest], Awaitable[StreamResponse]]: + async def handler(request: BaseRequest) -> StreamResponse: + return self.handle_error( + request, err_info.status, err_info.exc, err_info.message + ) + + return handler diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_request.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_request.py new file mode 100644 index 000000000..a485f0dce --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_request.py @@ -0,0 +1,914 @@ +import asyncio +import datetime +import io +import re +import socket +import string +import tempfile +import types +import warnings +from http.cookies import SimpleCookie +from types import MappingProxyType +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Final, + Iterator, + Mapping, + MutableMapping, + Optional, + Pattern, + Tuple, + Union, + cast, +) +from urllib.parse import parse_qsl + +import attr +from multidict import ( + CIMultiDict, + CIMultiDictProxy, + MultiDict, + MultiDictProxy, + MultiMapping, +) +from yarl import URL + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import ( + _SENTINEL, + DEBUG, + ETAG_ANY, + LIST_QUOTED_ETAG_RE, + ChainMapProxy, + ETag, + HeadersMixin, + parse_http_date, + reify, + sentinel, + set_exception, +) +from .http_parser import RawRequestMessage +from .http_writer import HttpVersion +from .multipart import BodyPartReader, MultipartReader +from .streams import EmptyStreamReader, StreamReader +from .typedefs import ( + DEFAULT_JSON_DECODER, + JSONDecoder, + LooseHeaders, + RawHeaders, + StrOrURL, +) +from .web_exceptions import HTTPRequestEntityTooLarge +from .web_response import StreamResponse + +__all__ = ("BaseRequest", "FileField", "Request") + + +if TYPE_CHECKING: + from .web_app import Application + from .web_protocol import RequestHandler + from .web_urldispatcher import UrlMappingMatchInfo + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class FileField: + name: str + filename: str + file: io.BufferedReader + content_type: str + headers: "CIMultiDictProxy[str]" + + +_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-" +# '-' at the end to prevent interpretation as range in a char class + +_TOKEN: Final[str] = rf"[{_TCHAR}]+" + +_QDTEXT: Final[str] = r"[{}]".format( + r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F))) +) +# qdtext includes 0x5C to escape 0x5D ('\]') +# qdtext excludes obs-text (because obsoleted, and encoding not specified) + +_QUOTED_PAIR: Final[str] = r"\\[\t !-~]" + +_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format( + qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR +) + +_FORWARDED_PAIR: Final[str] = ( + r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format( + token=_TOKEN, quoted_string=_QUOTED_STRING + ) +) + +_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])") +# same pattern as _QUOTED_PAIR but contains a capture group + +_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR) + +############################################################ +# HTTP Request +############################################################ + + +class 
BaseRequest(MutableMapping[str, Any], HeadersMixin): + + POST_METHODS = { + hdrs.METH_PATCH, + hdrs.METH_POST, + hdrs.METH_PUT, + hdrs.METH_TRACE, + hdrs.METH_DELETE, + } + + ATTRS = HeadersMixin.ATTRS | frozenset( + [ + "_message", + "_protocol", + "_payload_writer", + "_payload", + "_headers", + "_method", + "_version", + "_rel_url", + "_post", + "_read_bytes", + "_state", + "_cache", + "_task", + "_client_max_size", + "_loop", + "_transport_sslcontext", + "_transport_peername", + ] + ) + + def __init__( + self, + message: RawRequestMessage, + payload: StreamReader, + protocol: "RequestHandler", + payload_writer: AbstractStreamWriter, + task: "asyncio.Task[None]", + loop: asyncio.AbstractEventLoop, + *, + client_max_size: int = 1024**2, + state: Optional[Dict[str, Any]] = None, + scheme: Optional[str] = None, + host: Optional[str] = None, + remote: Optional[str] = None, + ) -> None: + if state is None: + state = {} + self._message = message + self._protocol = protocol + self._payload_writer = payload_writer + + self._payload = payload + self._headers = message.headers + self._method = message.method + self._version = message.version + self._cache: Dict[str, Any] = {} + url = message.url + if url.is_absolute(): + # absolute URL is given, + # override auto-calculating url, host, and scheme + # all other properties should be good + self._cache["url"] = url + self._cache["host"] = url.host + self._cache["scheme"] = url.scheme + self._rel_url = url.relative() + else: + self._rel_url = message.url + self._post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None + self._read_bytes: Optional[bytes] = None + + self._state = state + self._task = task + self._client_max_size = client_max_size + self._loop = loop + + transport = self._protocol.transport + assert transport is not None + self._transport_sslcontext = transport.get_extra_info("sslcontext") + self._transport_peername = transport.get_extra_info("peername") + + if scheme is not None: + self._cache["scheme"] = scheme + if host is not None: + self._cache["host"] = host + if remote is not None: + self._cache["remote"] = remote + + def clone( + self, + *, + method: Union[str, _SENTINEL] = sentinel, + rel_url: Union[StrOrURL, _SENTINEL] = sentinel, + headers: Union[LooseHeaders, _SENTINEL] = sentinel, + scheme: Union[str, _SENTINEL] = sentinel, + host: Union[str, _SENTINEL] = sentinel, + remote: Union[str, _SENTINEL] = sentinel, + client_max_size: Union[int, _SENTINEL] = sentinel, + ) -> "BaseRequest": + """Clone itself with replacement some attributes. + + Creates and returns a new instance of Request object. If no parameters + are given, an exact copy is returned. If a parameter is not passed, it + will reuse the one from the current request object. 
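+
+ For example (illustrative values), ``request.clone(scheme="https",
+ host="example.com")`` returns a new request whose ``.url`` and ``.host``
+ reflect the overridden values.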
+ """ + if self._read_bytes: + raise RuntimeError("Cannot clone request " "after reading its content") + + dct: Dict[str, Any] = {} + if method is not sentinel: + dct["method"] = method + if rel_url is not sentinel: + new_url: URL = URL(rel_url) + dct["url"] = new_url + dct["path"] = str(new_url) + if headers is not sentinel: + # a copy semantic + dct["headers"] = CIMultiDictProxy(CIMultiDict(headers)) + dct["raw_headers"] = tuple( + (k.encode("utf-8"), v.encode("utf-8")) + for k, v in dct["headers"].items() + ) + + message = self._message._replace(**dct) + + kwargs = {} + if scheme is not sentinel: + kwargs["scheme"] = scheme + if host is not sentinel: + kwargs["host"] = host + if remote is not sentinel: + kwargs["remote"] = remote + if client_max_size is sentinel: + client_max_size = self._client_max_size + + return self.__class__( + message, + self._payload, + self._protocol, + self._payload_writer, + self._task, + self._loop, + client_max_size=client_max_size, + state=self._state.copy(), + **kwargs, + ) + + @property + def task(self) -> "asyncio.Task[None]": + return self._task + + @property + def protocol(self) -> "RequestHandler": + return self._protocol + + @property + def transport(self) -> Optional[asyncio.Transport]: + if self._protocol is None: + return None + return self._protocol.transport + + @property + def writer(self) -> AbstractStreamWriter: + return self._payload_writer + + @property + def client_max_size(self) -> int: + return self._client_max_size + + @reify + def message(self) -> RawRequestMessage: + warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3) + return self._message + + @reify + def rel_url(self) -> URL: + return self._rel_url + + @reify + def loop(self) -> asyncio.AbstractEventLoop: + warnings.warn( + "request.loop property is deprecated", DeprecationWarning, stacklevel=2 + ) + return self._loop + + # MutableMapping API + + def __getitem__(self, key: str) -> Any: + return self._state[key] + + def __setitem__(self, key: str, value: Any) -> None: + self._state[key] = value + + def __delitem__(self, key: str) -> None: + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[str]: + return iter(self._state) + + ######## + + @reify + def secure(self) -> bool: + """A bool indicating if the request is handled with SSL.""" + return self.scheme == "https" + + @reify + def forwarded(self) -> Tuple[Mapping[str, str], ...]: + """A tuple containing all parsed Forwarded header(s). + + Makes an effort to parse Forwarded headers as specified by RFC 7239: + + - It adds one (immutable) dictionary per Forwarded 'field-value', ie + per proxy. The element corresponds to the data in the Forwarded + field-value added by the first proxy encountered by the client. Each + subsequent item corresponds to those added by later proxies. + - It checks that every value has valid syntax in general as specified + in section 4: either a 'token' or a 'quoted-string'. + - It un-escapes found escape sequences. + - It does NOT validate 'by' and 'for' contents as specified in section + 6. + - It does NOT validate 'host' contents (Host ABNF). + - It does NOT validate 'proto' contents for valid URI scheme names. 
+ + Returns a tuple containing one or more immutable dicts + """ + elems = [] + for field_value in self._message.headers.getall(hdrs.FORWARDED, ()): + length = len(field_value) + pos = 0 + need_separator = False + elem: Dict[str, str] = {} + elems.append(types.MappingProxyType(elem)) + while 0 <= pos < length: + match = _FORWARDED_PAIR_RE.match(field_value, pos) + if match is not None: # got a valid forwarded-pair + if need_separator: + # bad syntax here, skip to next comma + pos = field_value.find(",", pos) + else: + name, value, port = match.groups() + if value[0] == '"': + # quoted string: remove quotes and unescape + value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1]) + if port: + value += port + elem[name.lower()] = value + pos += len(match.group(0)) + need_separator = True + elif field_value[pos] == ",": # next forwarded-element + need_separator = False + elem = {} + elems.append(types.MappingProxyType(elem)) + pos += 1 + elif field_value[pos] == ";": # next forwarded-pair + need_separator = False + pos += 1 + elif field_value[pos] in " \t": + # Allow whitespace even between forwarded-pairs, though + # RFC 7239 doesn't. This simplifies code and is in line + # with Postel's law. + pos += 1 + else: + # bad syntax here, skip to next comma + pos = field_value.find(",", pos) + return tuple(elems) + + @reify + def scheme(self) -> str: + """A string representing the scheme of the request. + + Hostname is resolved in this order: + + - overridden value by .clone(scheme=new_scheme) call. + - type of connection to peer: HTTPS if socket is SSL, HTTP otherwise. + + 'http' or 'https'. + """ + if self._transport_sslcontext: + return "https" + else: + return "http" + + @reify + def method(self) -> str: + """Read only property for getting HTTP method. + + The value is upper-cased str like 'GET', 'POST', 'PUT' etc. + """ + return self._method + + @reify + def version(self) -> HttpVersion: + """Read only property for getting HTTP version of request. + + Returns aiohttp.protocol.HttpVersion instance. + """ + return self._version + + @reify + def host(self) -> str: + """Hostname of the request. + + Hostname is resolved in this order: + + - overridden value by .clone(host=new_host) call. + - HOST HTTP header + - socket.getfqdn() value + """ + host = self._message.headers.get(hdrs.HOST) + if host is not None: + return host + return socket.getfqdn() + + @reify + def remote(self) -> Optional[str]: + """Remote IP of client initiated HTTP request. + + The IP is resolved in this order: + + - overridden value by .clone(remote=new_remote) call. + - peername of opened socket + """ + if self._transport_peername is None: + return None + if isinstance(self._transport_peername, (list, tuple)): + return str(self._transport_peername[0]) + return str(self._transport_peername) + + @reify + def url(self) -> URL: + url = URL.build(scheme=self.scheme, host=self.host) + return url.join(self._rel_url) + + @reify + def path(self) -> str: + """The URL including *PATH INFO* without the host or scheme. + + E.g., ``/app/blog`` + """ + return self._rel_url.path + + @reify + def path_qs(self) -> str: + """The URL including PATH_INFO and the query string. + + E.g, /app/blog?id=10 + """ + return str(self._rel_url) + + @reify + def raw_path(self) -> str: + """The URL including raw *PATH INFO* without the host or scheme. 
+ + Warning, the path is unquoted and may contains non valid URL characters + + E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters`` + """ + return self._message.path + + @reify + def query(self) -> "MultiMapping[str]": + """A multidict with all the variables in the query string.""" + return MultiDictProxy(self._rel_url.query) + + @reify + def query_string(self) -> str: + """The query string in the URL. + + E.g., id=10 + """ + return self._rel_url.query_string + + @reify + def headers(self) -> "MultiMapping[str]": + """A case-insensitive multidict proxy with all headers.""" + return self._headers + + @reify + def raw_headers(self) -> RawHeaders: + """A sequence of pairs for all headers.""" + return self._message.raw_headers + + @reify + def if_modified_since(self) -> Optional[datetime.datetime]: + """The value of If-Modified-Since HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE)) + + @reify + def if_unmodified_since(self) -> Optional[datetime.datetime]: + """The value of If-Unmodified-Since HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE)) + + @staticmethod + def _etag_values(etag_header: str) -> Iterator[ETag]: + """Extract `ETag` objects from raw header.""" + if etag_header == ETAG_ANY: + yield ETag( + is_weak=False, + value=ETAG_ANY, + ) + else: + for match in LIST_QUOTED_ETAG_RE.finditer(etag_header): + is_weak, value, garbage = match.group(2, 3, 4) + # Any symbol captured by 4th group means + # that the following sequence is invalid. + if garbage: + break + + yield ETag( + is_weak=bool(is_weak), + value=value, + ) + + @classmethod + def _if_match_or_none_impl( + cls, header_value: Optional[str] + ) -> Optional[Tuple[ETag, ...]]: + if not header_value: + return None + + return tuple(cls._etag_values(header_value)) + + @reify + def if_match(self) -> Optional[Tuple[ETag, ...]]: + """The value of If-Match HTTP header, or None. + + This header is represented as a `tuple` of `ETag` objects. + """ + return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH)) + + @reify + def if_none_match(self) -> Optional[Tuple[ETag, ...]]: + """The value of If-None-Match HTTP header, or None. + + This header is represented as a `tuple` of `ETag` objects. + """ + return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH)) + + @reify + def if_range(self) -> Optional[datetime.datetime]: + """The value of If-Range HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self.headers.get(hdrs.IF_RANGE)) + + @reify + def keep_alive(self) -> bool: + """Is keepalive enabled by client?""" + return not self._message.should_close + + @reify + def cookies(self) -> Mapping[str, str]: + """Return request cookies. + + A read-only dictionary-like object. + """ + raw = self.headers.get(hdrs.COOKIE, "") + parsed = SimpleCookie(raw) + return MappingProxyType({key: val.value for key, val in parsed.items()}) + + @reify + def http_range(self) -> slice: + """The content of Range HTTP header. + + Return a slice instance. 
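+
+ For example, ``Range: bytes=0-499`` maps to ``slice(0, 500, 1)`` and
+ ``Range: bytes=-500`` (a suffix range) maps to ``slice(-500, None, 1)``.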
+ + """ + rng = self._headers.get(hdrs.RANGE) + start, end = None, None + if rng is not None: + try: + pattern = r"^bytes=(\d*)-(\d*)$" + start, end = re.findall(pattern, rng)[0] + except IndexError: # pattern was not found in header + raise ValueError("range not in acceptable format") + + end = int(end) if end else None + start = int(start) if start else None + + if start is None and end is not None: + # end with no start is to return tail of content + start = -end + end = None + + if start is not None and end is not None: + # end is inclusive in range header, exclusive for slice + end += 1 + + if start >= end: + raise ValueError("start cannot be after end") + + if start is end is None: # No valid range supplied + raise ValueError("No start or end of range specified") + + return slice(start, end, 1) + + @reify + def content(self) -> StreamReader: + """Return raw payload stream.""" + return self._payload + + @property + def has_body(self) -> bool: + """Return True if request's HTTP BODY can be read, False otherwise.""" + warnings.warn( + "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2 + ) + return not self._payload.at_eof() + + @property + def can_read_body(self) -> bool: + """Return True if request's HTTP BODY can be read, False otherwise.""" + return not self._payload.at_eof() + + @reify + def body_exists(self) -> bool: + """Return True if request has HTTP BODY, False otherwise.""" + return type(self._payload) is not EmptyStreamReader + + async def release(self) -> None: + """Release request. + + Eat unread part of HTTP BODY if present. + """ + while not self._payload.at_eof(): + await self._payload.readany() + + async def read(self) -> bytes: + """Read request body if present. + + Returns bytes object with full request content. 
+ """ + if self._read_bytes is None: + body = bytearray() + while True: + chunk = await self._payload.readany() + body.extend(chunk) + if self._client_max_size: + body_size = len(body) + if body_size >= self._client_max_size: + raise HTTPRequestEntityTooLarge( + max_size=self._client_max_size, actual_size=body_size + ) + if not chunk: + break + self._read_bytes = bytes(body) + return self._read_bytes + + async def text(self) -> str: + """Return BODY as text using encoding from .charset.""" + bytes_body = await self.read() + encoding = self.charset or "utf-8" + return bytes_body.decode(encoding) + + async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any: + """Return BODY as JSON.""" + body = await self.text() + return loads(body) + + async def multipart(self) -> MultipartReader: + """Return async iterator to process BODY as multipart.""" + return MultipartReader(self._headers, self._payload) + + async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": + """Return POST parameters.""" + if self._post is not None: + return self._post + if self._method not in self.POST_METHODS: + self._post = MultiDictProxy(MultiDict()) + return self._post + + content_type = self.content_type + if content_type not in ( + "", + "application/x-www-form-urlencoded", + "multipart/form-data", + ): + self._post = MultiDictProxy(MultiDict()) + return self._post + + out: MultiDict[Union[str, bytes, FileField]] = MultiDict() + + if content_type == "multipart/form-data": + multipart = await self.multipart() + max_size = self._client_max_size + + field = await multipart.next() + while field is not None: + size = 0 + field_ct = field.headers.get(hdrs.CONTENT_TYPE) + + if isinstance(field, BodyPartReader): + assert field.name is not None + + # Note that according to RFC 7578, the Content-Type header + # is optional, even for files, so we can't assume it's + # present. 
+ # https://tools.ietf.org/html/rfc7578#section-4.4 + if field.filename: + # store file in temp file + tmp = await self._loop.run_in_executor( + None, tempfile.TemporaryFile + ) + chunk = await field.read_chunk(size=2**16) + while chunk: + chunk = field.decode(chunk) + await self._loop.run_in_executor(None, tmp.write, chunk) + size += len(chunk) + if 0 < max_size < size: + await self._loop.run_in_executor(None, tmp.close) + raise HTTPRequestEntityTooLarge( + max_size=max_size, actual_size=size + ) + chunk = await field.read_chunk(size=2**16) + await self._loop.run_in_executor(None, tmp.seek, 0) + + if field_ct is None: + field_ct = "application/octet-stream" + + ff = FileField( + field.name, + field.filename, + cast(io.BufferedReader, tmp), + field_ct, + field.headers, + ) + out.add(field.name, ff) + else: + # deal with ordinary data + value = await field.read(decode=True) + if field_ct is None or field_ct.startswith("text/"): + charset = field.get_charset(default="utf-8") + out.add(field.name, value.decode(charset)) + else: + out.add(field.name, value) + size += len(value) + if 0 < max_size < size: + raise HTTPRequestEntityTooLarge( + max_size=max_size, actual_size=size + ) + else: + raise ValueError( + "To decode nested multipart you need " "to use custom reader", + ) + + field = await multipart.next() + else: + data = await self.read() + if data: + charset = self.charset or "utf-8" + out.extend( + parse_qsl( + data.rstrip().decode(charset), + keep_blank_values=True, + encoding=charset, + ) + ) + + self._post = MultiDictProxy(out) + return self._post + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """Extra info from protocol transport""" + protocol = self._protocol + if protocol is None: + return default + + transport = protocol.transport + if transport is None: + return default + + return transport.get_extra_info(name, default) + + def __repr__(self) -> str: + ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode( + "ascii" + ) + return "<{} {} {} >".format( + self.__class__.__name__, self._method, ascii_encodable_path + ) + + def __eq__(self, other: object) -> bool: + return id(self) == id(other) + + def __bool__(self) -> bool: + return True + + async def _prepare_hook(self, response: StreamResponse) -> None: + return + + def _cancel(self, exc: BaseException) -> None: + set_exception(self._payload, exc) + + def _finish(self) -> None: + if self._post is None or self.content_type != "multipart/form-data": + return + + # NOTE: Release file descriptors for the + # NOTE: `tempfile.Temporaryfile`-created `_io.BufferedRandom` + # NOTE: instances of files sent within multipart request body + # NOTE: via HTTP POST request. 
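+ # Only FileField values hold an open file object created above in post();
+ # plain form fields are str/bytes and need no cleanup.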
+ for file_name, file_field_object in self._post.items(): + if isinstance(file_field_object, FileField): + file_field_object.file.close() + + +class Request(BaseRequest): + + ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"]) + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + # matchdict, route_name, handler + # or information about traversal lookup + + # initialized after route resolving + self._match_info: Optional[UrlMappingMatchInfo] = None + + if DEBUG: + + def __setattr__(self, name: str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn( + "Setting custom {}.{} attribute " + "is discouraged".format(self.__class__.__name__, name), + DeprecationWarning, + stacklevel=2, + ) + super().__setattr__(name, val) + + def clone( + self, + *, + method: Union[str, _SENTINEL] = sentinel, + rel_url: Union[StrOrURL, _SENTINEL] = sentinel, + headers: Union[LooseHeaders, _SENTINEL] = sentinel, + scheme: Union[str, _SENTINEL] = sentinel, + host: Union[str, _SENTINEL] = sentinel, + remote: Union[str, _SENTINEL] = sentinel, + client_max_size: Union[int, _SENTINEL] = sentinel, + ) -> "Request": + ret = super().clone( + method=method, + rel_url=rel_url, + headers=headers, + scheme=scheme, + host=host, + remote=remote, + client_max_size=client_max_size, + ) + new_ret = cast(Request, ret) + new_ret._match_info = self._match_info + return new_ret + + @reify + def match_info(self) -> "UrlMappingMatchInfo": + """Result of route resolving.""" + match_info = self._match_info + assert match_info is not None + return match_info + + @property + def app(self) -> "Application": + """Application instance.""" + match_info = self._match_info + assert match_info is not None + return match_info.current_app + + @property + def config_dict(self) -> ChainMapProxy: + match_info = self._match_info + assert match_info is not None + lst = match_info.apps + app = self.app + idx = lst.index(app) + sublist = list(reversed(lst[: idx + 1])) + return ChainMapProxy(sublist) + + async def _prepare_hook(self, response: StreamResponse) -> None: + match_info = self._match_info + if match_info is None: + return + for app in match_info._apps: + await app.on_response_prepare.send(self, response) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_response.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_response.py new file mode 100644 index 000000000..78d3fe329 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_response.py @@ -0,0 +1,820 @@ +import asyncio +import collections.abc +import datetime +import enum +import json +import math +import time +import warnings +from concurrent.futures import Executor +from http import HTTPStatus +from http.cookies import SimpleCookie +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterator, + MutableMapping, + Optional, + Union, + cast, +) + +from multidict import CIMultiDict, istr + +from . 
import hdrs, payload +from .abc import AbstractStreamWriter +from .compression_utils import ZLibCompressor +from .helpers import ( + ETAG_ANY, + QUOTED_ETAG_RE, + ETag, + HeadersMixin, + must_be_empty_body, + parse_http_date, + rfc822_formatted_time, + sentinel, + should_remove_content_length, + validate_etag_value, +) +from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11 +from .payload import Payload +from .typedefs import JSONEncoder, LooseHeaders + +__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response") + + +if TYPE_CHECKING: + from .web_request import BaseRequest + + BaseClass = MutableMapping[str, Any] +else: + BaseClass = collections.abc.MutableMapping + + +# TODO(py311): Convert to StrEnum for wider use +class ContentCoding(enum.Enum): + # The content codings that we have support for. + # + # Additional registered codings are listed at: + # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding + deflate = "deflate" + gzip = "gzip" + identity = "identity" + + +############################################################ +# HTTP Response classes +############################################################ + + +class StreamResponse(BaseClass, HeadersMixin): + + _length_check = True + + def __init__( + self, + *, + status: int = 200, + reason: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + ) -> None: + self._body = None + self._keep_alive: Optional[bool] = None + self._chunked = False + self._compression = False + self._compression_force: Optional[ContentCoding] = None + self._cookies = SimpleCookie() + + self._req: Optional[BaseRequest] = None + self._payload_writer: Optional[AbstractStreamWriter] = None + self._eof_sent = False + self._must_be_empty_body: Optional[bool] = None + self._body_length = 0 + self._state: Dict[str, Any] = {} + + if headers is not None: + self._headers: CIMultiDict[str] = CIMultiDict(headers) + else: + self._headers = CIMultiDict() + + self.set_status(status, reason) + + @property + def prepared(self) -> bool: + return self._payload_writer is not None + + @property + def task(self) -> "Optional[asyncio.Task[None]]": + if self._req: + return self._req.task + else: + return None + + @property + def status(self) -> int: + return self._status + + @property + def chunked(self) -> bool: + return self._chunked + + @property + def compression(self) -> bool: + return self._compression + + @property + def reason(self) -> str: + return self._reason + + def set_status( + self, + status: int, + reason: Optional[str] = None, + ) -> None: + assert not self.prepared, ( + "Cannot change the response status code after " "the headers have been sent" + ) + self._status = int(status) + if reason is None: + try: + reason = HTTPStatus(self._status).phrase + except ValueError: + reason = "" + self._reason = reason + + @property + def keep_alive(self) -> Optional[bool]: + return self._keep_alive + + def force_close(self) -> None: + self._keep_alive = False + + @property + def body_length(self) -> int: + return self._body_length + + @property + def output_length(self) -> int: + warnings.warn("output_length is deprecated", DeprecationWarning) + assert self._payload_writer + return self._payload_writer.buffer_size + + def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None: + """Enables automatic chunked transfer encoding.""" + self._chunked = True + + if hdrs.CONTENT_LENGTH in self._headers: + raise RuntimeError( + "You can't enable chunked encoding when " "a content length is set" + 
) + if chunk_size is not None: + warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) + + def enable_compression( + self, force: Optional[Union[bool, ContentCoding]] = None + ) -> None: + """Enables response compression encoding.""" + # Backwards compatibility for when force was a bool <0.17. + if isinstance(force, bool): + force = ContentCoding.deflate if force else ContentCoding.identity + warnings.warn( + "Using boolean for force is deprecated #3318", DeprecationWarning + ) + elif force is not None: + assert isinstance(force, ContentCoding), ( + "force should one of " "None, bool or " "ContentEncoding" + ) + + self._compression = True + self._compression_force = force + + @property + def headers(self) -> "CIMultiDict[str]": + return self._headers + + @property + def cookies(self) -> SimpleCookie: + return self._cookies + + def set_cookie( + self, + name: str, + value: str, + *, + expires: Optional[str] = None, + domain: Optional[str] = None, + max_age: Optional[Union[int, str]] = None, + path: str = "/", + secure: Optional[bool] = None, + httponly: Optional[bool] = None, + version: Optional[str] = None, + samesite: Optional[str] = None, + ) -> None: + """Set or update response cookie. + + Sets new cookie or updates existent with new value. + Also updates only those params which are not None. + """ + old = self._cookies.get(name) + if old is not None and old.coded_value == "": + # deleted cookie + self._cookies.pop(name, None) + + self._cookies[name] = value + c = self._cookies[name] + + if expires is not None: + c["expires"] = expires + elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT": + del c["expires"] + + if domain is not None: + c["domain"] = domain + + if max_age is not None: + c["max-age"] = str(max_age) + elif "max-age" in c: + del c["max-age"] + + c["path"] = path + + if secure is not None: + c["secure"] = secure + if httponly is not None: + c["httponly"] = httponly + if version is not None: + c["version"] = version + if samesite is not None: + c["samesite"] = samesite + + def del_cookie( + self, name: str, *, domain: Optional[str] = None, path: str = "/" + ) -> None: + """Delete cookie. + + Creates new empty expired cookie. + """ + # TODO: do we need domain/path here? 
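+ # Deletion works by replacing the cookie with an already-expired one:
+ # max_age=0 plus an epoch `expires` value makes conforming clients drop it.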
+ self._cookies.pop(name, None) + self.set_cookie( + name, + "", + max_age=0, + expires="Thu, 01 Jan 1970 00:00:00 GMT", + domain=domain, + path=path, + ) + + @property + def content_length(self) -> Optional[int]: + # Just a placeholder for adding setter + return super().content_length + + @content_length.setter + def content_length(self, value: Optional[int]) -> None: + if value is not None: + value = int(value) + if self._chunked: + raise RuntimeError( + "You can't set content length when " "chunked encoding is enable" + ) + self._headers[hdrs.CONTENT_LENGTH] = str(value) + else: + self._headers.pop(hdrs.CONTENT_LENGTH, None) + + @property + def content_type(self) -> str: + # Just a placeholder for adding setter + return super().content_type + + @content_type.setter + def content_type(self, value: str) -> None: + self.content_type # read header values if needed + self._content_type = str(value) + self._generate_content_type_header() + + @property + def charset(self) -> Optional[str]: + # Just a placeholder for adding setter + return super().charset + + @charset.setter + def charset(self, value: Optional[str]) -> None: + ctype = self.content_type # read header values if needed + if ctype == "application/octet-stream": + raise RuntimeError( + "Setting charset for application/octet-stream " + "doesn't make sense, setup content_type first" + ) + assert self._content_dict is not None + if value is None: + self._content_dict.pop("charset", None) + else: + self._content_dict["charset"] = str(value).lower() + self._generate_content_type_header() + + @property + def last_modified(self) -> Optional[datetime.datetime]: + """The value of Last-Modified HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED)) + + @last_modified.setter + def last_modified( + self, value: Optional[Union[int, float, datetime.datetime, str]] + ) -> None: + if value is None: + self._headers.pop(hdrs.LAST_MODIFIED, None) + elif isinstance(value, (int, float)): + self._headers[hdrs.LAST_MODIFIED] = time.strftime( + "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value)) + ) + elif isinstance(value, datetime.datetime): + self._headers[hdrs.LAST_MODIFIED] = time.strftime( + "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple() + ) + elif isinstance(value, str): + self._headers[hdrs.LAST_MODIFIED] = value + + @property + def etag(self) -> Optional[ETag]: + quoted_value = self._headers.get(hdrs.ETAG) + if not quoted_value: + return None + elif quoted_value == ETAG_ANY: + return ETag(value=ETAG_ANY) + match = QUOTED_ETAG_RE.fullmatch(quoted_value) + if not match: + return None + is_weak, value = match.group(1, 2) + return ETag( + is_weak=bool(is_weak), + value=value, + ) + + @etag.setter + def etag(self, value: Optional[Union[ETag, str]]) -> None: + if value is None: + self._headers.pop(hdrs.ETAG, None) + elif (isinstance(value, str) and value == ETAG_ANY) or ( + isinstance(value, ETag) and value.value == ETAG_ANY + ): + self._headers[hdrs.ETAG] = ETAG_ANY + elif isinstance(value, str): + validate_etag_value(value) + self._headers[hdrs.ETAG] = f'"{value}"' + elif isinstance(value, ETag) and isinstance(value.value, str): + validate_etag_value(value.value) + hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"' + self._headers[hdrs.ETAG] = hdr_value + else: + raise ValueError( + f"Unsupported etag type: {type(value)}. 
" + f"etag must be str, ETag or None" + ) + + def _generate_content_type_header( + self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE + ) -> None: + assert self._content_dict is not None + assert self._content_type is not None + params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items()) + if params: + ctype = self._content_type + "; " + params + else: + ctype = self._content_type + self._headers[CONTENT_TYPE] = ctype + + async def _do_start_compression(self, coding: ContentCoding) -> None: + if coding != ContentCoding.identity: + assert self._payload_writer is not None + self._headers[hdrs.CONTENT_ENCODING] = coding.value + self._payload_writer.enable_compression(coding.value) + # Compressed payload may have different content length, + # remove the header + self._headers.popall(hdrs.CONTENT_LENGTH, None) + + async def _start_compression(self, request: "BaseRequest") -> None: + if self._compression_force: + await self._do_start_compression(self._compression_force) + else: + # Encoding comparisons should be case-insensitive + # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 + accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() + for coding in ContentCoding: + if coding.value in accept_encoding: + await self._do_start_compression(coding) + return + + async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: + if self._eof_sent: + return None + if self._payload_writer is not None: + return self._payload_writer + self._must_be_empty_body = must_be_empty_body(request.method, self.status) + return await self._start(request) + + async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: + self._req = request + writer = self._payload_writer = request._payload_writer + + await self._prepare_headers() + await request._prepare_hook(self) + await self._write_headers() + + return writer + + async def _prepare_headers(self) -> None: + request = self._req + assert request is not None + writer = self._payload_writer + assert writer is not None + keep_alive = self._keep_alive + if keep_alive is None: + keep_alive = request.keep_alive + self._keep_alive = keep_alive + + version = request.version + + headers = self._headers + for cookie in self._cookies.values(): + value = cookie.output(header="")[1:] + headers.add(hdrs.SET_COOKIE, value) + + if self._compression: + await self._start_compression(request) + + if self._chunked: + if version != HttpVersion11: + raise RuntimeError( + "Using chunked encoding is forbidden " + "for HTTP/{0.major}.{0.minor}".format(request.version) + ) + if not self._must_be_empty_body: + writer.enable_chunking() + headers[hdrs.TRANSFER_ENCODING] = "chunked" + if hdrs.CONTENT_LENGTH in headers: + del headers[hdrs.CONTENT_LENGTH] + elif self._length_check: + writer.length = self.content_length + if writer.length is None: + if version >= HttpVersion11: + if not self._must_be_empty_body: + writer.enable_chunking() + headers[hdrs.TRANSFER_ENCODING] = "chunked" + elif not self._must_be_empty_body: + keep_alive = False + + # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2 + # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4 + if self._must_be_empty_body: + if hdrs.CONTENT_LENGTH in headers and should_remove_content_length( + request.method, self.status + ): + del headers[hdrs.CONTENT_LENGTH] + # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-10 + # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13 + if hdrs.TRANSFER_ENCODING in headers: + del headers[hdrs.TRANSFER_ENCODING] + 
else: + headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream") + headers.setdefault(hdrs.DATE, rfc822_formatted_time()) + headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE) + + # connection header + if hdrs.CONNECTION not in headers: + if keep_alive: + if version == HttpVersion10: + headers[hdrs.CONNECTION] = "keep-alive" + else: + if version == HttpVersion11: + headers[hdrs.CONNECTION] = "close" + + async def _write_headers(self) -> None: + request = self._req + assert request is not None + writer = self._payload_writer + assert writer is not None + # status line + version = request.version + status_line = "HTTP/{}.{} {} {}".format( + version[0], version[1], self._status, self._reason + ) + await writer.write_headers(status_line, self._headers) + + async def write(self, data: bytes) -> None: + assert isinstance( + data, (bytes, bytearray, memoryview) + ), "data argument must be byte-ish (%r)" % type(data) + + if self._eof_sent: + raise RuntimeError("Cannot call write() after write_eof()") + if self._payload_writer is None: + raise RuntimeError("Cannot call write() before prepare()") + + await self._payload_writer.write(data) + + async def drain(self) -> None: + assert not self._eof_sent, "EOF has already been sent" + assert self._payload_writer is not None, "Response has not been started" + warnings.warn( + "drain method is deprecated, use await resp.write()", + DeprecationWarning, + stacklevel=2, + ) + await self._payload_writer.drain() + + async def write_eof(self, data: bytes = b"") -> None: + assert isinstance( + data, (bytes, bytearray, memoryview) + ), "data argument must be byte-ish (%r)" % type(data) + + if self._eof_sent: + return + + assert self._payload_writer is not None, "Response has not been started" + + await self._payload_writer.write_eof(data) + self._eof_sent = True + self._req = None + self._body_length = self._payload_writer.output_size + self._payload_writer = None + + def __repr__(self) -> str: + if self._eof_sent: + info = "eof" + elif self.prepared: + assert self._req is not None + info = f"{self._req.method} {self._req.path} " + else: + info = "not prepared" + return f"<{self.__class__.__name__} {self.reason} {info}>" + + def __getitem__(self, key: str) -> Any: + return self._state[key] + + def __setitem__(self, key: str, value: Any) -> None: + self._state[key] = value + + def __delitem__(self, key: str) -> None: + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[str]: + return iter(self._state) + + def __hash__(self) -> int: + return hash(id(self)) + + def __eq__(self, other: object) -> bool: + return self is other + + +class Response(StreamResponse): + def __init__( + self, + *, + body: Any = None, + status: int = 200, + reason: Optional[str] = None, + text: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + content_type: Optional[str] = None, + charset: Optional[str] = None, + zlib_executor_size: Optional[int] = None, + zlib_executor: Optional[Executor] = None, + ) -> None: + if body is not None and text is not None: + raise ValueError("body and text are not allowed together") + + if headers is None: + real_headers: CIMultiDict[str] = CIMultiDict() + elif not isinstance(headers, CIMultiDict): + real_headers = CIMultiDict(headers) + else: + real_headers = headers # = cast('CIMultiDict[str]', headers) + + if content_type is not None and "charset" in content_type: + raise ValueError("charset must not be in content_type " "argument") + + if text is not None: + if 
hdrs.CONTENT_TYPE in real_headers: + if content_type or charset: + raise ValueError( + "passing both Content-Type header and " + "content_type or charset params " + "is forbidden" + ) + else: + # fast path for filling headers + if not isinstance(text, str): + raise TypeError("text argument must be str (%r)" % type(text)) + if content_type is None: + content_type = "text/plain" + if charset is None: + charset = "utf-8" + real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset + body = text.encode(charset) + text = None + else: + if hdrs.CONTENT_TYPE in real_headers: + if content_type is not None or charset is not None: + raise ValueError( + "passing both Content-Type header and " + "content_type or charset params " + "is forbidden" + ) + else: + if content_type is not None: + if charset is not None: + content_type += "; charset=" + charset + real_headers[hdrs.CONTENT_TYPE] = content_type + + super().__init__(status=status, reason=reason, headers=real_headers) + + if text is not None: + self.text = text + else: + self.body = body + + self._compressed_body: Optional[bytes] = None + self._zlib_executor_size = zlib_executor_size + self._zlib_executor = zlib_executor + + @property + def body(self) -> Optional[Union[bytes, Payload]]: + return self._body + + @body.setter + def body(self, body: bytes) -> None: + if body is None: + self._body: Optional[bytes] = None + self._body_payload: bool = False + elif isinstance(body, (bytes, bytearray)): + self._body = body + self._body_payload = False + else: + try: + self._body = body = payload.PAYLOAD_REGISTRY.get(body) + except payload.LookupError: + raise ValueError("Unsupported body type %r" % type(body)) + + self._body_payload = True + + headers = self._headers + + # set content-type + if hdrs.CONTENT_TYPE not in headers: + headers[hdrs.CONTENT_TYPE] = body.content_type + + # copy payload headers + if body.headers: + for key, value in body.headers.items(): + if key not in headers: + headers[key] = value + + self._compressed_body = None + + @property + def text(self) -> Optional[str]: + if self._body is None: + return None + return self._body.decode(self.charset or "utf-8") + + @text.setter + def text(self, text: str) -> None: + assert text is None or isinstance( + text, str + ), "text argument must be str (%r)" % type(text) + + if self.content_type == "application/octet-stream": + self.content_type = "text/plain" + if self.charset is None: + self.charset = "utf-8" + + self._body = text.encode(self.charset) + self._body_payload = False + self._compressed_body = None + + @property + def content_length(self) -> Optional[int]: + if self._chunked: + return None + + if hdrs.CONTENT_LENGTH in self._headers: + return super().content_length + + if self._compressed_body is not None: + # Return length of the compressed body + return len(self._compressed_body) + elif self._body_payload: + # A payload without content length, or a compressed payload + return None + elif self._body is not None: + return len(self._body) + else: + return 0 + + @content_length.setter + def content_length(self, value: Optional[int]) -> None: + raise RuntimeError("Content length is set automatically") + + async def write_eof(self, data: bytes = b"") -> None: + if self._eof_sent: + return + if self._compressed_body is None: + body: Optional[Union[bytes, Payload]] = self._body + else: + body = self._compressed_body + assert not data, f"data arg is not supported, got {data!r}" + assert self._req is not None + assert self._payload_writer is not None + if body is not None: + if 
self._must_be_empty_body: + await super().write_eof() + elif self._body_payload: + payload = cast(Payload, body) + await payload.write(self._payload_writer) + await super().write_eof() + else: + await super().write_eof(cast(bytes, body)) + else: + await super().write_eof() + + async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: + if should_remove_content_length(request.method, self.status): + if hdrs.CONTENT_LENGTH in self._headers: + del self._headers[hdrs.CONTENT_LENGTH] + elif not self._chunked and hdrs.CONTENT_LENGTH not in self._headers: + if self._body_payload: + size = cast(Payload, self._body).size + if size is not None: + self._headers[hdrs.CONTENT_LENGTH] = str(size) + else: + body_len = len(self._body) if self._body else "0" + # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7 + if body_len != "0" or ( + self.status != 304 and request.method.upper() != hdrs.METH_HEAD + ): + self._headers[hdrs.CONTENT_LENGTH] = str(body_len) + + return await super()._start(request) + + async def _do_start_compression(self, coding: ContentCoding) -> None: + if self._body_payload or self._chunked: + return await super()._do_start_compression(coding) + + if coding != ContentCoding.identity: + # Instead of using _payload_writer.enable_compression, + # compress the whole body + compressor = ZLibCompressor( + encoding=str(coding.value), + max_sync_chunk_size=self._zlib_executor_size, + executor=self._zlib_executor, + ) + assert self._body is not None + if self._zlib_executor_size is None and len(self._body) > 1024 * 1024: + warnings.warn( + "Synchronous compression of large response bodies " + f"({len(self._body)} bytes) might block the async event loop. " + "Consider providing a custom value to zlib_executor_size/" + "zlib_executor response properties or disabling compression on it." + ) + self._compressed_body = ( + await compressor.compress(self._body) + compressor.flush() + ) + assert self._compressed_body is not None + + self._headers[hdrs.CONTENT_ENCODING] = coding.value + self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body)) + + +def json_response( + data: Any = sentinel, + *, + text: Optional[str] = None, + body: Optional[bytes] = None, + status: int = 200, + reason: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + content_type: str = "application/json", + dumps: JSONEncoder = json.dumps, +) -> Response: + if data is not sentinel: + if text or body: + raise ValueError("only one of data, text, or body should be specified") + else: + text = dumps(data) + return Response( + text=text, + body=body, + status=status, + reason=reason, + headers=headers, + content_type=content_type, + ) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_routedef.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_routedef.py new file mode 100644 index 000000000..93802141c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_routedef.py @@ -0,0 +1,214 @@ +import abc +import os # noqa +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterator, + List, + Optional, + Sequence, + Type, + Union, + overload, +) + +import attr + +from . 
import hdrs +from .abc import AbstractView +from .typedefs import Handler, PathLike + +if TYPE_CHECKING: + from .web_request import Request + from .web_response import StreamResponse + from .web_urldispatcher import AbstractRoute, UrlDispatcher +else: + Request = StreamResponse = UrlDispatcher = AbstractRoute = None + + +__all__ = ( + "AbstractRouteDef", + "RouteDef", + "StaticDef", + "RouteTableDef", + "head", + "options", + "get", + "post", + "patch", + "put", + "delete", + "route", + "view", + "static", +) + + +class AbstractRouteDef(abc.ABC): + @abc.abstractmethod + def register(self, router: UrlDispatcher) -> List[AbstractRoute]: + pass # pragma: no cover + + +_HandlerType = Union[Type[AbstractView], Handler] + + +@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True) +class RouteDef(AbstractRouteDef): + method: str + path: str + handler: _HandlerType + kwargs: Dict[str, Any] + + def __repr__(self) -> str: + info = [] + for name, value in sorted(self.kwargs.items()): + info.append(f", {name}={value!r}") + return " {handler.__name__!r}" "{info}>".format( + method=self.method, path=self.path, handler=self.handler, info="".join(info) + ) + + def register(self, router: UrlDispatcher) -> List[AbstractRoute]: + if self.method in hdrs.METH_ALL: + reg = getattr(router, "add_" + self.method.lower()) + return [reg(self.path, self.handler, **self.kwargs)] + else: + return [ + router.add_route(self.method, self.path, self.handler, **self.kwargs) + ] + + +@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True) +class StaticDef(AbstractRouteDef): + prefix: str + path: PathLike + kwargs: Dict[str, Any] + + def __repr__(self) -> str: + info = [] + for name, value in sorted(self.kwargs.items()): + info.append(f", {name}={value!r}") + return " {path}" "{info}>".format( + prefix=self.prefix, path=self.path, info="".join(info) + ) + + def register(self, router: UrlDispatcher) -> List[AbstractRoute]: + resource = router.add_static(self.prefix, self.path, **self.kwargs) + routes = resource.get_info().get("routes", {}) + return list(routes.values()) + + +def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return RouteDef(method, path, handler, kwargs) + + +def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_HEAD, path, handler, **kwargs) + + +def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_OPTIONS, path, handler, **kwargs) + + +def get( + path: str, + handler: _HandlerType, + *, + name: Optional[str] = None, + allow_head: bool = True, + **kwargs: Any, +) -> RouteDef: + return route( + hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs + ) + + +def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_POST, path, handler, **kwargs) + + +def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_PUT, path, handler, **kwargs) + + +def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_PATCH, path, handler, **kwargs) + + +def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: + return route(hdrs.METH_DELETE, path, handler, **kwargs) + + +def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef: + return route(hdrs.METH_ANY, path, handler, **kwargs) + + +def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef: + return StaticDef(prefix, path, kwargs) + + +_Deco = 
Callable[[_HandlerType], _HandlerType] + + +class RouteTableDef(Sequence[AbstractRouteDef]): + """Route definition table""" + + def __init__(self) -> None: + self._items: List[AbstractRouteDef] = [] + + def __repr__(self) -> str: + return f"" + + @overload + def __getitem__(self, index: int) -> AbstractRouteDef: ... + + @overload + def __getitem__(self, index: slice) -> List[AbstractRouteDef]: ... + + def __getitem__(self, index): # type: ignore[no-untyped-def] + return self._items[index] + + def __iter__(self) -> Iterator[AbstractRouteDef]: + return iter(self._items) + + def __len__(self) -> int: + return len(self._items) + + def __contains__(self, item: object) -> bool: + return item in self._items + + def route(self, method: str, path: str, **kwargs: Any) -> _Deco: + def inner(handler: _HandlerType) -> _HandlerType: + self._items.append(RouteDef(method, path, handler, kwargs)) + return handler + + return inner + + def head(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_HEAD, path, **kwargs) + + def get(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_GET, path, **kwargs) + + def post(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_POST, path, **kwargs) + + def put(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_PUT, path, **kwargs) + + def patch(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_PATCH, path, **kwargs) + + def delete(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_DELETE, path, **kwargs) + + def options(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_OPTIONS, path, **kwargs) + + def view(self, path: str, **kwargs: Any) -> _Deco: + return self.route(hdrs.METH_ANY, path, **kwargs) + + def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None: + self._items.append(StaticDef(prefix, path, kwargs)) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_runner.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_runner.py new file mode 100644 index 000000000..2fe229c4e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_runner.py @@ -0,0 +1,397 @@ +import asyncio +import signal +import socket +import warnings +from abc import ABC, abstractmethod +from typing import Any, List, Optional, Set + +from yarl import URL + +from .typedefs import PathLike +from .web_app import Application +from .web_server import Server + +try: + from ssl import SSLContext +except ImportError: + SSLContext = object # type: ignore[misc,assignment] + + +__all__ = ( + "BaseSite", + "TCPSite", + "UnixSite", + "NamedPipeSite", + "SockSite", + "BaseRunner", + "AppRunner", + "ServerRunner", + "GracefulExit", +) + + +class GracefulExit(SystemExit): + code = 1 + + +def _raise_graceful_exit() -> None: + raise GracefulExit() + + +class BaseSite(ABC): + __slots__ = ("_runner", "_ssl_context", "_backlog", "_server") + + def __init__( + self, + runner: "BaseRunner", + *, + shutdown_timeout: float = 60.0, + ssl_context: Optional[SSLContext] = None, + backlog: int = 128, + ) -> None: + if runner.server is None: + raise RuntimeError("Call runner.setup() before making a site") + if shutdown_timeout != 60.0: + msg = "shutdown_timeout should be set on BaseRunner" + warnings.warn(msg, DeprecationWarning, stacklevel=2) + runner._shutdown_timeout = shutdown_timeout + self._runner = runner + self._ssl_context = ssl_context + self._backlog = backlog + self._server: 
Optional[asyncio.AbstractServer] = None + + @property + @abstractmethod + def name(self) -> str: + pass # pragma: no cover + + @abstractmethod + async def start(self) -> None: + self._runner._reg_site(self) + + async def stop(self) -> None: + self._runner._check_site(self) + if self._server is not None: # Maybe not started yet + self._server.close() + + self._runner._unreg_site(self) + + +class TCPSite(BaseSite): + __slots__ = ("_host", "_port", "_reuse_address", "_reuse_port") + + def __init__( + self, + runner: "BaseRunner", + host: Optional[str] = None, + port: Optional[int] = None, + *, + shutdown_timeout: float = 60.0, + ssl_context: Optional[SSLContext] = None, + backlog: int = 128, + reuse_address: Optional[bool] = None, + reuse_port: Optional[bool] = None, + ) -> None: + super().__init__( + runner, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + self._host = host + if port is None: + port = 8443 if self._ssl_context else 8080 + self._port = port + self._reuse_address = reuse_address + self._reuse_port = reuse_port + + @property + def name(self) -> str: + scheme = "https" if self._ssl_context else "http" + host = "0.0.0.0" if self._host is None else self._host + return str(URL.build(scheme=scheme, host=host, port=self._port)) + + async def start(self) -> None: + await super().start() + loop = asyncio.get_event_loop() + server = self._runner.server + assert server is not None + self._server = await loop.create_server( + server, + self._host, + self._port, + ssl=self._ssl_context, + backlog=self._backlog, + reuse_address=self._reuse_address, + reuse_port=self._reuse_port, + ) + + +class UnixSite(BaseSite): + __slots__ = ("_path",) + + def __init__( + self, + runner: "BaseRunner", + path: PathLike, + *, + shutdown_timeout: float = 60.0, + ssl_context: Optional[SSLContext] = None, + backlog: int = 128, + ) -> None: + super().__init__( + runner, + shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + self._path = path + + @property + def name(self) -> str: + scheme = "https" if self._ssl_context else "http" + return f"{scheme}://unix:{self._path}:" + + async def start(self) -> None: + await super().start() + loop = asyncio.get_event_loop() + server = self._runner.server + assert server is not None + self._server = await loop.create_unix_server( + server, + self._path, + ssl=self._ssl_context, + backlog=self._backlog, + ) + + +class NamedPipeSite(BaseSite): + __slots__ = ("_path",) + + def __init__( + self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0 + ) -> None: + loop = asyncio.get_event_loop() + if not isinstance( + loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] + ): + raise RuntimeError( + "Named Pipes only available in proactor" "loop under windows" + ) + super().__init__(runner, shutdown_timeout=shutdown_timeout) + self._path = path + + @property + def name(self) -> str: + return self._path + + async def start(self) -> None: + await super().start() + loop = asyncio.get_event_loop() + server = self._runner.server + assert server is not None + _server = await loop.start_serving_pipe( # type: ignore[attr-defined] + server, self._path + ) + self._server = _server[0] + + +class SockSite(BaseSite): + __slots__ = ("_sock", "_name") + + def __init__( + self, + runner: "BaseRunner", + sock: socket.socket, + *, + shutdown_timeout: float = 60.0, + ssl_context: Optional[SSLContext] = None, + backlog: int = 128, + ) -> None: + super().__init__( + runner, + 
shutdown_timeout=shutdown_timeout, + ssl_context=ssl_context, + backlog=backlog, + ) + self._sock = sock + scheme = "https" if self._ssl_context else "http" + if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX: + name = f"{scheme}://unix:{sock.getsockname()}:" + else: + host, port = sock.getsockname()[:2] + name = str(URL.build(scheme=scheme, host=host, port=port)) + self._name = name + + @property + def name(self) -> str: + return self._name + + async def start(self) -> None: + await super().start() + loop = asyncio.get_event_loop() + server = self._runner.server + assert server is not None + self._server = await loop.create_server( + server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog + ) + + +class BaseRunner(ABC): + __slots__ = ("_handle_signals", "_kwargs", "_server", "_sites", "_shutdown_timeout") + + def __init__( + self, + *, + handle_signals: bool = False, + shutdown_timeout: float = 60.0, + **kwargs: Any, + ) -> None: + self._handle_signals = handle_signals + self._kwargs = kwargs + self._server: Optional[Server] = None + self._sites: List[BaseSite] = [] + self._shutdown_timeout = shutdown_timeout + + @property + def server(self) -> Optional[Server]: + return self._server + + @property + def addresses(self) -> List[Any]: + ret: List[Any] = [] + for site in self._sites: + server = site._server + if server is not None: + sockets = server.sockets # type: ignore[attr-defined] + if sockets is not None: + for sock in sockets: + ret.append(sock.getsockname()) + return ret + + @property + def sites(self) -> Set[BaseSite]: + return set(self._sites) + + async def setup(self) -> None: + loop = asyncio.get_event_loop() + + if self._handle_signals: + try: + loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit) + loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit) + except NotImplementedError: # pragma: no cover + # add_signal_handler is not implemented on Windows + pass + + self._server = await self._make_server() + + @abstractmethod + async def shutdown(self) -> None: + """Call any shutdown hooks to help server close gracefully.""" + + async def cleanup(self) -> None: + # The loop over sites is intentional, an exception on gather() + # leaves self._sites in unpredictable state. + # The loop guaranties that a site is either deleted on success or + # still present on failure + for site in list(self._sites): + await site.stop() + + if self._server: # If setup succeeded + # Yield to event loop to ensure incoming requests prior to stopping the sites + # have all started to be handled before we proceed to close idle connections. 
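+ # For reference, a typical call sequence that reaches this cleanup path is
+ # (illustrative sketch only; the host/port values are arbitrary):
+ #     runner = web.AppRunner(app)
+ #     await runner.setup()
+ #     site = web.TCPSite(runner, "localhost", 8080)
+ #     await site.start()
+ #     ...
+ #     await runner.cleanup()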
+ await asyncio.sleep(0) + self._server.pre_shutdown() + await self.shutdown() + await self._server.shutdown(self._shutdown_timeout) + await self._cleanup_server() + + self._server = None + if self._handle_signals: + loop = asyncio.get_running_loop() + try: + loop.remove_signal_handler(signal.SIGINT) + loop.remove_signal_handler(signal.SIGTERM) + except NotImplementedError: # pragma: no cover + # remove_signal_handler is not implemented on Windows + pass + + @abstractmethod + async def _make_server(self) -> Server: + pass # pragma: no cover + + @abstractmethod + async def _cleanup_server(self) -> None: + pass # pragma: no cover + + def _reg_site(self, site: BaseSite) -> None: + if site in self._sites: + raise RuntimeError(f"Site {site} is already registered in runner {self}") + self._sites.append(site) + + def _check_site(self, site: BaseSite) -> None: + if site not in self._sites: + raise RuntimeError(f"Site {site} is not registered in runner {self}") + + def _unreg_site(self, site: BaseSite) -> None: + if site not in self._sites: + raise RuntimeError(f"Site {site} is not registered in runner {self}") + self._sites.remove(site) + + +class ServerRunner(BaseRunner): + """Low-level web server runner""" + + __slots__ = ("_web_server",) + + def __init__( + self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any + ) -> None: + super().__init__(handle_signals=handle_signals, **kwargs) + self._web_server = web_server + + async def shutdown(self) -> None: + pass + + async def _make_server(self) -> Server: + return self._web_server + + async def _cleanup_server(self) -> None: + pass + + +class AppRunner(BaseRunner): + """Web Application runner""" + + __slots__ = ("_app",) + + def __init__( + self, app: Application, *, handle_signals: bool = False, **kwargs: Any + ) -> None: + super().__init__(handle_signals=handle_signals, **kwargs) + if not isinstance(app, Application): + raise TypeError( + "The first argument should be web.Application " + "instance, got {!r}".format(app) + ) + self._app = app + + @property + def app(self) -> Application: + return self._app + + async def shutdown(self) -> None: + await self._app.shutdown() + + async def _make_server(self) -> Server: + loop = asyncio.get_event_loop() + self._app._set_loop(loop) + self._app.on_startup.freeze() + await self._app.startup() + self._app.freeze() + + return self._app._make_handler(loop=loop, **self._kwargs) + + async def _cleanup_server(self) -> None: + await self._app.cleanup() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_server.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_server.py new file mode 100644 index 000000000..ffc198d57 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_server.py @@ -0,0 +1,82 @@ +"""Low level HTTP server.""" + +import asyncio +from typing import Any, Awaitable, Callable, Dict, List, Optional # noqa + +from .abc import AbstractStreamWriter +from .http_parser import RawRequestMessage +from .streams import StreamReader +from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler +from .web_request import BaseRequest + +__all__ = ("Server",) + + +class Server: + def __init__( + self, + handler: _RequestHandler, + *, + request_factory: Optional[_RequestFactory] = None, + handler_cancellation: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + **kwargs: Any + ) -> None: + self._loop = loop or asyncio.get_running_loop() + self._connections: Dict[RequestHandler, asyncio.Transport] = {} + 
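+ # Maps each active RequestHandler protocol to its transport. Entries are
+ # added in connection_made(), removed in connection_lost(), and drained by
+ # pre_shutdown()/shutdown() when the server stops.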
self._kwargs = kwargs + self.requests_count = 0 + self.request_handler = handler + self.request_factory = request_factory or self._make_request + self.handler_cancellation = handler_cancellation + + @property + def connections(self) -> List[RequestHandler]: + return list(self._connections.keys()) + + def connection_made( + self, handler: RequestHandler, transport: asyncio.Transport + ) -> None: + self._connections[handler] = transport + + def connection_lost( + self, handler: RequestHandler, exc: Optional[BaseException] = None + ) -> None: + if handler in self._connections: + if handler._task_handler: + handler._task_handler.add_done_callback( + lambda f: self._connections.pop(handler, None) + ) + else: + del self._connections[handler] + + def _make_request( + self, + message: RawRequestMessage, + payload: StreamReader, + protocol: RequestHandler, + writer: AbstractStreamWriter, + task: "asyncio.Task[None]", + ) -> BaseRequest: + return BaseRequest(message, payload, protocol, writer, task, self._loop) + + def pre_shutdown(self) -> None: + for conn in self._connections: + conn.close() + + async def shutdown(self, timeout: Optional[float] = None) -> None: + coros = (conn.shutdown(timeout) for conn in self._connections) + await asyncio.gather(*coros) + self._connections.clear() + + def __call__(self) -> RequestHandler: + try: + return RequestHandler(self, loop=self._loop, **self._kwargs) + except TypeError: + # Failsafe creation: remove all custom handler_args + kwargs = { + k: v + for k, v in self._kwargs.items() + if k in ["debug", "access_log_class"] + } + return RequestHandler(self, loop=self._loop, **kwargs) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_urldispatcher.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_urldispatcher.py new file mode 100644 index 000000000..558fb7d0c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_urldispatcher.py @@ -0,0 +1,1302 @@ +import abc +import asyncio +import base64 +import functools +import hashlib +import html +import inspect +import keyword +import os +import re +import sys +import warnings +from contextlib import contextmanager +from functools import wraps +from pathlib import Path +from types import MappingProxyType +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Container, + Dict, + Final, + Generator, + Iterable, + Iterator, + List, + Mapping, + NoReturn, + Optional, + Pattern, + Set, + Sized, + Tuple, + Type, + TypedDict, + Union, + cast, +) + +from yarl import URL, __version__ as yarl_version # type: ignore[attr-defined] + +from . 
import hdrs +from .abc import AbstractMatchInfo, AbstractRouter, AbstractView +from .helpers import DEBUG +from .http import HttpVersion11 +from .typedefs import Handler, PathLike +from .web_exceptions import ( + HTTPException, + HTTPExpectationFailed, + HTTPForbidden, + HTTPMethodNotAllowed, + HTTPNotFound, +) +from .web_fileresponse import FileResponse +from .web_request import Request +from .web_response import Response, StreamResponse +from .web_routedef import AbstractRouteDef + +__all__ = ( + "UrlDispatcher", + "UrlMappingMatchInfo", + "AbstractResource", + "Resource", + "PlainResource", + "DynamicResource", + "AbstractRoute", + "ResourceRoute", + "StaticResource", + "View", +) + + +if TYPE_CHECKING: + from .web_app import Application + + BaseDict = Dict[str, str] +else: + BaseDict = dict + +CIRCULAR_SYMLINK_ERROR = ( + (OSError,) + if sys.version_info < (3, 10) and sys.platform.startswith("win32") + else (RuntimeError,) if sys.version_info < (3, 13) else () +) + +YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2])) + +HTTP_METHOD_RE: Final[Pattern[str]] = re.compile( + r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$" +) +ROUTE_RE: Final[Pattern[str]] = re.compile( + r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})" +) +PATH_SEP: Final[str] = re.escape("/") + + +_ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]] +_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]] + +html_escape = functools.partial(html.escape, quote=True) + + +class _InfoDict(TypedDict, total=False): + path: str + + formatter: str + pattern: Pattern[str] + + directory: Path + prefix: str + routes: Mapping[str, "AbstractRoute"] + + app: "Application" + + domain: str + + rule: "AbstractRuleMatching" + + http_exception: HTTPException + + +class AbstractResource(Sized, Iterable["AbstractRoute"]): + def __init__(self, *, name: Optional[str] = None) -> None: + self._name = name + + @property + def name(self) -> Optional[str]: + return self._name + + @property + @abc.abstractmethod + def canonical(self) -> str: + """Exposes the resource's canonical path. + + For example '/foo/bar/{name}' + + """ + + @abc.abstractmethod # pragma: no branch + def url_for(self, **kwargs: str) -> URL: + """Construct url for resource with additional params.""" + + @abc.abstractmethod # pragma: no branch + async def resolve(self, request: Request) -> _Resolve: + """Resolve resource. + + Return (UrlMappingMatchInfo, allowed_methods) pair. + """ + + @abc.abstractmethod + def add_prefix(self, prefix: str) -> None: + """Add a prefix to processed URLs. + + Required for subapplications support. 
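+ For example, adding the prefix '/admin' to a resource whose canonical
+ path is '/users/{name}' makes it match '/admin/users/{name}'.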
+ """ + + @abc.abstractmethod + def get_info(self) -> _InfoDict: + """Return a dict with additional info useful for introspection""" + + def freeze(self) -> None: + pass + + @abc.abstractmethod + def raw_match(self, path: str) -> bool: + """Perform a raw match against path""" + + +class AbstractRoute(abc.ABC): + def __init__( + self, + method: str, + handler: Union[Handler, Type[AbstractView]], + *, + expect_handler: Optional[_ExpectHandler] = None, + resource: Optional[AbstractResource] = None, + ) -> None: + + if expect_handler is None: + expect_handler = _default_expect_handler + + assert asyncio.iscoroutinefunction( + expect_handler + ), f"Coroutine is expected, got {expect_handler!r}" + + method = method.upper() + if not HTTP_METHOD_RE.match(method): + raise ValueError(f"{method} is not allowed HTTP method") + + assert callable(handler), handler + if asyncio.iscoroutinefunction(handler): + pass + elif inspect.isgeneratorfunction(handler): + warnings.warn( + "Bare generators are deprecated, " "use @coroutine wrapper", + DeprecationWarning, + ) + elif isinstance(handler, type) and issubclass(handler, AbstractView): + pass + else: + warnings.warn( + "Bare functions are deprecated, " "use async ones", DeprecationWarning + ) + + @wraps(handler) + async def handler_wrapper(request: Request) -> StreamResponse: + result = old_handler(request) # type: ignore[call-arg] + if asyncio.iscoroutine(result): + result = await result + assert isinstance(result, StreamResponse) + return result + + old_handler = handler + handler = handler_wrapper + + self._method = method + self._handler = handler + self._expect_handler = expect_handler + self._resource = resource + + @property + def method(self) -> str: + return self._method + + @property + def handler(self) -> Handler: + return self._handler + + @property + @abc.abstractmethod + def name(self) -> Optional[str]: + """Optional route's name, always equals to resource's name.""" + + @property + def resource(self) -> Optional[AbstractResource]: + return self._resource + + @abc.abstractmethod + def get_info(self) -> _InfoDict: + """Return a dict with additional info useful for introspection""" + + @abc.abstractmethod # pragma: no branch + def url_for(self, *args: str, **kwargs: str) -> URL: + """Construct url for route with additional params.""" + + async def handle_expect_header(self, request: Request) -> Optional[StreamResponse]: + return await self._expect_handler(request) + + +class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo): + def __init__(self, match_dict: Dict[str, str], route: AbstractRoute): + super().__init__(match_dict) + self._route = route + self._apps: List[Application] = [] + self._current_app: Optional[Application] = None + self._frozen = False + + @property + def handler(self) -> Handler: + return self._route.handler + + @property + def route(self) -> AbstractRoute: + return self._route + + @property + def expect_handler(self) -> _ExpectHandler: + return self._route.handle_expect_header + + @property + def http_exception(self) -> Optional[HTTPException]: + return None + + def get_info(self) -> _InfoDict: # type: ignore[override] + return self._route.get_info() + + @property + def apps(self) -> Tuple["Application", ...]: + return tuple(self._apps) + + def add_app(self, app: "Application") -> None: + if self._frozen: + raise RuntimeError("Cannot change apps stack after .freeze() call") + if self._current_app is None: + self._current_app = app + self._apps.insert(0, app) + + @property + def current_app(self) -> "Application": + app = 
self._current_app + assert app is not None + return app + + @contextmanager + def set_current_app(self, app: "Application") -> Generator[None, None, None]: + if DEBUG: # pragma: no cover + if app not in self._apps: + raise RuntimeError( + "Expected one of the following apps {!r}, got {!r}".format( + self._apps, app + ) + ) + prev = self._current_app + self._current_app = app + try: + yield + finally: + self._current_app = prev + + def freeze(self) -> None: + self._frozen = True + + def __repr__(self) -> str: + return f"" + + +class MatchInfoError(UrlMappingMatchInfo): + def __init__(self, http_exception: HTTPException) -> None: + self._exception = http_exception + super().__init__({}, SystemRoute(self._exception)) + + @property + def http_exception(self) -> HTTPException: + return self._exception + + def __repr__(self) -> str: + return "".format( + self._exception.status, self._exception.reason + ) + + +async def _default_expect_handler(request: Request) -> None: + """Default handler for Expect header. + + Just send "100 Continue" to client. + raise HTTPExpectationFailed if value of header is not "100-continue" + """ + expect = request.headers.get(hdrs.EXPECT, "") + if request.version == HttpVersion11: + if expect.lower() == "100-continue": + await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n") + else: + raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect) + + +class Resource(AbstractResource): + def __init__(self, *, name: Optional[str] = None) -> None: + super().__init__(name=name) + self._routes: List[ResourceRoute] = [] + + def add_route( + self, + method: str, + handler: Union[Type[AbstractView], Handler], + *, + expect_handler: Optional[_ExpectHandler] = None, + ) -> "ResourceRoute": + + for route_obj in self._routes: + if route_obj.method == method or route_obj.method == hdrs.METH_ANY: + raise RuntimeError( + "Added route will never be executed, " + "method {route.method} is already " + "registered".format(route=route_obj) + ) + + route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler) + self.register_route(route_obj) + return route_obj + + def register_route(self, route: "ResourceRoute") -> None: + assert isinstance( + route, ResourceRoute + ), f"Instance of Route class is required, got {route!r}" + self._routes.append(route) + + async def resolve(self, request: Request) -> _Resolve: + allowed_methods: Set[str] = set() + + match_dict = self._match(request.rel_url.raw_path) + if match_dict is None: + return None, allowed_methods + + for route_obj in self._routes: + route_method = route_obj.method + allowed_methods.add(route_method) + + if route_method == request.method or route_method == hdrs.METH_ANY: + return (UrlMappingMatchInfo(match_dict, route_obj), allowed_methods) + else: + return None, allowed_methods + + @abc.abstractmethod + def _match(self, path: str) -> Optional[Dict[str, str]]: + pass # pragma: no cover + + def __len__(self) -> int: + return len(self._routes) + + def __iter__(self) -> Iterator["ResourceRoute"]: + return iter(self._routes) + + # TODO: implement all abstract methods + + +class PlainResource(Resource): + def __init__(self, path: str, *, name: Optional[str] = None) -> None: + super().__init__(name=name) + assert not path or path.startswith("/") + self._path = path + + @property + def canonical(self) -> str: + return self._path + + def freeze(self) -> None: + if not self._path: + self._path = "/" + + def add_prefix(self, prefix: str) -> None: + assert prefix.startswith("/") + assert not prefix.endswith("/") + 
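+ # The prefix must look like "/admin": absolute, without a trailing slash,
+ # and longer than a bare "/".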
assert len(prefix) > 1 + self._path = prefix + self._path + + def _match(self, path: str) -> Optional[Dict[str, str]]: + # string comparison is about 10 times faster than regexp matching + if self._path == path: + return {} + else: + return None + + def raw_match(self, path: str) -> bool: + return self._path == path + + def get_info(self) -> _InfoDict: + return {"path": self._path} + + def url_for(self) -> URL: # type: ignore[override] + return URL.build(path=self._path, encoded=True) + + def __repr__(self) -> str: + name = "'" + self.name + "' " if self.name is not None else "" + return f"" + + +class DynamicResource(Resource): + + DYN = re.compile(r"\{(?P[_a-zA-Z][_a-zA-Z0-9]*)\}") + DYN_WITH_RE = re.compile(r"\{(?P[_a-zA-Z][_a-zA-Z0-9]*):(?P.+)\}") + GOOD = r"[^{}/]+" + + def __init__(self, path: str, *, name: Optional[str] = None) -> None: + super().__init__(name=name) + pattern = "" + formatter = "" + for part in ROUTE_RE.split(path): + match = self.DYN.fullmatch(part) + if match: + pattern += "(?P<{}>{})".format(match.group("var"), self.GOOD) + formatter += "{" + match.group("var") + "}" + continue + + match = self.DYN_WITH_RE.fullmatch(part) + if match: + pattern += "(?P<{var}>{re})".format(**match.groupdict()) + formatter += "{" + match.group("var") + "}" + continue + + if "{" in part or "}" in part: + raise ValueError(f"Invalid path '{path}'['{part}']") + + part = _requote_path(part) + formatter += part + pattern += re.escape(part) + + try: + compiled = re.compile(pattern) + except re.error as exc: + raise ValueError(f"Bad pattern '{pattern}': {exc}") from None + assert compiled.pattern.startswith(PATH_SEP) + assert formatter.startswith("/") + self._pattern = compiled + self._formatter = formatter + + @property + def canonical(self) -> str: + return self._formatter + + def add_prefix(self, prefix: str) -> None: + assert prefix.startswith("/") + assert not prefix.endswith("/") + assert len(prefix) > 1 + self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern) + self._formatter = prefix + self._formatter + + def _match(self, path: str) -> Optional[Dict[str, str]]: + match = self._pattern.fullmatch(path) + if match is None: + return None + else: + return { + key: _unquote_path(value) for key, value in match.groupdict().items() + } + + def raw_match(self, path: str) -> bool: + return self._formatter == path + + def get_info(self) -> _InfoDict: + return {"formatter": self._formatter, "pattern": self._pattern} + + def url_for(self, **parts: str) -> URL: + url = self._formatter.format_map({k: _quote_path(v) for k, v in parts.items()}) + return URL.build(path=url, encoded=True) + + def __repr__(self) -> str: + name = "'" + self.name + "' " if self.name is not None else "" + return "".format( + name=name, formatter=self._formatter + ) + + +class PrefixResource(AbstractResource): + def __init__(self, prefix: str, *, name: Optional[str] = None) -> None: + assert not prefix or prefix.startswith("/"), prefix + assert prefix in ("", "/") or not prefix.endswith("/"), prefix + super().__init__(name=name) + self._prefix = _requote_path(prefix) + self._prefix2 = self._prefix + "/" + + @property + def canonical(self) -> str: + return self._prefix + + def add_prefix(self, prefix: str) -> None: + assert prefix.startswith("/") + assert not prefix.endswith("/") + assert len(prefix) > 1 + self._prefix = prefix + self._prefix + self._prefix2 = self._prefix + "/" + + def raw_match(self, prefix: str) -> bool: + return False + + # TODO: impl missing abstract methods + + +class 
StaticResource(PrefixResource): + VERSION_KEY = "v" + + def __init__( + self, + prefix: str, + directory: PathLike, + *, + name: Optional[str] = None, + expect_handler: Optional[_ExpectHandler] = None, + chunk_size: int = 256 * 1024, + show_index: bool = False, + follow_symlinks: bool = False, + append_version: bool = False, + ) -> None: + super().__init__(prefix, name=name) + try: + directory = Path(directory).expanduser().resolve(strict=True) + except FileNotFoundError as error: + raise ValueError(f"'{directory}' does not exist") from error + if not directory.is_dir(): + raise ValueError(f"'{directory}' is not a directory") + self._directory = directory + self._show_index = show_index + self._chunk_size = chunk_size + self._follow_symlinks = follow_symlinks + self._expect_handler = expect_handler + self._append_version = append_version + + self._routes = { + "GET": ResourceRoute( + "GET", self._handle, self, expect_handler=expect_handler + ), + "HEAD": ResourceRoute( + "HEAD", self._handle, self, expect_handler=expect_handler + ), + } + + def url_for( # type: ignore[override] + self, + *, + filename: PathLike, + append_version: Optional[bool] = None, + ) -> URL: + if append_version is None: + append_version = self._append_version + filename = str(filename).lstrip("/") + + url = URL.build(path=self._prefix, encoded=True) + # filename is not encoded + if YARL_VERSION < (1, 6): + url = url / filename.replace("%", "%25") + else: + url = url / filename + + if append_version: + unresolved_path = self._directory.joinpath(filename) + try: + if self._follow_symlinks: + normalized_path = Path(os.path.normpath(unresolved_path)) + normalized_path.relative_to(self._directory) + filepath = normalized_path.resolve() + else: + filepath = unresolved_path.resolve() + filepath.relative_to(self._directory) + except (ValueError, FileNotFoundError): + # ValueError for case when path point to symlink + # with follow_symlinks is False + return url # relatively safe + if filepath.is_file(): + # TODO cache file content + # with file watcher for cache invalidation + with filepath.open("rb") as f: + file_bytes = f.read() + h = self._get_file_hash(file_bytes) + url = url.with_query({self.VERSION_KEY: h}) + return url + return url + + @staticmethod + def _get_file_hash(byte_array: bytes) -> str: + m = hashlib.sha256() # todo sha256 can be configurable param + m.update(byte_array) + b64 = base64.urlsafe_b64encode(m.digest()) + return b64.decode("ascii") + + def get_info(self) -> _InfoDict: + return { + "directory": self._directory, + "prefix": self._prefix, + "routes": self._routes, + } + + def set_options_route(self, handler: Handler) -> None: + if "OPTIONS" in self._routes: + raise RuntimeError("OPTIONS route was set already") + self._routes["OPTIONS"] = ResourceRoute( + "OPTIONS", handler, self, expect_handler=self._expect_handler + ) + + async def resolve(self, request: Request) -> _Resolve: + path = request.rel_url.raw_path + method = request.method + allowed_methods = set(self._routes) + if not path.startswith(self._prefix2) and path != self._prefix: + return None, set() + + if method not in allowed_methods: + return None, allowed_methods + + match_dict = {"filename": _unquote_path(path[len(self._prefix) + 1 :])} + return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods) + + def __len__(self) -> int: + return len(self._routes) + + def __iter__(self) -> Iterator[AbstractRoute]: + return iter(self._routes.values()) + + async def _handle(self, request: Request) -> StreamResponse: + rel_url = 
request.match_info["filename"] + filename = Path(rel_url) + if filename.anchor: + # rel_url is an absolute name like + # /static/\\machine_name\c$ or /static/D:\path + # where the static dir is totally different + raise HTTPForbidden() + + unresolved_path = self._directory.joinpath(filename) + loop = asyncio.get_running_loop() + return await loop.run_in_executor( + None, self._resolve_path_to_response, unresolved_path + ) + + def _resolve_path_to_response(self, unresolved_path: Path) -> StreamResponse: + """Take the unresolved path and query the file system to form a response.""" + # Check for access outside the root directory. For follow symlinks, URI + # cannot traverse out, but symlinks can. Otherwise, no access outside + # root is permitted. + try: + if self._follow_symlinks: + normalized_path = Path(os.path.normpath(unresolved_path)) + normalized_path.relative_to(self._directory) + file_path = normalized_path.resolve() + else: + file_path = unresolved_path.resolve() + file_path.relative_to(self._directory) + except (ValueError, *CIRCULAR_SYMLINK_ERROR) as error: + # ValueError is raised for the relative check. Circular symlinks + # raise here on resolving for python < 3.13. + raise HTTPNotFound() from error + + # if path is a directory, return the contents if permitted. Note the + # directory check will raise if a segment is not readable. + try: + if file_path.is_dir(): + if self._show_index: + return Response( + text=self._directory_as_html(file_path), + content_type="text/html", + ) + else: + raise HTTPForbidden() + except PermissionError as error: + raise HTTPForbidden() from error + + # Return the file response, which handles all other checks. + return FileResponse(file_path, chunk_size=self._chunk_size) + + def _directory_as_html(self, dir_path: Path) -> str: + """returns directory's index as html.""" + assert dir_path.is_dir() + + relative_path_to_dir = dir_path.relative_to(self._directory).as_posix() + index_of = f"Index of /{html_escape(relative_path_to_dir)}" + h1 = f"

<h1>{index_of}</h1>" + + index_list = [] + dir_index = dir_path.iterdir() + for _file in sorted(dir_index): + # show file url as relative to static path + rel_path = _file.relative_to(self._directory).as_posix() + quoted_file_url = _quote_path(f"{self._prefix}/{rel_path}") + + # if file is a directory, add '/' to the end of the name + if _file.is_dir(): + file_name = f"{_file.name}/" + else: + file_name = _file.name + + index_list.append( + f'<li><a href="{quoted_file_url}">{html_escape(file_name)}</a></li>' + ) + ul = "<ul>\n{}\n</ul>
    ".format("\n".join(index_list)) + body = f"\n{h1}\n{ul}\n" + + head_str = f"\n{index_of}\n" + html = f"\n{head_str}\n{body}\n" + + return html + + def __repr__(self) -> str: + name = "'" + self.name + "'" if self.name is not None else "" + return " {directory!r}>".format( + name=name, path=self._prefix, directory=self._directory + ) + + +class PrefixedSubAppResource(PrefixResource): + def __init__(self, prefix: str, app: "Application") -> None: + super().__init__(prefix) + self._app = app + self._add_prefix_to_resources(prefix) + + def add_prefix(self, prefix: str) -> None: + super().add_prefix(prefix) + self._add_prefix_to_resources(prefix) + + def _add_prefix_to_resources(self, prefix: str) -> None: + router = self._app.router + for resource in router.resources(): + # Since the canonical path of a resource is about + # to change, we need to unindex it and then reindex + router.unindex_resource(resource) + resource.add_prefix(prefix) + router.index_resource(resource) + + def url_for(self, *args: str, **kwargs: str) -> URL: + raise RuntimeError(".url_for() is not supported " "by sub-application root") + + def get_info(self) -> _InfoDict: + return {"app": self._app, "prefix": self._prefix} + + async def resolve(self, request: Request) -> _Resolve: + match_info = await self._app.router.resolve(request) + match_info.add_app(self._app) + if isinstance(match_info.http_exception, HTTPMethodNotAllowed): + methods = match_info.http_exception.allowed_methods + else: + methods = set() + return match_info, methods + + def __len__(self) -> int: + return len(self._app.router.routes()) + + def __iter__(self) -> Iterator[AbstractRoute]: + return iter(self._app.router.routes()) + + def __repr__(self) -> str: + return " {app!r}>".format( + prefix=self._prefix, app=self._app + ) + + +class AbstractRuleMatching(abc.ABC): + @abc.abstractmethod # pragma: no branch + async def match(self, request: Request) -> bool: + """Return bool if the request satisfies the criteria""" + + @abc.abstractmethod # pragma: no branch + def get_info(self) -> _InfoDict: + """Return a dict with additional info useful for introspection""" + + @property + @abc.abstractmethod # pragma: no branch + def canonical(self) -> str: + """Return a str""" + + +class Domain(AbstractRuleMatching): + re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(? None: + super().__init__() + self._domain = self.validation(domain) + + @property + def canonical(self) -> str: + return self._domain + + def validation(self, domain: str) -> str: + if not isinstance(domain, str): + raise TypeError("Domain must be str") + domain = domain.rstrip(".").lower() + if not domain: + raise ValueError("Domain cannot be empty") + elif "://" in domain: + raise ValueError("Scheme not supported") + url = URL("http://" + domain) + assert url.raw_host is not None + if not all(self.re_part.fullmatch(x) for x in url.raw_host.split(".")): + raise ValueError("Domain not valid") + if url.port == 80: + return url.raw_host + return f"{url.raw_host}:{url.port}" + + async def match(self, request: Request) -> bool: + host = request.headers.get(hdrs.HOST) + if not host: + return False + return self.match_domain(host) + + def match_domain(self, host: str) -> bool: + return host.lower() == self._domain + + def get_info(self) -> _InfoDict: + return {"domain": self._domain} + + +class MaskDomain(Domain): + re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(? 
None: + super().__init__(domain) + mask = self._domain.replace(".", r"\.").replace("*", ".*") + self._mask = re.compile(mask) + + @property + def canonical(self) -> str: + return self._mask.pattern + + def match_domain(self, host: str) -> bool: + return self._mask.fullmatch(host) is not None + + +class MatchedSubAppResource(PrefixedSubAppResource): + def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None: + AbstractResource.__init__(self) + self._prefix = "" + self._app = app + self._rule = rule + + @property + def canonical(self) -> str: + return self._rule.canonical + + def get_info(self) -> _InfoDict: + return {"app": self._app, "rule": self._rule} + + async def resolve(self, request: Request) -> _Resolve: + if not await self._rule.match(request): + return None, set() + match_info = await self._app.router.resolve(request) + match_info.add_app(self._app) + if isinstance(match_info.http_exception, HTTPMethodNotAllowed): + methods = match_info.http_exception.allowed_methods + else: + methods = set() + return match_info, methods + + def __repr__(self) -> str: + return " {app!r}>" "".format(app=self._app) + + +class ResourceRoute(AbstractRoute): + """A route with resource""" + + def __init__( + self, + method: str, + handler: Union[Handler, Type[AbstractView]], + resource: AbstractResource, + *, + expect_handler: Optional[_ExpectHandler] = None, + ) -> None: + super().__init__( + method, handler, expect_handler=expect_handler, resource=resource + ) + + def __repr__(self) -> str: + return " {handler!r}".format( + method=self.method, resource=self._resource, handler=self.handler + ) + + @property + def name(self) -> Optional[str]: + if self._resource is None: + return None + return self._resource.name + + def url_for(self, *args: str, **kwargs: str) -> URL: + """Construct url for route with additional params.""" + assert self._resource is not None + return self._resource.url_for(*args, **kwargs) + + def get_info(self) -> _InfoDict: + assert self._resource is not None + return self._resource.get_info() + + +class SystemRoute(AbstractRoute): + def __init__(self, http_exception: HTTPException) -> None: + super().__init__(hdrs.METH_ANY, self._handle) + self._http_exception = http_exception + + def url_for(self, *args: str, **kwargs: str) -> URL: + raise RuntimeError(".url_for() is not allowed for SystemRoute") + + @property + def name(self) -> Optional[str]: + return None + + def get_info(self) -> _InfoDict: + return {"http_exception": self._http_exception} + + async def _handle(self, request: Request) -> StreamResponse: + raise self._http_exception + + @property + def status(self) -> int: + return self._http_exception.status + + @property + def reason(self) -> str: + return self._http_exception.reason + + def __repr__(self) -> str: + return "".format(self=self) + + +class View(AbstractView): + async def _iter(self) -> StreamResponse: + if self.request.method not in hdrs.METH_ALL: + self._raise_allowed_methods() + method: Optional[Callable[[], Awaitable[StreamResponse]]] + method = getattr(self, self.request.method.lower(), None) + if method is None: + self._raise_allowed_methods() + ret = await method() + assert isinstance(ret, StreamResponse) + return ret + + def __await__(self) -> Generator[Any, None, StreamResponse]: + return self._iter().__await__() + + def _raise_allowed_methods(self) -> NoReturn: + allowed_methods = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())} + raise HTTPMethodNotAllowed(self.request.method, allowed_methods) + + +class ResourcesView(Sized, 
Iterable[AbstractResource], Container[AbstractResource]): + def __init__(self, resources: List[AbstractResource]) -> None: + self._resources = resources + + def __len__(self) -> int: + return len(self._resources) + + def __iter__(self) -> Iterator[AbstractResource]: + yield from self._resources + + def __contains__(self, resource: object) -> bool: + return resource in self._resources + + +class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]): + def __init__(self, resources: List[AbstractResource]): + self._routes: List[AbstractRoute] = [] + for resource in resources: + for route in resource: + self._routes.append(route) + + def __len__(self) -> int: + return len(self._routes) + + def __iter__(self) -> Iterator[AbstractRoute]: + yield from self._routes + + def __contains__(self, route: object) -> bool: + return route in self._routes + + +class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]): + + NAME_SPLIT_RE = re.compile(r"[.:-]") + + def __init__(self) -> None: + super().__init__() + self._resources: List[AbstractResource] = [] + self._named_resources: Dict[str, AbstractResource] = {} + self._resource_index: dict[str, list[AbstractResource]] = {} + self._matched_sub_app_resources: List[MatchedSubAppResource] = [] + + async def resolve(self, request: Request) -> UrlMappingMatchInfo: + resource_index = self._resource_index + allowed_methods: Set[str] = set() + + # Walk the url parts looking for candidates. We walk the url backwards + # to ensure the most explicit match is found first. If there are multiple + # candidates for a given url part because there are multiple resources + # registered for the same canonical path, we resolve them in a linear + # fashion to ensure registration order is respected. + url_part = request.rel_url.raw_path + while url_part: + for candidate in resource_index.get(url_part, ()): + match_dict, allowed = await candidate.resolve(request) + if match_dict is not None: + return match_dict + else: + allowed_methods |= allowed + if url_part == "/": + break + url_part = url_part.rpartition("/")[0] or "/" + + # + # We didn't find any candidates, so we'll try the matched sub-app + # resources which we have to walk in a linear fashion because they + # have regex/wildcard match rules and we cannot index them. 
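+ # (The index probe above walked the URL from most to least specific,
+ # e.g. "/a/b/c" -> "/a/b" -> "/a" -> "/".)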
+ # + # For most cases we do not expect there to be many of these since + # currently they are only added by `add_domain` + # + for resource in self._matched_sub_app_resources: + match_dict, allowed = await resource.resolve(request) + if match_dict is not None: + return match_dict + else: + allowed_methods |= allowed + + if allowed_methods: + return MatchInfoError(HTTPMethodNotAllowed(request.method, allowed_methods)) + + return MatchInfoError(HTTPNotFound()) + + def __iter__(self) -> Iterator[str]: + return iter(self._named_resources) + + def __len__(self) -> int: + return len(self._named_resources) + + def __contains__(self, resource: object) -> bool: + return resource in self._named_resources + + def __getitem__(self, name: str) -> AbstractResource: + return self._named_resources[name] + + def resources(self) -> ResourcesView: + return ResourcesView(self._resources) + + def routes(self) -> RoutesView: + return RoutesView(self._resources) + + def named_resources(self) -> Mapping[str, AbstractResource]: + return MappingProxyType(self._named_resources) + + def register_resource(self, resource: AbstractResource) -> None: + assert isinstance( + resource, AbstractResource + ), f"Instance of AbstractResource class is required, got {resource!r}" + if self.frozen: + raise RuntimeError("Cannot register a resource into frozen router.") + + name = resource.name + + if name is not None: + parts = self.NAME_SPLIT_RE.split(name) + for part in parts: + if keyword.iskeyword(part): + raise ValueError( + f"Incorrect route name {name!r}, " + "python keywords cannot be used " + "for route name" + ) + if not part.isidentifier(): + raise ValueError( + "Incorrect route name {!r}, " + "the name should be a sequence of " + "python identifiers separated " + "by dash, dot or column".format(name) + ) + if name in self._named_resources: + raise ValueError( + "Duplicate {!r}, " + "already handled by {!r}".format(name, self._named_resources[name]) + ) + self._named_resources[name] = resource + self._resources.append(resource) + + if isinstance(resource, MatchedSubAppResource): + # We cannot index match sub-app resources because they have match rules + self._matched_sub_app_resources.append(resource) + else: + self.index_resource(resource) + + def _get_resource_index_key(self, resource: AbstractResource) -> str: + """Return a key to index the resource in the resource index.""" + if "{" in (index_key := resource.canonical): + # strip at the first { to allow for variables, and than + # rpartition at / to allow for variable parts in the path + # For example if the canonical path is `/core/locations{tail:.*}` + # the index key will be `/core` since index is based on the + # url parts split by `/` + index_key = index_key.partition("{")[0].rpartition("/")[0] + return index_key.rstrip("/") or "/" + + def index_resource(self, resource: AbstractResource) -> None: + """Add a resource to the resource index.""" + resource_key = self._get_resource_index_key(resource) + # There may be multiple resources for a canonical path + # so we keep them in a list to ensure that registration + # order is respected. 
+ self._resource_index.setdefault(resource_key, []).append(resource) + + def unindex_resource(self, resource: AbstractResource) -> None: + """Remove a resource from the resource index.""" + resource_key = self._get_resource_index_key(resource) + self._resource_index[resource_key].remove(resource) + + def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource: + if path and not path.startswith("/"): + raise ValueError("path should be started with / or be empty") + # Reuse last added resource if path and name are the same + if self._resources: + resource = self._resources[-1] + if resource.name == name and resource.raw_match(path): + return cast(Resource, resource) + if not ("{" in path or "}" in path or ROUTE_RE.search(path)): + resource = PlainResource(_requote_path(path), name=name) + self.register_resource(resource) + return resource + resource = DynamicResource(path, name=name) + self.register_resource(resource) + return resource + + def add_route( + self, + method: str, + path: str, + handler: Union[Handler, Type[AbstractView]], + *, + name: Optional[str] = None, + expect_handler: Optional[_ExpectHandler] = None, + ) -> AbstractRoute: + resource = self.add_resource(path, name=name) + return resource.add_route(method, handler, expect_handler=expect_handler) + + def add_static( + self, + prefix: str, + path: PathLike, + *, + name: Optional[str] = None, + expect_handler: Optional[_ExpectHandler] = None, + chunk_size: int = 256 * 1024, + show_index: bool = False, + follow_symlinks: bool = False, + append_version: bool = False, + ) -> AbstractResource: + """Add static files view. + + prefix - url prefix + path - folder with files + + """ + assert prefix.startswith("/") + if prefix.endswith("/"): + prefix = prefix[:-1] + resource = StaticResource( + prefix, + path, + name=name, + expect_handler=expect_handler, + chunk_size=chunk_size, + show_index=show_index, + follow_symlinks=follow_symlinks, + append_version=append_version, + ) + self.register_resource(resource) + return resource + + def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method HEAD.""" + return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs) + + def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method OPTIONS.""" + return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs) + + def add_get( + self, + path: str, + handler: Handler, + *, + name: Optional[str] = None, + allow_head: bool = True, + **kwargs: Any, + ) -> AbstractRoute: + """Shortcut for add_route with method GET. + + If allow_head is true, another + route is added allowing head requests to the same endpoint. 
+ """ + resource = self.add_resource(path, name=name) + if allow_head: + resource.add_route(hdrs.METH_HEAD, handler, **kwargs) + return resource.add_route(hdrs.METH_GET, handler, **kwargs) + + def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method POST.""" + return self.add_route(hdrs.METH_POST, path, handler, **kwargs) + + def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method PUT.""" + return self.add_route(hdrs.METH_PUT, path, handler, **kwargs) + + def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method PATCH.""" + return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs) + + def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method DELETE.""" + return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs) + + def add_view( + self, path: str, handler: Type[AbstractView], **kwargs: Any + ) -> AbstractRoute: + """Shortcut for add_route with ANY methods for a class-based view.""" + return self.add_route(hdrs.METH_ANY, path, handler, **kwargs) + + def freeze(self) -> None: + super().freeze() + for resource in self._resources: + resource.freeze() + + def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: + """Append routes to route table. + + Parameter should be a sequence of RouteDef objects. + + Returns a list of registered AbstractRoute instances. + """ + registered_routes = [] + for route_def in routes: + registered_routes.extend(route_def.register(self)) + return registered_routes + + +def _quote_path(value: str) -> str: + if YARL_VERSION < (1, 6): + value = value.replace("%", "%25") + return URL.build(path=value, encoded=False).raw_path + + +def _unquote_path(value: str) -> str: + return URL.build(path=value, encoded=True).path + + +def _requote_path(value: str) -> str: + # Quote non-ascii characters and other characters which must be quoted, + # but preserve existing %-sequences. + result = _quote_path(value) + if "%" in value: + result = result.replace("%25", "%") + return result diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_ws.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_ws.py new file mode 100644 index 000000000..ba3332715 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/web_ws.py @@ -0,0 +1,584 @@ +import asyncio +import base64 +import binascii +import hashlib +import json +import sys +from typing import Any, Final, Iterable, Optional, Tuple, cast + +import attr +from multidict import CIMultiDict + +from . 
import hdrs +from .abc import AbstractStreamWriter +from .helpers import calculate_timeout_when, set_exception, set_result +from .http import ( + WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE, + WS_KEY, + WebSocketError, + WebSocketReader, + WebSocketWriter, + WSCloseCode, + WSMessage, + WSMsgType as WSMsgType, + ws_ext_gen, + ws_ext_parse, +) +from .log import ws_logger +from .streams import EofStream, FlowControlDataQueue +from .typedefs import JSONDecoder, JSONEncoder +from .web_exceptions import HTTPBadRequest, HTTPException +from .web_request import BaseRequest +from .web_response import StreamResponse + +if sys.version_info >= (3, 11): + import asyncio as async_timeout +else: + import async_timeout + +__all__ = ( + "WebSocketResponse", + "WebSocketReady", + "WSMsgType", +) + +THRESHOLD_CONNLOST_ACCESS: Final[int] = 5 + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class WebSocketReady: + ok: bool + protocol: Optional[str] + + def __bool__(self) -> bool: + return self.ok + + +class WebSocketResponse(StreamResponse): + + _length_check = False + + def __init__( + self, + *, + timeout: float = 10.0, + receive_timeout: Optional[float] = None, + autoclose: bool = True, + autoping: bool = True, + heartbeat: Optional[float] = None, + protocols: Iterable[str] = (), + compress: bool = True, + max_msg_size: int = 4 * 1024 * 1024, + ) -> None: + super().__init__(status=101) + self._protocols = protocols + self._ws_protocol: Optional[str] = None + self._writer: Optional[WebSocketWriter] = None + self._reader: Optional[FlowControlDataQueue[WSMessage]] = None + self._closed = False + self._closing = False + self._conn_lost = 0 + self._close_code: Optional[int] = None + self._loop: Optional[asyncio.AbstractEventLoop] = None + self._waiting: bool = False + self._close_wait: Optional[asyncio.Future[None]] = None + self._exception: Optional[BaseException] = None + self._timeout = timeout + self._receive_timeout = receive_timeout + self._autoclose = autoclose + self._autoping = autoping + self._heartbeat = heartbeat + self._heartbeat_when = 0.0 + self._heartbeat_cb: Optional[asyncio.TimerHandle] = None + if heartbeat is not None: + self._pong_heartbeat = heartbeat / 2.0 + self._pong_response_cb: Optional[asyncio.TimerHandle] = None + self._compress = compress + self._max_msg_size = max_msg_size + self._ping_task: Optional[asyncio.Task[None]] = None + + def _cancel_heartbeat(self) -> None: + self._cancel_pong_response_cb() + if self._heartbeat_cb is not None: + self._heartbeat_cb.cancel() + self._heartbeat_cb = None + if self._ping_task is not None: + self._ping_task.cancel() + self._ping_task = None + + def _cancel_pong_response_cb(self) -> None: + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = None + + def _reset_heartbeat(self) -> None: + if self._heartbeat is None: + return + self._cancel_pong_response_cb() + req = self._req + timeout_ceil_threshold = ( + req._protocol._timeout_ceil_threshold if req is not None else 5 + ) + loop = self._loop + assert loop is not None + now = loop.time() + when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold) + self._heartbeat_when = when + if self._heartbeat_cb is None: + # We do not cancel the previous heartbeat_cb here because + # it generates a significant amount of TimerHandle churn + # which causes asyncio to rebuild the heap frequently. + # Instead _send_heartbeat() will reschedule the next + # heartbeat if it fires too early. 
+ self._heartbeat_cb = loop.call_at(when, self._send_heartbeat) + + def _send_heartbeat(self) -> None: + self._heartbeat_cb = None + loop = self._loop + assert loop is not None and self._writer is not None + now = loop.time() + if now < self._heartbeat_when: + # Heartbeat fired too early, reschedule + self._heartbeat_cb = loop.call_at( + self._heartbeat_when, self._send_heartbeat + ) + return + + req = self._req + timeout_ceil_threshold = ( + req._protocol._timeout_ceil_threshold if req is not None else 5 + ) + when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold) + self._cancel_pong_response_cb() + self._pong_response_cb = loop.call_at(when, self._pong_not_received) + + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to send the ping + # immediately to avoid having to schedule + # the task on the event loop. + ping_task = asyncio.Task(self._writer.ping(), loop=loop, eager_start=True) + else: + ping_task = loop.create_task(self._writer.ping()) + + if not ping_task.done(): + self._ping_task = ping_task + ping_task.add_done_callback(self._ping_task_done) + + def _ping_task_done(self, task: "asyncio.Task[None]") -> None: + """Callback for when the ping task completes.""" + self._ping_task = None + + def _pong_not_received(self) -> None: + if self._req is not None and self._req.transport is not None: + self._set_closed() + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + self._exception = asyncio.TimeoutError() + + def _set_closed(self) -> None: + """Set the connection to closed. + + Cancel any heartbeat timers and set the closed flag. + """ + self._closed = True + self._cancel_heartbeat() + + async def prepare(self, request: BaseRequest) -> AbstractStreamWriter: + # make pre-check to don't hide it by do_handshake() exceptions + if self._payload_writer is not None: + return self._payload_writer + + protocol, writer = self._pre_start(request) + payload_writer = await super().prepare(request) + assert payload_writer is not None + self._post_start(request, protocol, writer) + await payload_writer.drain() + return payload_writer + + def _handshake( + self, request: BaseRequest + ) -> Tuple["CIMultiDict[str]", str, bool, bool]: + headers = request.headers + if "websocket" != headers.get(hdrs.UPGRADE, "").lower().strip(): + raise HTTPBadRequest( + text=( + "No WebSocket UPGRADE hdr: {}\n Can " + '"Upgrade" only to "WebSocket".' 
+ ).format(headers.get(hdrs.UPGRADE)) + ) + + if "upgrade" not in headers.get(hdrs.CONNECTION, "").lower(): + raise HTTPBadRequest( + text="No CONNECTION upgrade hdr: {}".format( + headers.get(hdrs.CONNECTION) + ) + ) + + # find common sub-protocol between client and server + protocol = None + if hdrs.SEC_WEBSOCKET_PROTOCOL in headers: + req_protocols = [ + str(proto.strip()) + for proto in headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") + ] + + for proto in req_protocols: + if proto in self._protocols: + protocol = proto + break + else: + # No overlap found: Return no protocol as per spec + ws_logger.warning( + "Client protocols %r don’t overlap server-known ones %r", + req_protocols, + self._protocols, + ) + + # check supported version + version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, "") + if version not in ("13", "8", "7"): + raise HTTPBadRequest(text=f"Unsupported version: {version}") + + # check client handshake for validity + key = headers.get(hdrs.SEC_WEBSOCKET_KEY) + try: + if not key or len(base64.b64decode(key)) != 16: + raise HTTPBadRequest(text=f"Handshake error: {key!r}") + except binascii.Error: + raise HTTPBadRequest(text=f"Handshake error: {key!r}") from None + + accept_val = base64.b64encode( + hashlib.sha1(key.encode() + WS_KEY).digest() + ).decode() + response_headers = CIMultiDict( + { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: accept_val, + } + ) + + notakeover = False + compress = 0 + if self._compress: + extensions = headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) + # Server side always get return with no exception. + # If something happened, just drop compress extension + compress, notakeover = ws_ext_parse(extensions, isserver=True) + if compress: + enabledext = ws_ext_gen( + compress=compress, isserver=True, server_notakeover=notakeover + ) + response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext + + if protocol: + response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol + return ( + response_headers, + protocol, + compress, + notakeover, + ) # type: ignore[return-value] + + def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]: + self._loop = request._loop + + headers, protocol, compress, notakeover = self._handshake(request) + + self.set_status(101) + self.headers.update(headers) + self.force_close() + self._compress = compress + transport = request._protocol.transport + assert transport is not None + writer = WebSocketWriter( + request._protocol, transport, compress=compress, notakeover=notakeover + ) + + return protocol, writer + + def _post_start( + self, request: BaseRequest, protocol: str, writer: WebSocketWriter + ) -> None: + self._ws_protocol = protocol + self._writer = writer + + self._reset_heartbeat() + + loop = self._loop + assert loop is not None + self._reader = FlowControlDataQueue(request._protocol, 2**16, loop=loop) + request.protocol.set_parser( + WebSocketReader(self._reader, self._max_msg_size, compress=self._compress) + ) + # disable HTTP keepalive for WebSocket + request.protocol.keep_alive(False) + + def can_prepare(self, request: BaseRequest) -> WebSocketReady: + if self._writer is not None: + raise RuntimeError("Already started") + try: + _, protocol, _, _ = self._handshake(request) + except HTTPException: + return WebSocketReady(False, None) + else: + return WebSocketReady(True, protocol) + + @property + def closed(self) -> bool: + return self._closed + + @property + def close_code(self) -> Optional[int]: + return self._close_code + + @property + def ws_protocol(self) 
-> Optional[str]: + return self._ws_protocol + + @property + def compress(self) -> bool: + return self._compress + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """Get optional transport information. + + If no value associated with ``name`` is found, ``default`` is returned. + """ + writer = self._writer + if writer is None: + return default + transport = writer.transport + if transport is None: + return default + return transport.get_extra_info(name, default) + + def exception(self) -> Optional[BaseException]: + return self._exception + + async def ping(self, message: bytes = b"") -> None: + if self._writer is None: + raise RuntimeError("Call .prepare() first") + await self._writer.ping(message) + + async def pong(self, message: bytes = b"") -> None: + # unsolicited pong + if self._writer is None: + raise RuntimeError("Call .prepare() first") + await self._writer.pong(message) + + async def send_str(self, data: str, compress: Optional[bool] = None) -> None: + if self._writer is None: + raise RuntimeError("Call .prepare() first") + if not isinstance(data, str): + raise TypeError("data argument must be str (%r)" % type(data)) + await self._writer.send(data, binary=False, compress=compress) + + async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None: + if self._writer is None: + raise RuntimeError("Call .prepare() first") + if not isinstance(data, (bytes, bytearray, memoryview)): + raise TypeError("data argument must be byte-ish (%r)" % type(data)) + await self._writer.send(data, binary=True, compress=compress) + + async def send_json( + self, + data: Any, + compress: Optional[bool] = None, + *, + dumps: JSONEncoder = json.dumps, + ) -> None: + await self.send_str(dumps(data), compress=compress) + + async def write_eof(self) -> None: # type: ignore[override] + if self._eof_sent: + return + if self._payload_writer is None: + raise RuntimeError("Response has not been started") + + await self.close() + self._eof_sent = True + + async def close( + self, *, code: int = WSCloseCode.OK, message: bytes = b"", drain: bool = True + ) -> bool: + """Close websocket connection.""" + if self._writer is None: + raise RuntimeError("Call .prepare() first") + + self._cancel_heartbeat() + reader = self._reader + assert reader is not None + + # we need to break `receive()` cycle first, + # `close()` may be called from different task + if self._waiting and not self._closed: + if not self._close_wait: + assert self._loop is not None + self._close_wait = self._loop.create_future() + reader.feed_data(WS_CLOSING_MESSAGE, 0) + await self._close_wait + + if self._closed: + return False + + self._set_closed() + try: + await self._writer.close(code, message) + writer = self._payload_writer + assert writer is not None + if drain: + await writer.drain() + except (asyncio.CancelledError, asyncio.TimeoutError): + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + raise + except Exception as exc: + self._exception = exc + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + return True + + if self._closing: + self._close_transport() + return True + + reader = self._reader + assert reader is not None + try: + async with async_timeout.timeout(self._timeout): + msg = await reader.read() + except asyncio.CancelledError: + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + raise + except Exception as exc: + self._exception = exc + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + return True + + if msg.type is WSMsgType.CLOSE: + 
self._set_code_close_transport(msg.data) + return True + + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + self._exception = asyncio.TimeoutError() + return True + + def _set_closing(self, code: WSCloseCode) -> None: + """Set the close code and mark the connection as closing.""" + self._closing = True + self._close_code = code + self._cancel_heartbeat() + + def _set_code_close_transport(self, code: WSCloseCode) -> None: + """Set the close code and close the transport.""" + self._close_code = code + self._close_transport() + + def _close_transport(self) -> None: + """Close the transport.""" + if self._req is not None and self._req.transport is not None: + self._req.transport.close() + + async def receive(self, timeout: Optional[float] = None) -> WSMessage: + if self._reader is None: + raise RuntimeError("Call .prepare() first") + + loop = self._loop + assert loop is not None + while True: + if self._waiting: + raise RuntimeError("Concurrent call to receive() is not allowed") + + if self._closed: + self._conn_lost += 1 + if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS: + raise RuntimeError("WebSocket connection is closed.") + return WS_CLOSED_MESSAGE + elif self._closing: + return WS_CLOSING_MESSAGE + + try: + self._waiting = True + try: + async with async_timeout.timeout(timeout or self._receive_timeout): + msg = await self._reader.read() + self._reset_heartbeat() + finally: + self._waiting = False + if self._close_wait: + set_result(self._close_wait, None) + except asyncio.TimeoutError: + raise + except EofStream: + self._close_code = WSCloseCode.OK + await self.close() + return WSMessage(WSMsgType.CLOSED, None, None) + except WebSocketError as exc: + self._close_code = exc.code + await self.close(code=exc.code) + return WSMessage(WSMsgType.ERROR, exc, None) + except Exception as exc: + self._exception = exc + self._set_closing(WSCloseCode.ABNORMAL_CLOSURE) + await self.close() + return WSMessage(WSMsgType.ERROR, exc, None) + + if msg.type is WSMsgType.CLOSE: + self._set_closing(msg.data) + # Could be closed while awaiting reader. + if not self._closed and self._autoclose: + # The client is likely going to close the + # connection out from under us so we do not + # want to drain any pending writes as it will + # likely result writing to a broken pipe. 
+ await self.close(drain=False) + elif msg.type is WSMsgType.CLOSING: + self._set_closing(WSCloseCode.OK) + elif msg.type is WSMsgType.PING and self._autoping: + await self.pong(msg.data) + continue + elif msg.type is WSMsgType.PONG and self._autoping: + continue + + return msg + + async def receive_str(self, *, timeout: Optional[float] = None) -> str: + msg = await self.receive(timeout) + if msg.type is not WSMsgType.TEXT: + raise TypeError( + "Received message {}:{!r} is not WSMsgType.TEXT".format( + msg.type, msg.data + ) + ) + return cast(str, msg.data) + + async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: + msg = await self.receive(timeout) + if msg.type is not WSMsgType.BINARY: + raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") + return cast(bytes, msg.data) + + async def receive_json( + self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None + ) -> Any: + data = await self.receive_str(timeout=timeout) + return loads(data) + + async def write(self, data: bytes) -> None: + raise RuntimeError("Cannot call .write() for websocket") + + def __aiter__(self) -> "WebSocketResponse": + return self + + async def __anext__(self) -> WSMessage: + msg = await self.receive() + if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): + raise StopAsyncIteration + return msg + + def _cancel(self, exc: BaseException) -> None: + # web_protocol calls this from connection_lost + # or when the server is shutting down. + self._closing = True + self._cancel_heartbeat() + if self._reader is not None: + set_exception(self._reader, exc) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/worker.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/worker.py new file mode 100644 index 000000000..9b3076973 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiohttp/worker.py @@ -0,0 +1,247 @@ +"""Async gunicorn worker for aiohttp.web""" + +import asyncio +import os +import re +import signal +import sys +from types import FrameType +from typing import Any, Awaitable, Callable, Optional, Union # noqa + +from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat +from gunicorn.workers import base + +from aiohttp import web + +from .helpers import set_result +from .web_app import Application +from .web_log import AccessLogger + +try: + import ssl + + SSLContext = ssl.SSLContext +except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] + + +__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker") + + +class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported] + + DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT + DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default + + def __init__(self, *args: Any, **kw: Any) -> None: # pragma: no cover + super().__init__(*args, **kw) + + self._task: Optional[asyncio.Task[None]] = None + self.exit_code = 0 + self._notify_waiter: Optional[asyncio.Future[bool]] = None + + def init_process(self) -> None: + # create new event_loop after fork + asyncio.get_event_loop().close() + + self.loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.loop) + + super().init_process() + + def run(self) -> None: + self._task = self.loop.create_task(self._run()) + + try: # ignore all finalization problems + self.loop.run_until_complete(self._task) + except Exception: + self.log.exception("Exception in gunicorn worker") + 
self.loop.run_until_complete(self.loop.shutdown_asyncgens()) + self.loop.close() + + sys.exit(self.exit_code) + + async def _run(self) -> None: + runner = None + if isinstance(self.wsgi, Application): + app = self.wsgi + elif asyncio.iscoroutinefunction(self.wsgi): + wsgi = await self.wsgi() + if isinstance(wsgi, web.AppRunner): + runner = wsgi + app = runner.app + else: + app = wsgi + else: + raise RuntimeError( + "wsgi app should be either Application or " + "async function returning Application, got {}".format(self.wsgi) + ) + + if runner is None: + access_log = self.log.access_log if self.cfg.accesslog else None + runner = web.AppRunner( + app, + logger=self.log, + keepalive_timeout=self.cfg.keepalive, + access_log=access_log, + access_log_format=self._get_valid_log_format( + self.cfg.access_log_format + ), + shutdown_timeout=self.cfg.graceful_timeout / 100 * 95, + ) + await runner.setup() + + ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None + + runner = runner + assert runner is not None + server = runner.server + assert server is not None + for sock in self.sockets: + site = web.SockSite( + runner, + sock, + ssl_context=ctx, + ) + await site.start() + + # If our parent changed then we shut down. + pid = os.getpid() + try: + while self.alive: # type: ignore[has-type] + self.notify() + + cnt = server.requests_count + if self.max_requests and cnt > self.max_requests: + self.alive = False + self.log.info("Max requests, shutting down: %s", self) + + elif pid == os.getpid() and self.ppid != os.getppid(): + self.alive = False + self.log.info("Parent changed, shutting down: %s", self) + else: + await self._wait_next_notify() + except BaseException: + pass + + await runner.cleanup() + + def _wait_next_notify(self) -> "asyncio.Future[bool]": + self._notify_waiter_done() + + loop = self.loop + assert loop is not None + self._notify_waiter = waiter = loop.create_future() + self.loop.call_later(1.0, self._notify_waiter_done, waiter) + + return waiter + + def _notify_waiter_done( + self, waiter: Optional["asyncio.Future[bool]"] = None + ) -> None: + if waiter is None: + waiter = self._notify_waiter + if waiter is not None: + set_result(waiter, True) + + if waiter is self._notify_waiter: + self._notify_waiter = None + + def init_signals(self) -> None: + # Set up signals through the event loop API. 
+ + self.loop.add_signal_handler( + signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None + ) + + self.loop.add_signal_handler( + signal.SIGTERM, self.handle_exit, signal.SIGTERM, None + ) + + self.loop.add_signal_handler( + signal.SIGINT, self.handle_quit, signal.SIGINT, None + ) + + self.loop.add_signal_handler( + signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None + ) + + self.loop.add_signal_handler( + signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None + ) + + self.loop.add_signal_handler( + signal.SIGABRT, self.handle_abort, signal.SIGABRT, None + ) + + # Don't let SIGTERM and SIGUSR1 disturb active requests + # by interrupting system calls + signal.siginterrupt(signal.SIGTERM, False) + signal.siginterrupt(signal.SIGUSR1, False) + # Reset signals so Gunicorn doesn't swallow subprocess return codes + # See: https://github.com/aio-libs/aiohttp/issues/6130 + + def handle_quit(self, sig: int, frame: Optional[FrameType]) -> None: + self.alive = False + + # worker_int callback + self.cfg.worker_int(self) + + # wakeup closing process + self._notify_waiter_done() + + def handle_abort(self, sig: int, frame: Optional[FrameType]) -> None: + self.alive = False + self.exit_code = 1 + self.cfg.worker_abort(self) + sys.exit(1) + + @staticmethod + def _create_ssl_context(cfg: Any) -> "SSLContext": + """Creates SSLContext instance for usage in asyncio.create_server. + + See ssl.SSLSocket.__init__ for more details. + """ + if ssl is None: # pragma: no cover + raise RuntimeError("SSL is not supported.") + + ctx = ssl.SSLContext(cfg.ssl_version) + ctx.load_cert_chain(cfg.certfile, cfg.keyfile) + ctx.verify_mode = cfg.cert_reqs + if cfg.ca_certs: + ctx.load_verify_locations(cfg.ca_certs) + if cfg.ciphers: + ctx.set_ciphers(cfg.ciphers) + return ctx + + def _get_valid_log_format(self, source_format: str) -> str: + if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT: + return self.DEFAULT_AIOHTTP_LOG_FORMAT + elif re.search(r"%\([^\)]+\)", source_format): + raise ValueError( + "Gunicorn's style options in form of `%(name)s` are not " + "supported for the log formatting. Please use aiohttp's " + "format specification to configure access log formatting: " + "http://docs.aiohttp.org/en/stable/logging.html" + "#format-specification" + ) + else: + return source_format + + +class GunicornUVLoopWebWorker(GunicornWebWorker): + def init_process(self) -> None: + import uvloop + + # Close any existing event loop before setting a + # new policy. + asyncio.get_event_loop().close() + + # Setup uvloop policy, so that every + # asyncio.get_event_loop() will create an instance + # of uvloop event loop. 
+ asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) + + super().init_process() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/INSTALLER b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/INSTALLER new file mode 100644 index 000000000..ce3133dc1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/INSTALLER @@ -0,0 +1 @@ +Poetry 2.2.1 \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/LICENSE b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/LICENSE new file mode 100644 index 000000000..7082a2d5b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2013-2019 Nikolay Kim and Andrew Svetlov + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/METADATA b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/METADATA new file mode 100644 index 000000000..fc964525f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/METADATA @@ -0,0 +1,128 @@ +Metadata-Version: 2.1 +Name: aiosignal +Version: 1.3.1 +Summary: aiosignal: a list of registered asynchronous callbacks +Home-page: https://github.com/aio-libs/aiosignal +Maintainer: aiohttp team +Maintainer-email: team@aiohttp.org +License: Apache 2.0 +Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby +Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiosignal/actions +Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiosignal +Project-URL: Docs: RTD, https://docs.aiosignal.org +Project-URL: GitHub: issues, https://github.com/aio-libs/aiosignal/issues +Project-URL: GitHub: repo, https://github.com/aio-libs/aiosignal +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Development Status :: 5 - Production/Stable +Classifier: Operating System :: POSIX +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Framework :: AsyncIO +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: frozenlist (>=1.1.0) + +========= +aiosignal +========= + +.. image:: https://github.com/aio-libs/aiosignal/workflows/CI/badge.svg + :target: https://github.com/aio-libs/aiosignal/actions?query=workflow%3ACI + :alt: GitHub status for master branch + +.. image:: https://codecov.io/gh/aio-libs/aiosignal/branch/master/graph/badge.svg + :target: https://codecov.io/gh/aio-libs/aiosignal + :alt: codecov.io status for master branch + +.. 
image:: https://badge.fury.io/py/aiosignal.svg
+    :target: https://pypi.org/project/aiosignal
+    :alt: Latest PyPI package version
+
+.. image:: https://readthedocs.org/projects/aiosignal/badge/?version=latest
+    :target: https://aiosignal.readthedocs.io/
+    :alt: Latest Read The Docs
+
+.. image:: https://img.shields.io/discourse/topics?server=https%3A%2F%2Faio-libs.discourse.group%2F
+    :target: https://aio-libs.discourse.group/
+    :alt: Discourse group for io-libs
+
+.. image:: https://badges.gitter.im/Join%20Chat.svg
+    :target: https://gitter.im/aio-libs/Lobby
+    :alt: Chat on Gitter
+
+Introduction
+============
+
+A project to manage callbacks in `asyncio` projects.
+
+``Signal`` is a list of registered asynchronous callbacks.
+
+The signal's life-cycle has two stages: after creation its content
+could be filled by using standard list operations: ``sig.append()``
+etc.
+
+After you call ``sig.freeze()`` the signal is *frozen*: adding, removing
+and dropping callbacks is forbidden.
+
+The only available operation is calling the previously registered
+callbacks by using ``await sig.send(data)``.
+
+For concrete usage examples see the `Signals
+<https://docs.aiohttp.org/en/stable/web_advanced.html#aiohttp-web-signals>`_
+section of the `Web Server Advanced
+<https://docs.aiohttp.org/en/stable/web_advanced.html>`_ chapter of the `aiohttp
+documentation`_.
+
+
+Installation
+------------
+
+::
+
+   $ pip install aiosignal
+
+The library requires Python 3.6 or newer.
+
+
+Documentation
+=============
+
+https://aiosignal.readthedocs.io/
+
+Communication channels
+======================
+
+*gitter chat* https://gitter.im/aio-libs/Lobby
+
+Requirements
+============
+
+- Python >= 3.6
+- frozenlist >= 1.0.0
+
+License
+=======
+
+``aiosignal`` is offered under the Apache 2 license.
+
+Source code
+===========
+
+The project is hosted on GitHub_
+
+Please file an issue in the `bug tracker
+<https://github.com/aio-libs/aiosignal/issues>`_ if you have found a bug
+or have some suggestions to improve the library.
+
+.. _GitHub: https://github.com/aio-libs/aiosignal
+.. _aiohttp documentation: https://docs.aiohttp.org/
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/RECORD b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/RECORD
new file mode 100644
index 000000000..13b3fc59c
--- /dev/null
+++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/RECORD
@@ -0,0 +1,9 @@
+aiosignal/__init__.py,sha256=zQNfFYRSd84bswvpFv8ZWjEr5DeYwV3LXbMSyo2222s,867
+aiosignal/__init__.pyi,sha256=xeCddYSS8fZAkz8S4HuKSR2IDe3N7RW_LKcXDPPA1Xk,311
+aiosignal/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+aiosignal-1.3.1.dist-info/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332
+aiosignal-1.3.1.dist-info/METADATA,sha256=c0HRnlYzfXKztZPTFDlPfygizTherhG5WdwXlvco0Ug,4008
+aiosignal-1.3.1.dist-info/WHEEL,sha256=ZL1lC_LiPDNRgDnOl2taCMc83aPEUZgHHv2h-LDgdiM,92
+aiosignal-1.3.1.dist-info/top_level.txt,sha256=z45aNOKGDdrI1roqZY3BGXQ22kJFPHBmVdwtLYLtXC0,10
+aiosignal-1.3.1.dist-info/INSTALLER,sha256=sz0Tnp99msIbbLB51q7U9nA6AvRKcsOeUKhrHH0Ulg4,12
+aiosignal-1.3.1.dist-info/RECORD,,
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/WHEEL b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/WHEEL
new file mode 100644
index 000000000..5e1f087ca
--- /dev/null
+++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.38.2)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/top_level.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/top_level.txt
new file mode 100644
index 000000000..ac6df3afe
--- /dev/null
+++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal-1.3.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+aiosignal
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal/__init__.py
new file mode 100644
index 000000000..3d288e6ed
--- /dev/null
+++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal/__init__.py
@@ -0,0 +1,36 @@
+from frozenlist import FrozenList
+
+__version__ = "1.3.1"
+
+__all__ = ("Signal",)
+
+
+class Signal(FrozenList):
+    """Coroutine-based signal implementation.
+
+    To connect a callback to a signal, use any list method.
+
+    Signals are fired using the send() coroutine, which takes named
+    arguments.
+    """
+
+    __slots__ = ("_owner",)
+
+    def __init__(self, owner):
+        super().__init__()
+        self._owner = owner
+
+    def __repr__(self):
+        return "<Signal owner={}, frozen={}, {!r}>".format(
+            self._owner, self.frozen, list(self)
+        )
+
+    async def send(self, *args, **kwargs):
+        """
+        Sends data to all registered receivers.
+        """
+        if not self.frozen:
+            raise RuntimeError("Cannot send non-frozen signal.")
+
+        for receiver in self:
+            await receiver(*args, **kwargs)  # type: ignore
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal/__init__.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal/__init__.pyi
new file mode 100644
index 000000000..d4e3416d7
--- /dev/null
+++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal/__init__.pyi
@@ -0,0 +1,12 @@
+from typing import Any, Generic, TypeVar
+
+from frozenlist import FrozenList
+
+__all__ = ("Signal",)
+
+_T = TypeVar("_T")
+
+class Signal(FrozenList[_T], Generic[_T]):
+    def __init__(self, owner: Any) -> None: ...
+ def __repr__(self) -> str: ... + async def send(self, *args: Any, **kwargs: Any) -> None: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal/py.typed b/.direnv/python-3.8.20/lib/python3.8/site-packages/aiosignal/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/INSTALLER b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/INSTALLER new file mode 100644 index 000000000..ce3133dc1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/INSTALLER @@ -0,0 +1 @@ +Poetry 2.2.1 \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/LICENSE b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/LICENSE new file mode 100644 index 000000000..033c86b7a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/LICENSE @@ -0,0 +1,13 @@ +Copyright 2016-2020 aio-libs collaboration. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/METADATA b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/METADATA new file mode 100644 index 000000000..d8dd6d12d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/METADATA @@ -0,0 +1,131 @@ +Metadata-Version: 2.1 +Name: async-timeout +Version: 4.0.3 +Summary: Timeout context manager for asyncio programs +Home-page: https://github.com/aio-libs/async-timeout +Author: Andrew Svetlov +Author-email: andrew.svetlov@gmail.com +License: Apache 2 +Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby +Project-URL: CI: GitHub Actions, https://github.com/aio-libs/async-timeout/actions +Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/async-timeout +Project-URL: GitHub: issues, https://github.com/aio-libs/async-timeout/issues +Project-URL: GitHub: repo, https://github.com/aio-libs/async-timeout +Classifier: Development Status :: 5 - Production/Stable +Classifier: Topic :: Software Development :: Libraries +Classifier: Framework :: AsyncIO +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: typing-extensions >=3.6.5 ; python_version < "3.8" + +async-timeout +============= +.. 
image:: https://travis-ci.com/aio-libs/async-timeout.svg?branch=master + :target: https://travis-ci.com/aio-libs/async-timeout +.. image:: https://codecov.io/gh/aio-libs/async-timeout/branch/master/graph/badge.svg + :target: https://codecov.io/gh/aio-libs/async-timeout +.. image:: https://img.shields.io/pypi/v/async-timeout.svg + :target: https://pypi.python.org/pypi/async-timeout +.. image:: https://badges.gitter.im/Join%20Chat.svg + :target: https://gitter.im/aio-libs/Lobby + :alt: Chat on Gitter + +asyncio-compatible timeout context manager. + + +Usage example +------------- + + +The context manager is useful in cases when you want to apply timeout +logic around block of code or in cases when ``asyncio.wait_for()`` is +not suitable. Also it's much faster than ``asyncio.wait_for()`` +because ``timeout`` doesn't create a new task. + +The ``timeout(delay, *, loop=None)`` call returns a context manager +that cancels a block on *timeout* expiring:: + + from async_timeout import timeout + async with timeout(1.5): + await inner() + +1. If ``inner()`` is executed faster than in ``1.5`` seconds nothing + happens. +2. Otherwise ``inner()`` is cancelled internally by sending + ``asyncio.CancelledError`` into but ``asyncio.TimeoutError`` is + raised outside of context manager scope. + +*timeout* parameter could be ``None`` for skipping timeout functionality. + + +Alternatively, ``timeout_at(when)`` can be used for scheduling +at the absolute time:: + + loop = asyncio.get_event_loop() + now = loop.time() + + async with timeout_at(now + 1.5): + await inner() + + +Please note: it is not POSIX time but a time with +undefined starting base, e.g. the time of the system power on. + + +Context manager has ``.expired`` property for check if timeout happens +exactly in context manager:: + + async with timeout(1.5) as cm: + await inner() + print(cm.expired) + +The property is ``True`` if ``inner()`` execution is cancelled by +timeout context manager. + +If ``inner()`` call explicitly raises ``TimeoutError`` ``cm.expired`` +is ``False``. + +The scheduled deadline time is available as ``.deadline`` property:: + + async with timeout(1.5) as cm: + cm.deadline + +Not finished yet timeout can be rescheduled by ``shift_by()`` +or ``shift_to()`` methods:: + + async with timeout(1.5) as cm: + cm.shift(1) # add another second on waiting + cm.update(loop.time() + 5) # reschedule to now+5 seconds + +Rescheduling is forbidden if the timeout is expired or after exit from ``async with`` +code block. + + +Installation +------------ + +:: + + $ pip install async-timeout + +The library is Python 3 only! + + + +Authors and License +------------------- + +The module is written by Andrew Svetlov. + +It's *Apache 2* licensed and freely available. 
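To tie together the ``timeout`` / ``expired`` pattern the README above describes, here is a minimal, self-contained sketch. It is not part of the vendored package: ``inner()`` is a stand-in coroutine that deliberately overruns the deadline, and only ``async-timeout`` itself is assumed to be installed::

    import asyncio

    from async_timeout import timeout


    async def inner() -> str:
        # Pretend work that takes longer than the 0.5 s budget below.
        await asyncio.sleep(2)
        return "done"


    async def main() -> None:
        try:
            async with timeout(0.5) as cm:
                print(await inner())
        except asyncio.TimeoutError:
            # The block was cancelled by the context manager, so cm.expired is True.
            print("timed out, expired =", cm.expired)


    asyncio.run(main())

Run as a script, this prints ``timed out, expired = True``; if ``inner()`` finished within the budget, the ``except`` branch would not run and ``cm.expired`` would stay ``False``.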
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/RECORD b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/RECORD new file mode 100644 index 000000000..01c061e37 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/RECORD @@ -0,0 +1,9 @@ +async_timeout/__init__.py,sha256=A0VOqDGQ3cCPFp0NZJKIbx_VRP1Y2xPtQOZebVIUB88,7242 +async_timeout/py.typed,sha256=tyozzRT1fziXETDxokmuyt6jhOmtjUbnVNJdZcG7ik0,12 +async_timeout-4.0.3.dist-info/LICENSE,sha256=4Y17uPUT4sRrtYXJS1hb0wcg3TzLId2weG9y0WZY-Sw,568 +async_timeout-4.0.3.dist-info/METADATA,sha256=WQVcnDIXQ2ntebcm-vYjhNLg_VMeTWw13_ReT-U36J4,4209 +async_timeout-4.0.3.dist-info/WHEEL,sha256=5sUXSg9e4bi7lTLOHcm6QEYwO5TIF1TNbTSVFVjcJcc,92 +async_timeout-4.0.3.dist-info/top_level.txt,sha256=9oM4e7Twq8iD_7_Q3Mz0E6GPIB6vJvRFo-UBwUQtBDU,14 +async_timeout-4.0.3.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +async_timeout-4.0.3.dist-info/INSTALLER,sha256=sz0Tnp99msIbbLB51q7U9nA6AvRKcsOeUKhrHH0Ulg4,12 +async_timeout-4.0.3.dist-info/RECORD,, diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/WHEEL b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/WHEEL new file mode 100644 index 000000000..2c08da084 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/top_level.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/top_level.txt new file mode 100644 index 000000000..ad29955ef --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/top_level.txt @@ -0,0 +1 @@ +async_timeout diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/zip-safe b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/zip-safe new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout-4.0.3.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout/__init__.py new file mode 100644 index 000000000..1ffb069fc --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout/__init__.py @@ -0,0 +1,239 @@ +import asyncio +import enum +import sys +import warnings +from types import TracebackType +from typing import Optional, Type + + +if sys.version_info >= (3, 8): + from typing import final +else: + from typing_extensions import final + + +if sys.version_info >= (3, 11): + + def _uncancel_task(task: "asyncio.Task[object]") -> None: + task.uncancel() + +else: + + def _uncancel_task(task: "asyncio.Task[object]") -> None: + pass + + +__version__ = "4.0.3" + + +__all__ = ("timeout", "timeout_at", "Timeout") + + +def timeout(delay: Optional[float]) -> "Timeout": + """timeout context manager. + + Useful in cases when you want to apply timeout logic around block + of code or in cases when asyncio.wait_for is not suitable. For example: + + >>> async with timeout(0.001): + ... async with aiohttp.get('https://github.com') as r: + ... 
await r.text() + + + delay - value in seconds or None to disable timeout logic + """ + loop = asyncio.get_running_loop() + if delay is not None: + deadline = loop.time() + delay # type: Optional[float] + else: + deadline = None + return Timeout(deadline, loop) + + +def timeout_at(deadline: Optional[float]) -> "Timeout": + """Schedule the timeout at absolute time. + + deadline argument points on the time in the same clock system + as loop.time(). + + Please note: it is not POSIX time but a time with + undefined starting base, e.g. the time of the system power on. + + >>> async with timeout_at(loop.time() + 10): + ... async with aiohttp.get('https://github.com') as r: + ... await r.text() + + + """ + loop = asyncio.get_running_loop() + return Timeout(deadline, loop) + + +class _State(enum.Enum): + INIT = "INIT" + ENTER = "ENTER" + TIMEOUT = "TIMEOUT" + EXIT = "EXIT" + + +@final +class Timeout: + # Internal class, please don't instantiate it directly + # Use timeout() and timeout_at() public factories instead. + # + # Implementation note: `async with timeout()` is preferred + # over `with timeout()`. + # While technically the Timeout class implementation + # doesn't need to be async at all, + # the `async with` statement explicitly points that + # the context manager should be used from async function context. + # + # This design allows to avoid many silly misusages. + # + # TimeoutError is raised immediately when scheduled + # if the deadline is passed. + # The purpose is to time out as soon as possible + # without waiting for the next await expression. + + __slots__ = ("_deadline", "_loop", "_state", "_timeout_handler", "_task") + + def __init__( + self, deadline: Optional[float], loop: asyncio.AbstractEventLoop + ) -> None: + self._loop = loop + self._state = _State.INIT + + self._task: Optional["asyncio.Task[object]"] = None + self._timeout_handler = None # type: Optional[asyncio.Handle] + if deadline is None: + self._deadline = None # type: Optional[float] + else: + self.update(deadline) + + def __enter__(self) -> "Timeout": + warnings.warn( + "with timeout() is deprecated, use async with timeout() instead", + DeprecationWarning, + stacklevel=2, + ) + self._do_enter() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + self._do_exit(exc_type) + return None + + async def __aenter__(self) -> "Timeout": + self._do_enter() + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + self._do_exit(exc_type) + return None + + @property + def expired(self) -> bool: + """Is timeout expired during execution?""" + return self._state == _State.TIMEOUT + + @property + def deadline(self) -> Optional[float]: + return self._deadline + + def reject(self) -> None: + """Reject scheduled timeout if any.""" + # cancel is maybe better name but + # task.cancel() raises CancelledError in asyncio world. + if self._state not in (_State.INIT, _State.ENTER): + raise RuntimeError(f"invalid state {self._state.value}") + self._reject() + + def _reject(self) -> None: + self._task = None + if self._timeout_handler is not None: + self._timeout_handler.cancel() + self._timeout_handler = None + + def shift(self, delay: float) -> None: + """Advance timeout on delay seconds. + + The delay can be negative. 
+ + Raise RuntimeError if shift is called when deadline is not scheduled + """ + deadline = self._deadline + if deadline is None: + raise RuntimeError("cannot shift timeout if deadline is not scheduled") + self.update(deadline + delay) + + def update(self, deadline: float) -> None: + """Set deadline to absolute value. + + deadline argument points on the time in the same clock system + as loop.time(). + + If new deadline is in the past the timeout is raised immediately. + + Please note: it is not POSIX time but a time with + undefined starting base, e.g. the time of the system power on. + """ + if self._state == _State.EXIT: + raise RuntimeError("cannot reschedule after exit from context manager") + if self._state == _State.TIMEOUT: + raise RuntimeError("cannot reschedule expired timeout") + if self._timeout_handler is not None: + self._timeout_handler.cancel() + self._deadline = deadline + if self._state != _State.INIT: + self._reschedule() + + def _reschedule(self) -> None: + assert self._state == _State.ENTER + deadline = self._deadline + if deadline is None: + return + + now = self._loop.time() + if self._timeout_handler is not None: + self._timeout_handler.cancel() + + self._task = asyncio.current_task() + if deadline <= now: + self._timeout_handler = self._loop.call_soon(self._on_timeout) + else: + self._timeout_handler = self._loop.call_at(deadline, self._on_timeout) + + def _do_enter(self) -> None: + if self._state != _State.INIT: + raise RuntimeError(f"invalid state {self._state.value}") + self._state = _State.ENTER + self._reschedule() + + def _do_exit(self, exc_type: Optional[Type[BaseException]]) -> None: + if exc_type is asyncio.CancelledError and self._state == _State.TIMEOUT: + assert self._task is not None + _uncancel_task(self._task) + self._timeout_handler = None + self._task = None + raise asyncio.TimeoutError + # timeout has not expired + self._state = _State.EXIT + self._reject() + return None + + def _on_timeout(self) -> None: + assert self._task is not None + self._task.cancel() + self._state = _State.TIMEOUT + # drop the reference early + self._timeout_handler = None diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout/py.typed b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout/py.typed new file mode 100644 index 000000000..3b94f9157 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/async_timeout/py.typed @@ -0,0 +1 @@ +Placeholder diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/__init__.py new file mode 100644 index 000000000..9226258a2 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/__init__.py @@ -0,0 +1,134 @@ +# SPDX-License-Identifier: MIT + +""" +Classes Without Boilerplate +""" + +from functools import partial +from typing import Callable + +from . 
import converters, exceptions, filters, setters, validators +from ._cmp import cmp_using +from ._compat import Protocol +from ._config import get_run_validators, set_run_validators +from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types +from ._make import ( + NOTHING, + Attribute, + Factory, + attrib, + attrs, + fields, + fields_dict, + make_class, + validate, +) +from ._next_gen import define, field, frozen, mutable +from ._version_info import VersionInfo + + +s = attributes = attrs +ib = attr = attrib +dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) + + +class AttrsInstance(Protocol): + pass + + +__all__ = [ + "Attribute", + "AttrsInstance", + "Factory", + "NOTHING", + "asdict", + "assoc", + "astuple", + "attr", + "attrib", + "attributes", + "attrs", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "field", + "fields", + "fields_dict", + "filters", + "frozen", + "get_run_validators", + "has", + "ib", + "make_class", + "mutable", + "resolve_types", + "s", + "set_run_validators", + "setters", + "validate", + "validators", +] + + +def _make_getattr(mod_name: str) -> Callable: + """ + Create a metadata proxy for packaging information that uses *mod_name* in + its warnings and errors. + """ + + def __getattr__(name: str) -> str: + dunder_to_metadata = { + "__title__": "Name", + "__copyright__": "", + "__version__": "version", + "__version_info__": "version", + "__description__": "summary", + "__uri__": "", + "__url__": "", + "__author__": "", + "__email__": "", + "__license__": "license", + } + if name not in dunder_to_metadata: + msg = f"module {mod_name} has no attribute {name}" + raise AttributeError(msg) + + import sys + import warnings + + if sys.version_info < (3, 8): + from importlib_metadata import metadata + else: + from importlib.metadata import metadata + + if name not in ("__version__", "__version_info__"): + warnings.warn( + f"Accessing {mod_name}.{name} is deprecated and will be " + "removed in a future release. Use importlib.metadata directly " + "to query for attrs's packaging metadata.", + DeprecationWarning, + stacklevel=2, + ) + + meta = metadata("attrs") + if name == "__license__": + return "MIT" + if name == "__copyright__": + return "Copyright (c) 2015 Hynek Schlawack" + if name in ("__uri__", "__url__"): + return meta["Project-URL"].split(" ", 1)[-1] + if name == "__version_info__": + return VersionInfo._from_version_string(meta["version"]) + if name == "__author__": + return meta["Author-email"].rsplit(" ", 1)[0] + if name == "__email__": + return meta["Author-email"].rsplit("<", 1)[1][:-1] + + return meta[dunder_to_metadata[name]] + + return __getattr__ + + +__getattr__ = _make_getattr(__name__) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/__init__.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/__init__.pyi new file mode 100644 index 000000000..37a208732 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/__init__.pyi @@ -0,0 +1,555 @@ +import enum +import sys + +from typing import ( + Any, + Callable, + Dict, + Generic, + List, + Mapping, + Optional, + Protocol, + Sequence, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +# `import X as X` is required to make these public +from . import converters as converters +from . import exceptions as exceptions +from . import filters as filters +from . import setters as setters +from . 
import validators as validators +from ._cmp import cmp_using as cmp_using +from ._typing_compat import AttrsInstance_ +from ._version_info import VersionInfo + +if sys.version_info >= (3, 10): + from typing import TypeGuard +else: + from typing_extensions import TypeGuard + +if sys.version_info >= (3, 11): + from typing import dataclass_transform +else: + from typing_extensions import dataclass_transform + +__version__: str +__version_info__: VersionInfo +__title__: str +__description__: str +__url__: str +__uri__: str +__author__: str +__email__: str +__license__: str +__copyright__: str + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_EqOrderType = Union[bool, Callable[[Any], Any]] +_ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any] +_ConverterType = Callable[[Any], Any] +_FilterType = Callable[["Attribute[_T]", _T], bool] +_ReprType = Callable[[Any], str] +_ReprArgType = Union[bool, _ReprType] +_OnSetAttrType = Callable[[Any, "Attribute[Any]", Any], Any] +_OnSetAttrArgType = Union[ + _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType +] +_FieldTransformer = Callable[ + [type, List["Attribute[Any]"]], List["Attribute[Any]"] +] +# FIXME: in reality, if multiple validators are passed they must be in a list +# or tuple, but those are invariant and so would prevent subtypes of +# _ValidatorType from working when passed in a list or tuple. +_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] + +# We subclass this here to keep the protocol's qualified name clean. +class AttrsInstance(AttrsInstance_, Protocol): + pass + +_A = TypeVar("_A", bound=type[AttrsInstance]) + +class _Nothing(enum.Enum): + NOTHING = enum.auto() + +NOTHING = _Nothing.NOTHING + +# NOTE: Factory lies about its return type to make this possible: +# `x: List[int] # = Factory(list)` +# Work around mypy issue #4554 in the common case by using an overload. +if sys.version_info >= (3, 8): + from typing import Literal + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Callable[[Any], _T], + takes_self: Literal[True], + ) -> _T: ... + @overload + def Factory( + factory: Callable[[], _T], + takes_self: Literal[False], + ) -> _T: ... + +else: + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Union[Callable[[Any], _T], Callable[[], _T]], + takes_self: bool = ..., + ) -> _T: ... + +class Attribute(Generic[_T]): + name: str + default: Optional[_T] + validator: Optional[_ValidatorType[_T]] + repr: _ReprArgType + cmp: _EqOrderType + eq: _EqOrderType + order: _EqOrderType + hash: Optional[bool] + init: bool + converter: Optional[_ConverterType] + metadata: Dict[Any, Any] + type: Optional[Type[_T]] + kw_only: bool + on_setattr: _OnSetAttrType + alias: Optional[str] + + def evolve(self, **changes: Any) -> "Attribute[Any]": ... + +# NOTE: We had several choices for the annotation to use for type arg: +# 1) Type[_T] +# - Pros: Handles simple cases correctly +# - Cons: Might produce less informative errors in the case of conflicting +# TypeVars e.g. `attr.ib(default='bad', type=int)` +# 2) Callable[..., _T] +# - Pros: Better error messages than #1 for conflicting TypeVars +# - Cons: Terrible error messages for validator checks. +# e.g. attr.ib(type=int, validator=validate_str) +# -> error: Cannot infer function type argument +# 3) type (and do all of the work in the mypy plugin) +# - Pros: Simple here, and we could customize the plugin with our own errors. 
+# - Cons: Would need to write mypy plugin code to handle all the cases. +# We chose option #1. + +# `attr` lies about its return type to make the following possible: +# attr() -> Any +# attr(8) -> int +# attr(validator=) -> Whatever the callable expects. +# This makes this type of assignments possible: +# x: int = attr(8) +# +# This form catches explicit None or no default but with no other arguments +# returns Any. +@overload +def attrib( + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def attrib( + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def attrib( + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def attrib( + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: object = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> Any: ... +@overload +def field( + *, + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., + type: Optional[type] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. 
+@overload +def field( + *, + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., + type: Optional[type] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def field( + *, + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., + type: Optional[type] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def field( + *, + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., + type: Optional[type] = ..., +) -> Any: ... +@overload +@dataclass_transform(order_default=True, field_specifiers=(attrib, field)) +def attrs( + maybe_cls: _C, + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., + unsafe_hash: Optional[bool] = ..., +) -> _C: ... +@overload +@dataclass_transform(order_default=True, field_specifiers=(attrib, field)) +def attrs( + maybe_cls: None = ..., + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., + unsafe_hash: Optional[bool] = ..., +) -> Callable[[_C], _C]: ... 
+@overload +@dataclass_transform(field_specifiers=(attrib, field)) +def define( + maybe_cls: _C, + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + unsafe_hash: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... +@overload +@dataclass_transform(field_specifiers=(attrib, field)) +def define( + maybe_cls: None = ..., + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + unsafe_hash: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... + +mutable = define + +@overload +@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field)) +def frozen( + maybe_cls: _C, + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + unsafe_hash: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... +@overload +@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field)) +def frozen( + maybe_cls: None = ..., + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + unsafe_hash: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... +def fields(cls: Type[AttrsInstance]) -> Any: ... +def fields_dict(cls: Type[AttrsInstance]) -> Dict[str, Attribute[Any]]: ... +def validate(inst: AttrsInstance) -> None: ... +def resolve_types( + cls: _A, + globalns: Optional[Dict[str, Any]] = ..., + localns: Optional[Dict[str, Any]] = ..., + attribs: Optional[List[Attribute[Any]]] = ..., + include_extras: bool = ..., +) -> _A: ... + +# TODO: add support for returning a proper attrs class from the mypy plugin +# we use Any instead of _CountingAttr so that e.g. 
`make_class('Foo', +# [attr.ib()])` is valid +def make_class( + name: str, + attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], + bases: Tuple[type, ...] = ..., + class_body: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + collect_by_mro: bool = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., +) -> type: ... + +# _funcs -- + +# TODO: add support for returning TypedDict from the mypy plugin +# FIXME: asdict/astuple do not honor their factory args. Waiting on one of +# these: +# https://github.com/python/mypy/issues/4236 +# https://github.com/python/typing/issues/253 +# XXX: remember to fix attrs.asdict/astuple too! +def asdict( + inst: AttrsInstance, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: Optional[bool] = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: AttrsInstance, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... +def has(cls: type) -> TypeGuard[Type[AttrsInstance]]: ... +def assoc(inst: _T, **changes: Any) -> _T: ... +def evolve(inst: _T, **changes: Any) -> _T: ... + +# _config -- + +def set_run_validators(run: bool) -> None: ... +def get_run_validators() -> bool: ... + +# aliases -- + +s = attributes = attrs +ib = attr = attrib +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_cmp.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_cmp.py new file mode 100644 index 000000000..a4a35e08f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_cmp.py @@ -0,0 +1,150 @@ +# SPDX-License-Identifier: MIT + + +import functools +import types + +from ._make import _make_ne + + +_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="} + + +def cmp_using( + eq=None, + lt=None, + le=None, + gt=None, + ge=None, + require_same_type=True, + class_name="Comparable", +): + """ + Create a class that can be passed into `attrs.field`'s ``eq``, ``order``, + and ``cmp`` arguments to customize field comparison. + + The resulting class will have a full set of ordering methods if at least + one of ``{lt, le, gt, ge}`` and ``eq`` are provided. + + :param Optional[callable] eq: `callable` used to evaluate equality of two + objects. + :param Optional[callable] lt: `callable` used to evaluate whether one + object is less than another object. + :param Optional[callable] le: `callable` used to evaluate whether one + object is less than or equal to another object. + :param Optional[callable] gt: `callable` used to evaluate whether one + object is greater than another object. + :param Optional[callable] ge: `callable` used to evaluate whether one + object is greater than or equal to another object. 
+ + :param bool require_same_type: When `True`, equality and ordering methods + will return `NotImplemented` if objects are not of the same type. + + :param Optional[str] class_name: Name of class. Defaults to 'Comparable'. + + See `comparison` for more details. + + .. versionadded:: 21.1.0 + """ + + body = { + "__slots__": ["value"], + "__init__": _make_init(), + "_requirements": [], + "_is_comparable_to": _is_comparable_to, + } + + # Add operations. + num_order_functions = 0 + has_eq_function = False + + if eq is not None: + has_eq_function = True + body["__eq__"] = _make_operator("eq", eq) + body["__ne__"] = _make_ne() + + if lt is not None: + num_order_functions += 1 + body["__lt__"] = _make_operator("lt", lt) + + if le is not None: + num_order_functions += 1 + body["__le__"] = _make_operator("le", le) + + if gt is not None: + num_order_functions += 1 + body["__gt__"] = _make_operator("gt", gt) + + if ge is not None: + num_order_functions += 1 + body["__ge__"] = _make_operator("ge", ge) + + type_ = types.new_class( + class_name, (object,), {}, lambda ns: ns.update(body) + ) + + # Add same type requirement. + if require_same_type: + type_._requirements.append(_check_same_type) + + # Add total ordering if at least one operation was defined. + if 0 < num_order_functions < 4: + if not has_eq_function: + # functools.total_ordering requires __eq__ to be defined, + # so raise early error here to keep a nice stack. + msg = "eq must be define is order to complete ordering from lt, le, gt, ge." + raise ValueError(msg) + type_ = functools.total_ordering(type_) + + return type_ + + +def _make_init(): + """ + Create __init__ method. + """ + + def __init__(self, value): + """ + Initialize object with *value*. + """ + self.value = value + + return __init__ + + +def _make_operator(name, func): + """ + Create operator method. + """ + + def method(self, other): + if not self._is_comparable_to(other): + return NotImplemented + + result = func(self.value, other.value) + if result is NotImplemented: + return NotImplemented + + return result + + method.__name__ = f"__{name}__" + method.__doc__ = ( + f"Return a {_operation_names[name]} b. Computed by attrs." + ) + + return method + + +def _is_comparable_to(self, other): + """ + Check whether `other` is comparable to `self`. + """ + return all(func(self, other) for func in self._requirements) + + +def _check_same_type(self, other): + """ + Return True if *self* and *other* are of the same type, False otherwise. + """ + return other.value.__class__ is self.value.__class__ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_cmp.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_cmp.pyi new file mode 100644 index 000000000..f3dcdc1a7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_cmp.pyi @@ -0,0 +1,13 @@ +from typing import Any, Callable, Optional, Type + +_CompareWithType = Callable[[Any, Any], bool] + +def cmp_using( + eq: Optional[_CompareWithType] = ..., + lt: Optional[_CompareWithType] = ..., + le: Optional[_CompareWithType] = ..., + gt: Optional[_CompareWithType] = ..., + ge: Optional[_CompareWithType] = ..., + require_same_type: bool = ..., + class_name: str = ..., +) -> Type: ... 
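The ``cmp_using`` helper documented above is easiest to see with a concrete field definition. The following is a minimal usage sketch, not part of the vendored sources; it assumes only the behaviour described in the docstring above, and ``Tag`` / ``same_text`` are invented names for illustration::

    import attr


    def same_text(a: str, b: str) -> bool:
        # Case-insensitive equality, used only for the `name` field.
        return a.lower() == b.lower()


    @attr.s(frozen=True)
    class Tag:
        # Wrap the comparison in a cmp_using class so attrs compares this
        # field with `same_text` instead of the default `==`.
        name = attr.ib(eq=attr.cmp_using(eq=same_text), order=False)


    print(Tag("Hello") == Tag("HELLO"))  # True
    print(Tag("Hello") == Tag("World"))  # False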
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_compat.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_compat.py new file mode 100644 index 000000000..46b05ca45 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_compat.py @@ -0,0 +1,87 @@ +# SPDX-License-Identifier: MIT + +import inspect +import platform +import sys +import threading + +from collections.abc import Mapping, Sequence # noqa: F401 +from typing import _GenericAlias + + +PYPY = platform.python_implementation() == "PyPy" +PY_3_8_PLUS = sys.version_info[:2] >= (3, 8) +PY_3_9_PLUS = sys.version_info[:2] >= (3, 9) +PY310 = sys.version_info[:2] >= (3, 10) +PY_3_12_PLUS = sys.version_info[:2] >= (3, 12) + + +if sys.version_info < (3, 8): + try: + from typing_extensions import Protocol + except ImportError: # pragma: no cover + Protocol = object +else: + from typing import Protocol # noqa: F401 + + +class _AnnotationExtractor: + """ + Extract type annotations from a callable, returning None whenever there + is none. + """ + + __slots__ = ["sig"] + + def __init__(self, callable): + try: + self.sig = inspect.signature(callable) + except (ValueError, TypeError): # inspect failed + self.sig = None + + def get_first_param_type(self): + """ + Return the type annotation of the first argument if it's not empty. + """ + if not self.sig: + return None + + params = list(self.sig.parameters.values()) + if params and params[0].annotation is not inspect.Parameter.empty: + return params[0].annotation + + return None + + def get_return_type(self): + """ + Return the return type if it's not empty. + """ + if ( + self.sig + and self.sig.return_annotation is not inspect.Signature.empty + ): + return self.sig.return_annotation + + return None + + +# Thread-local global to track attrs instances which are already being repr'd. +# This is needed because there is no other (thread-safe) way to pass info +# about the instances that are already being repr'd through the call stack +# in order to ensure we don't perform infinite recursion. +# +# For instance, if an instance contains a dict which contains that instance, +# we need to know that we're already repr'ing the outside instance from within +# the dict's repr() call. +# +# This lives here rather than in _make.py so that the functions in _make.py +# don't have a direct reference to the thread-local in their globals dict. +# If they have such a reference, it breaks cloudpickle. +repr_context = threading.local() + + +def get_generic_base(cl): + """If this is a generic class (A[str]), return the generic base for it.""" + if cl.__class__ is _GenericAlias: + return cl.__origin__ + return None diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_config.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_config.py new file mode 100644 index 000000000..9c245b146 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_config.py @@ -0,0 +1,31 @@ +# SPDX-License-Identifier: MIT + +__all__ = ["set_run_validators", "get_run_validators"] + +_run_validators = True + + +def set_run_validators(run): + """ + Set whether or not validators are run. By default, they are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` + instead. + """ + if not isinstance(run, bool): + msg = "'run' must be bool." 
+ raise TypeError(msg) + global _run_validators + _run_validators = run + + +def get_run_validators(): + """ + Return whether or not validators are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()` + instead. + """ + return _run_validators diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_funcs.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_funcs.py new file mode 100644 index 000000000..a888991d9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_funcs.py @@ -0,0 +1,483 @@ +# SPDX-License-Identifier: MIT + + +import copy + +from ._compat import PY_3_9_PLUS, get_generic_base +from ._make import NOTHING, _obj_setattr, fields +from .exceptions import AttrsAttributeNotFoundError + + +def asdict( + inst, + recurse=True, + filter=None, + dict_factory=dict, + retain_collection_types=False, + value_serializer=None, +): + """ + Return the *attrs* attribute values of *inst* as a dict. + + Optionally recurse into other *attrs*-decorated classes. + + :param inst: Instance of an *attrs*-decorated class. + :param bool recurse: Recurse into classes that are also + *attrs*-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable dict_factory: A callable to produce dictionaries from. For + example, to produce ordered dictionaries instead of normal Python + dictionaries, pass in ``collections.OrderedDict``. + :param bool retain_collection_types: Do not convert to ``list`` when + encountering an attribute whose type is ``tuple`` or ``set``. Only + meaningful if ``recurse`` is ``True``. + :param Optional[callable] value_serializer: A hook that is called for every + attribute or dict key/value. It receives the current instance, field + and value and must return the (updated) value. The hook is run *after* + the optional *filter* has been applied. + + :rtype: return type of *dict_factory* + + :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs* + class. + + .. versionadded:: 16.0.0 *dict_factory* + .. versionadded:: 16.1.0 *retain_collection_types* + .. versionadded:: 20.3.0 *value_serializer* + .. versionadded:: 21.3.0 If a dict has a collection for a key, it is + serialized as a tuple. 
+ """ + attrs = fields(inst.__class__) + rv = dict_factory() + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + + if value_serializer is not None: + v = value_serializer(inst, a, v) + + if recurse is True: + if has(v.__class__): + rv[a.name] = asdict( + v, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain_collection_types is True else list + items = [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in v + ] + try: + rv[a.name] = cf(items) + except TypeError: + if not issubclass(cf, tuple): + raise + # Workaround for TypeError: cf.__new__() missing 1 required + # positional argument (which appears, for a namedturle) + rv[a.name] = cf(*items) + elif isinstance(v, dict): + df = dict_factory + rv[a.name] = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in v.items() + ) + else: + rv[a.name] = v + else: + rv[a.name] = v + return rv + + +def _asdict_anything( + val, + is_key, + filter, + dict_factory, + retain_collection_types, + value_serializer, +): + """ + ``asdict`` only works on attrs instances, this works on anything. + """ + if getattr(val.__class__, "__attrs_attrs__", None) is not None: + # Attrs class. + rv = asdict( + val, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(val, (tuple, list, set, frozenset)): + if retain_collection_types is True: + cf = val.__class__ + elif is_key: + cf = tuple + else: + cf = list + + rv = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in val + ] + ) + elif isinstance(val, dict): + df = dict_factory + rv = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in val.items() + ) + else: + rv = val + if value_serializer is not None: + rv = value_serializer(None, None, rv) + + return rv + + +def astuple( + inst, + recurse=True, + filter=None, + tuple_factory=tuple, + retain_collection_types=False, +): + """ + Return the *attrs* attribute values of *inst* as a tuple. + + Optionally recurse into other *attrs*-decorated classes. + + :param inst: Instance of an *attrs*-decorated class. + :param bool recurse: Recurse into classes that are also + *attrs*-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. 
+ :param callable tuple_factory: A callable to produce tuples from. For + example, to produce lists instead of tuples. + :param bool retain_collection_types: Do not convert to ``list`` + or ``dict`` when encountering an attribute which type is + ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is + ``True``. + + :rtype: return type of *tuple_factory* + + :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs* + class. + + .. versionadded:: 16.2.0 + """ + attrs = fields(inst.__class__) + rv = [] + retain = retain_collection_types # Very long. :/ + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + if recurse is True: + if has(v.__class__): + rv.append( + astuple( + v, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain is True else list + items = [ + astuple( + j, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(j.__class__) + else j + for j in v + ] + try: + rv.append(cf(items)) + except TypeError: + if not issubclass(cf, tuple): + raise + # Workaround for TypeError: cf.__new__() missing 1 required + # positional argument (which appears, for a namedturle) + rv.append(cf(*items)) + elif isinstance(v, dict): + df = v.__class__ if retain is True else dict + rv.append( + df( + ( + astuple( + kk, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(kk.__class__) + else kk, + astuple( + vv, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(vv.__class__) + else vv, + ) + for kk, vv in v.items() + ) + ) + else: + rv.append(v) + else: + rv.append(v) + + return rv if tuple_factory is list else tuple_factory(rv) + + +def has(cls): + """ + Check whether *cls* is a class with *attrs* attributes. + + :param type cls: Class to introspect. + :raise TypeError: If *cls* is not a class. + + :rtype: bool + """ + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is not None: + return True + + # No attrs, maybe it's a specialized generic (A[str])? + generic_base = get_generic_base(cls) + if generic_base is not None: + generic_attrs = getattr(generic_base, "__attrs_attrs__", None) + if generic_attrs is not None: + # Stick it on here for speed next time. + cls.__attrs_attrs__ = generic_attrs + return generic_attrs is not None + return False + + +def assoc(inst, **changes): + """ + Copy *inst* and apply *changes*. + + This is different from `evolve` that applies the changes to the arguments + that create the new instance. + + `evolve`'s behavior is preferable, but there are `edge cases`_ where it + doesn't work. Therefore `assoc` is deprecated, but will not be removed. + + .. _`edge cases`: https://github.com/python-attrs/attrs/issues/251 + + :param inst: Instance of a class with *attrs* attributes. + :param changes: Keyword changes in the new copy. + + :return: A copy of inst with *changes* incorporated. + + :raise attrs.exceptions.AttrsAttributeNotFoundError: If *attr_name* + couldn't be found on *cls*. + :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs* + class. + + .. deprecated:: 17.1.0 + Use `attrs.evolve` instead if you can. + This function will not be removed du to the slightly different approach + compared to `attrs.evolve`. 
+ """ + new = copy.copy(inst) + attrs = fields(inst.__class__) + for k, v in changes.items(): + a = getattr(attrs, k, NOTHING) + if a is NOTHING: + msg = f"{k} is not an attrs attribute on {new.__class__}." + raise AttrsAttributeNotFoundError(msg) + _obj_setattr(new, k, v) + return new + + +def evolve(*args, **changes): + """ + Create a new instance, based on the first positional argument with + *changes* applied. + + :param inst: Instance of a class with *attrs* attributes. + :param changes: Keyword changes in the new copy. + + :return: A copy of inst with *changes* incorporated. + + :raise TypeError: If *attr_name* couldn't be found in the class + ``__init__``. + :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs* + class. + + .. versionadded:: 17.1.0 + .. deprecated:: 23.1.0 + It is now deprecated to pass the instance using the keyword argument + *inst*. It will raise a warning until at least April 2024, after which + it will become an error. Always pass the instance as a positional + argument. + """ + # Try to get instance by positional argument first. + # Use changes otherwise and warn it'll break. + if args: + try: + (inst,) = args + except ValueError: + msg = f"evolve() takes 1 positional argument, but {len(args)} were given" + raise TypeError(msg) from None + else: + try: + inst = changes.pop("inst") + except KeyError: + msg = "evolve() missing 1 required positional argument: 'inst'" + raise TypeError(msg) from None + + import warnings + + warnings.warn( + "Passing the instance per keyword argument is deprecated and " + "will stop working in, or after, April 2024.", + DeprecationWarning, + stacklevel=2, + ) + + cls = inst.__class__ + attrs = fields(cls) + for a in attrs: + if not a.init: + continue + attr_name = a.name # To deal with private attributes. + init_name = a.alias + if init_name not in changes: + changes[init_name] = getattr(inst, attr_name) + + return cls(**changes) + + +def resolve_types( + cls, globalns=None, localns=None, attribs=None, include_extras=True +): + """ + Resolve any strings and forward annotations in type annotations. + + This is only required if you need concrete types in `Attribute`'s *type* + field. In other words, you don't need to resolve your types if you only + use them for static type checking. + + With no arguments, names will be looked up in the module in which the class + was created. If this is not what you want, e.g. if the name only exists + inside a method, you may pass *globalns* or *localns* to specify other + dictionaries in which to look up these names. See the docs of + `typing.get_type_hints` for more details. + + :param type cls: Class to resolve. + :param Optional[dict] globalns: Dictionary containing global variables. + :param Optional[dict] localns: Dictionary containing local variables. + :param Optional[list] attribs: List of attribs for the given class. + This is necessary when calling from inside a ``field_transformer`` + since *cls* is not an *attrs* class yet. + :param bool include_extras: Resolve more accurately, if possible. + Pass ``include_extras`` to ``typing.get_hints``, if supported by the + typing module. On supported Python versions (3.9+), this resolves the + types more accurately. + + :raise TypeError: If *cls* is not a class. + :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs* + class and you didn't pass any attribs. + :raise NameError: If types cannot be resolved because of missing variables. + + :returns: *cls* so you can use this function also as a class decorator. 
+ Please note that you have to apply it **after** `attrs.define`. That + means the decorator has to come in the line **before** `attrs.define`. + + .. versionadded:: 20.1.0 + .. versionadded:: 21.1.0 *attribs* + .. versionadded:: 23.1.0 *include_extras* + + """ + # Since calling get_type_hints is expensive we cache whether we've + # done it already. + if getattr(cls, "__attrs_types_resolved__", None) != cls: + import typing + + kwargs = {"globalns": globalns, "localns": localns} + + if PY_3_9_PLUS: + kwargs["include_extras"] = include_extras + + hints = typing.get_type_hints(cls, **kwargs) + for field in fields(cls) if attribs is None else attribs: + if field.name in hints: + # Since fields have been frozen we must work around it. + _obj_setattr(field, "type", hints[field.name]) + # We store the class we resolved so that subclasses know they haven't + # been resolved. + cls.__attrs_types_resolved__ = cls + + # Return the class so you can use it as a decorator too. + return cls diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_make.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_make.py new file mode 100644 index 000000000..10b4eca77 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_make.py @@ -0,0 +1,3119 @@ +# SPDX-License-Identifier: MIT + +import contextlib +import copy +import enum +import functools +import inspect +import itertools +import linecache +import sys +import types +import typing + +from operator import itemgetter + +# We need to import _compat itself in addition to the _compat members to avoid +# having the thread-local in the globals here. +from . import _compat, _config, setters +from ._compat import ( + PY310, + PY_3_8_PLUS, + _AnnotationExtractor, + get_generic_base, +) +from .exceptions import ( + DefaultAlreadySetError, + FrozenInstanceError, + NotAnAttrsClassError, + UnannotatedAttributeError, +) + + +# This is used at least twice, so cache it here. +_obj_setattr = object.__setattr__ +_init_converter_pat = "__attr_converter_%s" +_init_factory_pat = "__attr_factory_%s" +_classvar_prefixes = ( + "typing.ClassVar", + "t.ClassVar", + "ClassVar", + "typing_extensions.ClassVar", +) +# we don't use a double-underscore prefix because that triggers +# name mangling when trying to create a slot for the field +# (when slots=True) +_hash_cache_field = "_attrs_cached_hash" + +_empty_metadata_singleton = types.MappingProxyType({}) + +# Unique object for unequivocal getattr() defaults. +_sentinel = object() + +_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate) + + +class _Nothing(enum.Enum): + """ + Sentinel to indicate the lack of a value when ``None`` is ambiguous. + + If extending attrs, you can use ``typing.Literal[NOTHING]`` to show + that a value may be ``NOTHING``. + + .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. + .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant. + """ + + NOTHING = enum.auto() + + def __repr__(self): + return "NOTHING" + + def __bool__(self): + return False + + +NOTHING = _Nothing.NOTHING +""" +Sentinel to indicate the lack of a value when ``None`` is ambiguous. +""" + + +class _CacheHashWrapper(int): + """ + An integer subclass that pickles / copies as None + + This is used for non-slots classes with ``cache_hash=True``, to avoid + serializing a potentially (even likely) invalid hash value. Since ``None`` + is the default value for uncalculated hashes, whenever this is copied, + the copy's value for the hash should automatically reset. 
+ + See GH #613 for more details. + """ + + def __reduce__(self, _none_constructor=type(None), _args=()): # noqa: B008 + return _none_constructor, _args + + +def attrib( + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, + alias=None, +): + """ + Create a new attribute on a class. + + .. warning:: + + Does *not* do anything unless the class is also decorated with `attr.s` + / `attrs.define` / and so on! + + Please consider using `attrs.field` in new code (``attr.ib`` will *never* + go away, though). + + :param default: A value that is used if an *attrs*-generated ``__init__`` + is used and no value is passed while instantiating or the attribute is + excluded using ``init=False``. + + If the value is an instance of `attrs.Factory`, its callable will be + used to construct a new value (useful for mutable data types like lists + or dicts). + + If a default is not set (or set manually to `attrs.NOTHING`), a value + *must* be supplied when instantiating; otherwise a `TypeError` will be + raised. + + The default can also be set using decorator notation as shown below. + + .. seealso:: `defaults` + + :param callable factory: Syntactic sugar for + ``default=attr.Factory(factory)``. + + :param validator: `callable` that is called by *attrs*-generated + ``__init__`` methods after the instance has been initialized. They + receive the initialized instance, the :func:`~attrs.Attribute`, and the + passed value. + + The return value is *not* inspected so the validator has to throw an + exception itself. + + If a `list` is passed, its items are treated as validators and must all + pass. + + Validators can be globally disabled and re-enabled using + `attrs.validators.get_disabled` / `attrs.validators.set_disabled`. + + The validator can also be set using decorator notation as shown below. + + .. seealso:: :ref:`validators` + + :type validator: `callable` or a `list` of `callable`\\ s. + + :param repr: Include this attribute in the generated ``__repr__`` method. + If ``True``, include the attribute; if ``False``, omit it. By default, + the built-in ``repr()`` function is used. To override how the attribute + value is formatted, pass a ``callable`` that takes a single value and + returns a string. Note that the resulting string is used as-is, i.e. it + will be used directly *instead* of calling ``repr()`` (the default). + :type repr: a `bool` or a `callable` to use a custom function. + + :param eq: If ``True`` (default), include this attribute in the generated + ``__eq__`` and ``__ne__`` methods that check two instances for + equality. To override how the attribute value is compared, pass a + ``callable`` that takes a single value and returns the value to be + compared. + + .. seealso:: `comparison` + :type eq: a `bool` or a `callable`. + + :param order: If ``True`` (default), include this attributes in the + generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. To + override how the attribute value is ordered, pass a ``callable`` that + takes a single value and returns the value to be ordered. + + .. seealso:: `comparison` + :type order: a `bool` or a `callable`. + + :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the + same value. Must not be mixed with *eq* or *order*. + + .. seealso:: `comparison` + :type cmp: a `bool` or a `callable`. 
+ + :param bool | None hash: Include this attribute in the generated + ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This + is the correct behavior according the Python spec. Setting this value + to anything else than ``None`` is *discouraged*. + + .. seealso:: `hashing` + :param bool init: Include this attribute in the generated ``__init__`` + method. It is possible to set this to ``False`` and set a default + value. In that case this attributed is unconditionally initialized + with the specified default value or factory. + + .. seealso:: `init` + :param callable converter: `callable` that is called by *attrs*-generated + ``__init__`` methods to convert attribute's value to the desired + format. It is given the passed-in value, and the returned value will + be used as the new value of the attribute. The value is converted + before being passed to the validator, if any. + + .. seealso:: :ref:`converters` + :param dict | None metadata: An arbitrary mapping, to be used by + third-party components. See `extending-metadata`. + + :param type: The type of the attribute. Nowadays, the preferred method to + specify the type is using a variable annotation (see :pep:`526`). This + argument is provided for backward compatibility. Regardless of the + approach used, the type will be stored on ``Attribute.type``. + + Please note that *attrs* doesn't do anything with this metadata by + itself. You can use it as part of your own code or for `static type + checking `. + :param bool kw_only: Make this attribute keyword-only in the generated + ``__init__`` (if ``init`` is ``False``, this parameter is ignored). + :param on_setattr: Allows to overwrite the *on_setattr* setting from + `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used. + Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this + attribute -- regardless of the setting in `attr.s`. + :type on_setattr: `callable`, or a list of callables, or `None`, or + `attrs.setters.NO_OP` + :param str | None alias: Override this attribute's parameter name in the + generated ``__init__`` method. If left `None`, default to ``name`` + stripped of leading underscores. See `private-attributes`. + + .. versionadded:: 15.2.0 *convert* + .. versionadded:: 16.3.0 *metadata* + .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. + .. versionchanged:: 17.1.0 + *hash* is ``None`` and therefore mirrors *eq* by default. + .. versionadded:: 17.3.0 *type* + .. deprecated:: 17.4.0 *convert* + .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated + *convert* to achieve consistency with other noun-based arguments. + .. versionadded:: 18.1.0 + ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. + .. versionadded:: 18.2.0 *kw_only* + .. versionchanged:: 19.2.0 *convert* keyword argument removed. + .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 + .. versionchanged:: 21.1.0 + *eq*, *order*, and *cmp* also accept a custom callable + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 22.2.0 *alias* + """ + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq, order, True + ) + + if hash is not None and hash is not True and hash is not False: + msg = "Invalid value for hash. Must be True, False, or None." 
+ raise TypeError(msg) + + if factory is not None: + if default is not NOTHING: + msg = ( + "The `default` and `factory` arguments are mutually exclusive." + ) + raise ValueError(msg) + if not callable(factory): + msg = "The `factory` argument must be a callable." + raise ValueError(msg) + default = Factory(factory) + + if metadata is None: + metadata = {} + + # Apply syntactic sugar by auto-wrapping. + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + if validator and isinstance(validator, (list, tuple)): + validator = and_(*validator) + + if converter and isinstance(converter, (list, tuple)): + converter = pipe(*converter) + + return _CountingAttr( + default=default, + validator=validator, + repr=repr, + cmp=None, + hash=hash, + init=init, + converter=converter, + metadata=metadata, + type=type, + kw_only=kw_only, + eq=eq, + eq_key=eq_key, + order=order, + order_key=order_key, + on_setattr=on_setattr, + alias=alias, + ) + + +def _compile_and_eval(script, globs, locs=None, filename=""): + """ + "Exec" the script with the given global (globs) and local (locs) variables. + """ + bytecode = compile(script, filename, "exec") + eval(bytecode, globs, locs) + + +def _make_method(name, script, filename, globs): + """ + Create the method with the script given and return the method object. + """ + locs = {} + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + count = 1 + base_filename = filename + while True: + linecache_tuple = ( + len(script), + None, + script.splitlines(True), + filename, + ) + old_val = linecache.cache.setdefault(filename, linecache_tuple) + if old_val == linecache_tuple: + break + + filename = f"{base_filename[:-1]}-{count}>" + count += 1 + + _compile_and_eval(script, globs, locs, filename) + + return locs[name] + + +def _make_attr_tuple_class(cls_name, attr_names): + """ + Create a tuple subclass to hold `Attribute`s for an `attrs` class. + + The subclass is a bare tuple with properties for names. + + class MyClassAttributes(tuple): + __slots__ = () + x = property(itemgetter(0)) + """ + attr_class_name = f"{cls_name}Attributes" + attr_class_template = [ + f"class {attr_class_name}(tuple):", + " __slots__ = ()", + ] + if attr_names: + for i, attr_name in enumerate(attr_names): + attr_class_template.append( + f" {attr_name} = _attrs_property(_attrs_itemgetter({i}))" + ) + else: + attr_class_template.append(" pass") + globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} + _compile_and_eval("\n".join(attr_class_template), globs) + return globs[attr_class_name] + + +# Tuple class for extracted attributes from a class definition. +# `base_attrs` is a subset of `attrs`. +_Attributes = _make_attr_tuple_class( + "_Attributes", + [ + # all attributes to build dunder methods for + "attrs", + # attributes that have been inherited + "base_attrs", + # map inherited attributes to their originating classes + "base_attrs_map", + ], +) + + +def _is_class_var(annot): + """ + Check whether *annot* is a typing.ClassVar. + + The string comparison hack is used to avoid evaluating all string + annotations which would put attrs-based classes at a performance + disadvantage compared to plain old classes. + """ + annot = str(annot) + + # Annotation can be quoted. 
+ if annot.startswith(("'", '"')) and annot.endswith(("'", '"')): + annot = annot[1:-1] + + return annot.startswith(_classvar_prefixes) + + +def _has_own_attribute(cls, attrib_name): + """ + Check whether *cls* defines *attrib_name* (and doesn't just inherit it). + """ + attr = getattr(cls, attrib_name, _sentinel) + if attr is _sentinel: + return False + + for base_cls in cls.__mro__[1:]: + a = getattr(base_cls, attrib_name, None) + if attr is a: + return False + + return True + + +def _get_annotations(cls): + """ + Get annotations for *cls*. + """ + if _has_own_attribute(cls, "__annotations__"): + return cls.__annotations__ + + return {} + + +def _collect_base_attrs(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in reversed(cls.__mro__[1:-1]): + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.inherited or a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) # noqa: PLW2901 + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + # For each name, only keep the freshest definition i.e. the furthest at the + # back. base_attr_map is fine because it gets overwritten with every new + # instance. + filtered = [] + seen = set() + for a in reversed(base_attrs): + if a.name in seen: + continue + filtered.insert(0, a) + seen.add(a.name) + + return filtered, base_attr_map + + +def _collect_base_attrs_broken(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + + N.B. *taken_attr_names* will be mutated. + + Adhere to the old incorrect behavior. + + Notably it collects from the front and considers inherited attributes which + leads to the buggy behavior reported in #428. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in cls.__mro__[1:-1]: + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) # noqa: PLW2901 + taken_attr_names.add(a.name) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + return base_attrs, base_attr_map + + +def _transform_attrs( + cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer +): + """ + Transform all `_CountingAttr`s on a class into `Attribute`s. + + If *these* is passed, use that and don't look for them on the class. + + *collect_by_mro* is True, collect them in the correct MRO order, otherwise + use the old -- incorrect -- order. See #428. + + Return an `_Attributes`. + """ + cd = cls.__dict__ + anns = _get_annotations(cls) + + if these is not None: + ca_list = list(these.items()) + elif auto_attribs is True: + ca_names = { + name + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + } + ca_list = [] + annot_names = set() + for attr_name, type in anns.items(): + if _is_class_var(type): + continue + annot_names.add(attr_name) + a = cd.get(attr_name, NOTHING) + + if not isinstance(a, _CountingAttr): + a = attrib() if a is NOTHING else attrib(default=a) + ca_list.append((attr_name, a)) + + unannotated = ca_names - annot_names + if len(unannotated) > 0: + raise UnannotatedAttributeError( + "The following `attr.ib`s lack a type annotation: " + + ", ".join( + sorted(unannotated, key=lambda n: cd.get(n).counter) + ) + + "." 
+ ) + else: + ca_list = sorted( + ( + (name, attr) + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + ), + key=lambda e: e[1].counter, + ) + + own_attrs = [ + Attribute.from_counting_attr( + name=attr_name, ca=ca, type=anns.get(attr_name) + ) + for attr_name, ca in ca_list + ] + + if collect_by_mro: + base_attrs, base_attr_map = _collect_base_attrs( + cls, {a.name for a in own_attrs} + ) + else: + base_attrs, base_attr_map = _collect_base_attrs_broken( + cls, {a.name for a in own_attrs} + ) + + if kw_only: + own_attrs = [a.evolve(kw_only=True) for a in own_attrs] + base_attrs = [a.evolve(kw_only=True) for a in base_attrs] + + attrs = base_attrs + own_attrs + + # Mandatory vs non-mandatory attr order only matters when they are part of + # the __init__ signature and when they aren't kw_only (which are moved to + # the end and can be mandatory or non-mandatory in any order, as they will + # be specified as keyword args anyway). Check the order of those attrs: + had_default = False + for a in (a for a in attrs if a.init is not False and a.kw_only is False): + if had_default is True and a.default is NOTHING: + msg = f"No mandatory attributes allowed after an attribute with a default value or factory. Attribute in question: {a!r}" + raise ValueError(msg) + + if had_default is False and a.default is not NOTHING: + had_default = True + + if field_transformer is not None: + attrs = field_transformer(cls, attrs) + + # Resolve default field alias after executing field_transformer. + # This allows field_transformer to differentiate between explicit vs + # default aliases and supply their own defaults. + attrs = [ + a.evolve(alias=_default_init_alias_for(a.name)) if not a.alias else a + for a in attrs + ] + + # Create AttrsClass *after* applying the field_transformer since it may + # add or remove attributes! + attr_names = [a.name for a in attrs] + AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) + + return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map)) + + +def _make_cached_property_getattr( + cached_properties, + original_getattr, + cls, +): + lines = [ + # Wrapped to get `__class__` into closure cell for super() + # (It will be replaced with the newly constructed class after construction). + "def wrapper():", + " __class__ = _cls", + " def __getattr__(self, item, cached_properties=cached_properties, original_getattr=original_getattr, _cached_setattr_get=_cached_setattr_get):", + " func = cached_properties.get(item)", + " if func is not None:", + " result = func(self)", + " _setter = _cached_setattr_get(self)", + " _setter(item, result)", + " return result", + ] + if original_getattr is not None: + lines.append( + " return original_getattr(self, item)", + ) + else: + lines.extend( + [ + " if hasattr(super(), '__getattr__'):", + " return super().__getattr__(item)", + " original_error = f\"'{self.__class__.__name__}' object has no attribute '{item}'\"", + " raise AttributeError(original_error)", + ] + ) + + lines.extend( + [ + " return __getattr__", + "__getattr__ = wrapper()", + ] + ) + + unique_filename = _generate_unique_filename(cls, "getattr") + + glob = { + "cached_properties": cached_properties, + "_cached_setattr_get": _obj_setattr.__get__, + "_cls": cls, + "original_getattr": original_getattr, + } + + return _make_method( + "__getattr__", + "\n".join(lines), + unique_filename, + glob, + ) + + +def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. 
+ """ + if isinstance(self, BaseException) and name in ( + "__cause__", + "__context__", + "__traceback__", + ): + BaseException.__setattr__(self, name, value) + return + + raise FrozenInstanceError() + + +def _frozen_delattrs(self, name): + """ + Attached to frozen classes as __delattr__. + """ + raise FrozenInstanceError() + + +class _ClassBuilder: + """ + Iteratively build *one* class. + """ + + __slots__ = ( + "_attr_names", + "_attrs", + "_base_attr_map", + "_base_names", + "_cache_hash", + "_cls", + "_cls_dict", + "_delete_attribs", + "_frozen", + "_has_pre_init", + "_pre_init_has_args", + "_has_post_init", + "_is_exc", + "_on_setattr", + "_slots", + "_weakref_slot", + "_wrote_own_setattr", + "_has_custom_setattr", + ) + + def __init__( + self, + cls, + these, + slots, + frozen, + weakref_slot, + getstate_setstate, + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_custom_setattr, + field_transformer, + ): + attrs, base_attrs, base_map = _transform_attrs( + cls, + these, + auto_attribs, + kw_only, + collect_by_mro, + field_transformer, + ) + + self._cls = cls + self._cls_dict = dict(cls.__dict__) if slots else {} + self._attrs = attrs + self._base_names = {a.name for a in base_attrs} + self._base_attr_map = base_map + self._attr_names = tuple(a.name for a in attrs) + self._slots = slots + self._frozen = frozen + self._weakref_slot = weakref_slot + self._cache_hash = cache_hash + self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) + self._pre_init_has_args = False + if self._has_pre_init: + # Check if the pre init method has more arguments than just `self` + # We want to pass arguments if pre init expects arguments + pre_init_func = cls.__attrs_pre_init__ + pre_init_signature = inspect.signature(pre_init_func) + self._pre_init_has_args = len(pre_init_signature.parameters) > 1 + self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) + self._delete_attribs = not bool(these) + self._is_exc = is_exc + self._on_setattr = on_setattr + + self._has_custom_setattr = has_custom_setattr + self._wrote_own_setattr = False + + self._cls_dict["__attrs_attrs__"] = self._attrs + + if frozen: + self._cls_dict["__setattr__"] = _frozen_setattrs + self._cls_dict["__delattr__"] = _frozen_delattrs + + self._wrote_own_setattr = True + elif on_setattr in ( + _ng_default_on_setattr, + setters.validate, + setters.convert, + ): + has_validator = has_converter = False + for a in attrs: + if a.validator is not None: + has_validator = True + if a.converter is not None: + has_converter = True + + if has_validator and has_converter: + break + if ( + ( + on_setattr == _ng_default_on_setattr + and not (has_validator or has_converter) + ) + or (on_setattr == setters.validate and not has_validator) + or (on_setattr == setters.convert and not has_converter) + ): + # If class-level on_setattr is set to convert + validate, but + # there's no field to convert or validate, pretend like there's + # no on_setattr. + self._on_setattr = None + + if getstate_setstate: + ( + self._cls_dict["__getstate__"], + self._cls_dict["__setstate__"], + ) = self._make_getstate_setstate() + + def __repr__(self): + return f"<_ClassBuilder(cls={self._cls.__name__})>" + + if PY310: + import abc + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. 
+ """ + if self._slots is True: + return self._create_slots_class() + + return self.abc.update_abstractmethods( + self._patch_original_class() + ) + + else: + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. + """ + if self._slots is True: + return self._create_slots_class() + + return self._patch_original_class() + + def _patch_original_class(self): + """ + Apply accumulated methods and return the class. + """ + cls = self._cls + base_names = self._base_names + + # Clean class of attribute definitions (`attr.ib()`s). + if self._delete_attribs: + for name in self._attr_names: + if ( + name not in base_names + and getattr(cls, name, _sentinel) is not _sentinel + ): + # An AttributeError can happen if a base class defines a + # class variable and we want to set an attribute with the + # same name by using only a type annotation. + with contextlib.suppress(AttributeError): + delattr(cls, name) + + # Attach our dunder methods. + for name, value in self._cls_dict.items(): + setattr(cls, name, value) + + # If we've inherited an attrs __setattr__ and don't write our own, + # reset it to object's. + if not self._wrote_own_setattr and getattr( + cls, "__attrs_own_setattr__", False + ): + cls.__attrs_own_setattr__ = False + + if not self._has_custom_setattr: + cls.__setattr__ = _obj_setattr + + return cls + + def _create_slots_class(self): + """ + Build and return a new class with a `__slots__` attribute. + """ + cd = { + k: v + for k, v in self._cls_dict.items() + if k not in (*tuple(self._attr_names), "__dict__", "__weakref__") + } + + # If our class doesn't have its own implementation of __setattr__ + # (either from the user or by us), check the bases, if one of them has + # an attrs-made __setattr__, that needs to be reset. We don't walk the + # MRO because we only care about our immediate base classes. + # XXX: This can be confused by subclassing a slotted attrs class with + # XXX: a non-attrs class and subclass the resulting class with an attrs + # XXX: class. See `test_slotted_confused` for details. For now that's + # XXX: OK with us. + if not self._wrote_own_setattr: + cd["__attrs_own_setattr__"] = False + + if not self._has_custom_setattr: + for base_cls in self._cls.__bases__: + if base_cls.__dict__.get("__attrs_own_setattr__", False): + cd["__setattr__"] = _obj_setattr + break + + # Traverse the MRO to collect existing slots + # and check for an existing __weakref__. + existing_slots = {} + weakref_inherited = False + for base_cls in self._cls.__mro__[1:-1]: + if base_cls.__dict__.get("__weakref__", None) is not None: + weakref_inherited = True + existing_slots.update( + { + name: getattr(base_cls, name) + for name in getattr(base_cls, "__slots__", []) + } + ) + + base_names = set(self._base_names) + + names = self._attr_names + if ( + self._weakref_slot + and "__weakref__" not in getattr(self._cls, "__slots__", ()) + and "__weakref__" not in names + and not weakref_inherited + ): + names += ("__weakref__",) + + if PY_3_8_PLUS: + cached_properties = { + name: cached_property.func + for name, cached_property in cd.items() + if isinstance(cached_property, functools.cached_property) + } + else: + # `functools.cached_property` was introduced in 3.8. + # So can't be used before this. + cached_properties = {} + + # Collect methods with a `__class__` reference that are shadowed in the new class. + # To know to update them. 
+ additional_closure_functions_to_update = [] + if cached_properties: + # Add cached properties to names for slotting. + names += tuple(cached_properties.keys()) + + for name in cached_properties: + # Clear out function from class to avoid clashing. + del cd[name] + + class_annotations = _get_annotations(self._cls) + for name, func in cached_properties.items(): + annotation = inspect.signature(func).return_annotation + if annotation is not inspect.Parameter.empty: + class_annotations[name] = annotation + + original_getattr = cd.get("__getattr__") + if original_getattr is not None: + additional_closure_functions_to_update.append(original_getattr) + + cd["__getattr__"] = _make_cached_property_getattr( + cached_properties, original_getattr, self._cls + ) + + # We only add the names of attributes that aren't inherited. + # Setting __slots__ to inherited attributes wastes memory. + slot_names = [name for name in names if name not in base_names] + + # There are slots for attributes from current class + # that are defined in parent classes. + # As their descriptors may be overridden by a child class, + # we collect them here and update the class dict + reused_slots = { + slot: slot_descriptor + for slot, slot_descriptor in existing_slots.items() + if slot in slot_names + } + slot_names = [name for name in slot_names if name not in reused_slots] + cd.update(reused_slots) + if self._cache_hash: + slot_names.append(_hash_cache_field) + + cd["__slots__"] = tuple(slot_names) + + cd["__qualname__"] = self._cls.__qualname__ + + # Create new class based on old class and our methods. + cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) + + # The following is a fix for + # . + # If a method mentions `__class__` or uses the no-arg super(), the + # compiler will bake a reference to the class in the method itself + # as `method.__closure__`. Since we replace the class with a + # clone, we rewrite these references so it keeps working. + for item in itertools.chain( + cls.__dict__.values(), additional_closure_functions_to_update + ): + if isinstance(item, (classmethod, staticmethod)): + # Class- and staticmethods hide their functions inside. + # These might need to be rewritten as well. + closure_cells = getattr(item.__func__, "__closure__", None) + elif isinstance(item, property): + # Workaround for property `super()` shortcut (PY3-only). + # There is no universal way for other descriptors. + closure_cells = getattr(item.fget, "__closure__", None) + else: + closure_cells = getattr(item, "__closure__", None) + + if not closure_cells: # Catch None or the empty list. + continue + for cell in closure_cells: + try: + match = cell.cell_contents is self._cls + except ValueError: # noqa: PERF203 + # ValueError: Cell is empty + pass + else: + if match: + cell.cell_contents = cls + return cls + + def add_repr(self, ns): + self._cls_dict["__repr__"] = self._add_method_dunders( + _make_repr(self._attrs, ns, self._cls) + ) + return self + + def add_str(self): + repr = self._cls_dict.get("__repr__") + if repr is None: + msg = "__str__ can only be generated if a __repr__ exists." + raise ValueError(msg) + + def __str__(self): + return self.__repr__() + + self._cls_dict["__str__"] = self._add_method_dunders(__str__) + return self + + def _make_getstate_setstate(self): + """ + Create custom __setstate__ and __getstate__ methods. + """ + # __weakref__ is not writable. 
+ state_attr_names = tuple( + an for an in self._attr_names if an != "__weakref__" + ) + + def slots_getstate(self): + """ + Automatically created by attrs. + """ + return {name: getattr(self, name) for name in state_attr_names} + + hash_caching_enabled = self._cache_hash + + def slots_setstate(self, state): + """ + Automatically created by attrs. + """ + __bound_setattr = _obj_setattr.__get__(self) + if isinstance(state, tuple): + # Backward compatibility with attrs instances pickled with + # attrs versions before v22.2.0 which stored tuples. + for name, value in zip(state_attr_names, state): + __bound_setattr(name, value) + else: + for name in state_attr_names: + if name in state: + __bound_setattr(name, state[name]) + + # The hash code cache is not included when the object is + # serialized, but it still needs to be initialized to None to + # indicate that the first call to __hash__ should be a cache + # miss. + if hash_caching_enabled: + __bound_setattr(_hash_cache_field, None) + + return slots_getstate, slots_setstate + + def make_unhashable(self): + self._cls_dict["__hash__"] = None + return self + + def add_hash(self): + self._cls_dict["__hash__"] = self._add_method_dunders( + _make_hash( + self._cls, + self._attrs, + frozen=self._frozen, + cache_hash=self._cache_hash, + ) + ) + + return self + + def add_init(self): + self._cls_dict["__init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._pre_init_has_args, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=False, + ) + ) + + return self + + def add_match_args(self): + self._cls_dict["__match_args__"] = tuple( + field.name + for field in self._attrs + if field.init and not field.kw_only + ) + + def add_attrs_init(self): + self._cls_dict["__attrs_init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._pre_init_has_args, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=True, + ) + ) + + return self + + def add_eq(self): + cd = self._cls_dict + + cd["__eq__"] = self._add_method_dunders( + _make_eq(self._cls, self._attrs) + ) + cd["__ne__"] = self._add_method_dunders(_make_ne()) + + return self + + def add_order(self): + cd = self._cls_dict + + cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( + self._add_method_dunders(meth) + for meth in _make_order(self._cls, self._attrs) + ) + + return self + + def add_setattr(self): + if self._frozen: + return self + + sa_attrs = {} + for a in self._attrs: + on_setattr = a.on_setattr or self._on_setattr + if on_setattr and on_setattr is not setters.NO_OP: + sa_attrs[a.name] = a, on_setattr + + if not sa_attrs: + return self + + if self._has_custom_setattr: + # We need to write a __setattr__ but there already is one! + msg = "Can't combine custom __setattr__ with on_setattr hooks." 
+ raise ValueError(msg) + + # docstring comes from _add_method_dunders + def __setattr__(self, name, val): + try: + a, hook = sa_attrs[name] + except KeyError: + nval = val + else: + nval = hook(self, a, val) + + _obj_setattr(self, name, nval) + + self._cls_dict["__attrs_own_setattr__"] = True + self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) + self._wrote_own_setattr = True + + return self + + def _add_method_dunders(self, method): + """ + Add __module__ and __qualname__ to a *method* if possible. + """ + with contextlib.suppress(AttributeError): + method.__module__ = self._cls.__module__ + + with contextlib.suppress(AttributeError): + method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}" + + with contextlib.suppress(AttributeError): + method.__doc__ = ( + "Method generated by attrs for class " + f"{self._cls.__qualname__}." + ) + + return method + + +def _determine_attrs_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + msg = "Don't mix `cmp` with `eq' and `order`." + raise ValueError(msg) + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + return cmp, cmp + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq = default_eq + + if order is None: + order = eq + + if eq is False and order is True: + msg = "`order` can only be True if `eq` is True too." + raise ValueError(msg) + + return eq, order + + +def _determine_attrib_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + msg = "Don't mix `cmp` with `eq' and `order`." + raise ValueError(msg) + + def decide_callable_or_boolean(value): + """ + Decide whether a key function is used. + """ + if callable(value): + value, key = True, value + else: + key = None + return value, key + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + cmp, cmp_key = decide_callable_or_boolean(cmp) + return cmp, cmp_key, cmp, cmp_key + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq, eq_key = default_eq, None + else: + eq, eq_key = decide_callable_or_boolean(eq) + + if order is None: + order, order_key = eq, eq_key + else: + order, order_key = decide_callable_or_boolean(order) + + if eq is False and order is True: + msg = "`order` can only be True if `eq` is True too." + raise ValueError(msg) + + return eq, eq_key, order, order_key + + +def _determine_whether_to_implement( + cls, flag, auto_detect, dunders, default=True +): + """ + Check whether we should implement a set of methods for *cls*. + + *flag* is the argument passed into @attr.s like 'init', *auto_detect* the + same as passed into @attr.s and *dunders* is a tuple of attribute names + whose presence signal that the user has implemented it themselves. + + Return *default* if no reason for either for or against is found. + """ + if flag is True or flag is False: + return flag + + if flag is None and auto_detect is False: + return default + + # Logically, flag is None and auto_detect is True here. 
+ for dunder in dunders: + if _has_own_attribute(cls, dunder): + return False + + return default + + +def attrs( + maybe_cls=None, + these=None, + repr_ns=None, + repr=None, + cmp=None, + hash=None, + init=None, + slots=False, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=False, + kw_only=False, + cache_hash=False, + auto_exc=False, + eq=None, + order=None, + auto_detect=False, + collect_by_mro=False, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, + unsafe_hash=None, +): + r""" + A class decorator that adds :term:`dunder methods` according to the + specified attributes using `attr.ib` or the *these* argument. + + Please consider using `attrs.define` / `attrs.frozen` in new code + (``attr.s`` will *never* go away, though). + + :param these: A dictionary of name to `attr.ib` mappings. This is useful + to avoid the definition of your attributes within the class body + because you can't (e.g. if you want to add ``__repr__`` methods to + Django models) or don't want to. + + If *these* is not ``None``, *attrs* will *not* search the class body + for attributes and will *not* remove any attributes from it. + + The order is deduced from the order of the attributes inside *these*. + + :type these: `dict` of `str` to `attr.ib` + + :param str repr_ns: When using nested classes, there's no way in Python 2 + to automatically detect that. Therefore it's possible to set the + namespace explicitly for a more meaningful ``repr`` output. + :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*, + *order*, and *hash* arguments explicitly, assume they are set to + ``True`` **unless any** of the involved methods for one of the + arguments is implemented in the *current* class (i.e. it is *not* + inherited from some base class). + + So for example by implementing ``__eq__`` on a class yourself, *attrs* + will deduce ``eq=False`` and will create *neither* ``__eq__`` *nor* + ``__ne__`` (but Python classes come with a sensible ``__ne__`` by + default, so it *should* be enough to only implement ``__eq__`` in most + cases). + + .. warning:: + + If you prevent *attrs* from creating the ordering methods for you + (``order=False``, e.g. by implementing ``__le__``), it becomes + *your* responsibility to make sure its ordering is sound. The best + way is to use the `functools.total_ordering` decorator. + + + Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*, *cmp*, + or *hash* overrides whatever *auto_detect* would determine. + + :param bool repr: Create a ``__repr__`` method with a human readable + representation of *attrs* attributes.. + :param bool str: Create a ``__str__`` method that is identical to + ``__repr__``. This is usually not necessary except for `Exception`\ s. + :param bool | None eq: If ``True`` or ``None`` (default), add ``__eq__`` + and ``__ne__`` methods that check two instances for equality. + + They compare the instances as if they were tuples of their *attrs* + attributes if and only if the types of both classes are *identical*! + + .. seealso:: `comparison` + :param bool | None order: If ``True``, add ``__lt__``, ``__le__``, + ``__gt__``, and ``__ge__`` methods that behave like *eq* above and + allow instances to be ordered. If ``None`` (default) mirror value of + *eq*. + + .. seealso:: `comparison` + :param bool | None cmp: Setting *cmp* is equivalent to setting *eq* and + *order* to the same value. Must not be mixed with *eq* or *order*. + + .. 
seealso:: `comparison` + :param bool | None unsafe_hash: If ``None`` (default), the ``__hash__`` + method is generated according how *eq* and *frozen* are set. + + 1. If *both* are True, *attrs* will generate a ``__hash__`` for you. + 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to + None, marking it unhashable (which it is). + 3. If *eq* is False, ``__hash__`` will be left untouched meaning the + ``__hash__`` method of the base class will be used (if base class is + ``object``, this means it will fall back to id-based hashing.). + + Although not recommended, you can decide for yourself and force *attrs* + to create one (e.g. if the class is immutable even though you didn't + freeze it programmatically) by passing ``True`` or not. Both of these + cases are rather special and should be used carefully. + + .. seealso:: + + - Our documentation on `hashing`, + - Python's documentation on `object.__hash__`, + - and the `GitHub issue that led to the default \ + behavior `_ for + more details. + + :param bool | None hash: Alias for *unsafe_hash*. *unsafe_hash* takes + precedence. + :param bool init: Create a ``__init__`` method that initializes the *attrs* + attributes. Leading underscores are stripped for the argument name. If + a ``__attrs_pre_init__`` method exists on the class, it will be called + before the class is initialized. If a ``__attrs_post_init__`` method + exists on the class, it will be called after the class is fully + initialized. + + If ``init`` is ``False``, an ``__attrs_init__`` method will be injected + instead. This allows you to define a custom ``__init__`` method that + can do pre-init work such as ``super().__init__()``, and then call + ``__attrs_init__()`` and ``__attrs_post_init__()``. + + .. seealso:: `init` + :param bool slots: Create a :term:`slotted class ` that's + more memory-efficient. Slotted classes are generally superior to the + default dict classes, but have some gotchas you should know about, so + we encourage you to read the :term:`glossary entry `. + :param bool frozen: Make instances immutable after initialization. If + someone attempts to modify a frozen instance, + `attrs.exceptions.FrozenInstanceError` is raised. + + .. note:: + + 1. This is achieved by installing a custom ``__setattr__`` method + on your class, so you can't implement your own. + + 2. True immutability is impossible in Python. + + 3. This *does* have a minor a runtime performance `impact + ` when initializing new instances. In other words: + ``__init__`` is slightly slower with ``frozen=True``. + + 4. If a class is frozen, you cannot modify ``self`` in + ``__attrs_post_init__`` or a self-written ``__init__``. You can + circumvent that limitation by using ``object.__setattr__(self, + "attribute_name", value)``. + + 5. Subclasses of a frozen class are frozen too. + + :param bool weakref_slot: Make instances weak-referenceable. This has no + effect unless ``slots`` is also enabled. + :param bool auto_attribs: If ``True``, collect :pep:`526`-annotated + attributes from the class body. + + In this case, you **must** annotate every field. If *attrs* encounters + a field that is set to an `attr.ib` but lacks a type annotation, an + `attr.exceptions.UnannotatedAttributeError` is raised. Use + ``field_name: typing.Any = attr.ib(...)`` if you don't want to set a + type. + + If you assign a value to those attributes (e.g. ``x: int = 42``), that + value becomes the default value like if it were passed using + ``attr.ib(default=42)``. 
Passing an instance of `attrs.Factory` also + works as expected in most cases (see warning below). + + Attributes annotated as `typing.ClassVar`, and attributes that are + neither annotated nor set to an `attr.ib` are **ignored**. + + .. warning:: + For features that use the attribute name to create decorators (e.g. + :ref:`validators `), you still *must* assign `attr.ib` + to them. Otherwise Python will either not find the name or try to + use the default value to call e.g. ``validator`` on it. + + These errors can be quite confusing and probably the most common bug + report on our bug tracker. + + :param bool kw_only: Make all attributes keyword-only in the generated + ``__init__`` (if ``init`` is ``False``, this parameter is ignored). + :param bool cache_hash: Ensure that the object's hash code is computed only + once and stored on the object. If this is set to ``True``, hashing + must be either explicitly or implicitly enabled for this class. If the + hash code is cached, avoid any reassignments of fields involved in hash + code computation or mutations of the objects those fields point to + after object creation. If such changes occur, the behavior of the + object's hash code is undefined. + :param bool auto_exc: If the class subclasses `BaseException` (which + implicitly includes any subclass of any exception), the following + happens to behave like a well-behaved Python exceptions class: + + - the values for *eq*, *order*, and *hash* are ignored and the + instances compare and hash by the instance's ids (N.B. *attrs* will + *not* remove existing implementations of ``__hash__`` or the equality + methods. It just won't add own ones.), + - all attributes that are either passed into ``__init__`` or have a + default value are additionally available as a tuple in the ``args`` + attribute, + - the value of *str* is ignored leaving ``__str__`` to base classes. + :param bool collect_by_mro: Setting this to `True` fixes the way *attrs* + collects attributes from base classes. The default behavior is + incorrect in certain cases of multiple inheritance. It should be on by + default but is kept off for backward-compatibility. + + .. seealso:: + Issue `#428 `_ + + :param bool | None getstate_setstate: + .. note:: + This is usually only interesting for slotted classes and you should + probably just set *auto_detect* to `True`. + + If `True`, ``__getstate__`` and ``__setstate__`` are generated and + attached to the class. This is necessary for slotted classes to be + pickleable. If left `None`, it's `True` by default for slotted classes + and ``False`` for dict classes. + + If *auto_detect* is `True`, and *getstate_setstate* is left `None`, and + **either** ``__getstate__`` or ``__setstate__`` is detected directly on + the class (i.e. not inherited), it is set to `False` (this is usually + what you want). + + :param on_setattr: A callable that is run whenever the user attempts to set + an attribute (either by assignment like ``i.x = 42`` or by using + `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments + as validators: the instance, the attribute that is being modified, and + the new value. + + If no exception is raised, the attribute is set to the return value of + the callable. + + If a list of callables is passed, they're automatically wrapped in an + `attrs.setters.pipe`. 
+ :type on_setattr: `callable`, or a list of callables, or `None`, or + `attrs.setters.NO_OP` + + :param callable | None field_transformer: + A function that is called with the original class object and all fields + right before *attrs* finalizes the class. You can use this, e.g., to + automatically add converters or validators to fields based on their + types. + + .. seealso:: `transform-fields` + + :param bool match_args: + If `True` (default), set ``__match_args__`` on the class to support + :pep:`634` (Structural Pattern Matching). It is a tuple of all + non-keyword-only ``__init__`` parameter names on Python 3.10 and later. + Ignored on older Python versions. + + .. versionadded:: 16.0.0 *slots* + .. versionadded:: 16.1.0 *frozen* + .. versionadded:: 16.3.0 *str* + .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. + .. versionchanged:: 17.1.0 + *hash* supports ``None`` as value which is also the default now. + .. versionadded:: 17.3.0 *auto_attribs* + .. versionchanged:: 18.1.0 + If *these* is passed, no attributes are deleted from the class body. + .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. + .. versionadded:: 18.2.0 *weakref_slot* + .. deprecated:: 18.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a + `DeprecationWarning` if the classes compared are subclasses of + each other. ``__eq`` and ``__ne__`` never tried to compared subclasses + to each other. + .. versionchanged:: 19.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider + subclasses comparable anymore. + .. versionadded:: 18.2.0 *kw_only* + .. versionadded:: 18.2.0 *cache_hash* + .. versionadded:: 19.1.0 *auto_exc* + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *auto_detect* + .. versionadded:: 20.1.0 *collect_by_mro* + .. versionadded:: 20.1.0 *getstate_setstate* + .. versionadded:: 20.1.0 *on_setattr* + .. versionadded:: 20.3.0 *field_transformer* + .. versionchanged:: 21.1.0 + ``init=False`` injects ``__attrs_init__`` + .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 21.3.0 *match_args* + .. versionadded:: 22.2.0 + *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance). + """ + eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) + + # unsafe_hash takes precedence due to PEP 681. + if unsafe_hash is not None: + hash = unsafe_hash + + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + def wrap(cls): + is_frozen = frozen or _has_frozen_base_class(cls) + is_exc = auto_exc is True and issubclass(cls, BaseException) + has_own_setattr = auto_detect and _has_own_attribute( + cls, "__setattr__" + ) + + if has_own_setattr and is_frozen: + msg = "Can't freeze a class with a custom __setattr__." 
+ raise ValueError(msg) + + builder = _ClassBuilder( + cls, + these, + slots, + is_frozen, + weakref_slot, + _determine_whether_to_implement( + cls, + getstate_setstate, + auto_detect, + ("__getstate__", "__setstate__"), + default=slots, + ), + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_own_setattr, + field_transformer, + ) + if _determine_whether_to_implement( + cls, repr, auto_detect, ("__repr__",) + ): + builder.add_repr(repr_ns) + if str is True: + builder.add_str() + + eq = _determine_whether_to_implement( + cls, eq_, auto_detect, ("__eq__", "__ne__") + ) + if not is_exc and eq is True: + builder.add_eq() + if not is_exc and _determine_whether_to_implement( + cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") + ): + builder.add_order() + + builder.add_setattr() + + nonlocal hash + if ( + hash is None + and auto_detect is True + and _has_own_attribute(cls, "__hash__") + ): + hash = False + + if hash is not True and hash is not False and hash is not None: + # Can't use `hash in` because 1 == True for example. + msg = "Invalid value for hash. Must be True, False, or None." + raise TypeError(msg) + + if hash is False or (hash is None and eq is False) or is_exc: + # Don't do anything. Should fall back to __object__'s __hash__ + # which is by id. + if cache_hash: + msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled." + raise TypeError(msg) + elif hash is True or ( + hash is None and eq is True and is_frozen is True + ): + # Build a __hash__ if told so, or if it's safe. + builder.add_hash() + else: + # Raise TypeError on attempts to hash. + if cache_hash: + msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled." + raise TypeError(msg) + builder.make_unhashable() + + if _determine_whether_to_implement( + cls, init, auto_detect, ("__init__",) + ): + builder.add_init() + else: + builder.add_attrs_init() + if cache_hash: + msg = "Invalid value for cache_hash. To use hash caching, init must be True." + raise TypeError(msg) + + if ( + PY310 + and match_args + and not _has_own_attribute(cls, "__match_args__") + ): + builder.add_match_args() + + return builder.build_class() + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + + return wrap(maybe_cls) + + +_attrs = attrs +""" +Internal alias so we can use it in functions that take an argument called +*attrs*. +""" + + +def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return cls.__setattr__ is _frozen_setattrs + + +def _generate_unique_filename(cls, func_name): + """ + Create a "filename" suitable for a function being generated. 
+ """ + return ( + f"" + ) + + +def _make_hash(cls, attrs, frozen, cache_hash): + attrs = tuple( + a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) + ) + + tab = " " + + unique_filename = _generate_unique_filename(cls, "hash") + type_hash = hash(unique_filename) + # If eq is custom generated, we need to include the functions in globs + globs = {} + + hash_def = "def __hash__(self" + hash_func = "hash((" + closing_braces = "))" + if not cache_hash: + hash_def += "):" + else: + hash_def += ", *" + + hash_def += ", _cache_wrapper=__import__('attr._make')._make._CacheHashWrapper):" + hash_func = "_cache_wrapper(" + hash_func + closing_braces += ")" + + method_lines = [hash_def] + + def append_hash_computation_lines(prefix, indent): + """ + Generate the code for actually computing the hash code. + Below this will either be returned directly or used to compute + a value which is then cached, depending on the value of cache_hash + """ + + method_lines.extend( + [ + indent + prefix + hash_func, + indent + f" {type_hash},", + ] + ) + + for a in attrs: + if a.eq_key: + cmp_name = f"_{a.name}_key" + globs[cmp_name] = a.eq_key + method_lines.append( + indent + f" {cmp_name}(self.{a.name})," + ) + else: + method_lines.append(indent + f" self.{a.name},") + + method_lines.append(indent + " " + closing_braces) + + if cache_hash: + method_lines.append(tab + f"if self.{_hash_cache_field} is None:") + if frozen: + append_hash_computation_lines( + f"object.__setattr__(self, '{_hash_cache_field}', ", tab * 2 + ) + method_lines.append(tab * 2 + ")") # close __setattr__ + else: + append_hash_computation_lines( + f"self.{_hash_cache_field} = ", tab * 2 + ) + method_lines.append(tab + f"return self.{_hash_cache_field}") + else: + append_hash_computation_lines("return ", tab) + + script = "\n".join(method_lines) + return _make_method("__hash__", script, unique_filename, globs) + + +def _add_hash(cls, attrs): + """ + Add a hash method to *cls*. + """ + cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) + return cls + + +def _make_ne(): + """ + Create __ne__ method. + """ + + def __ne__(self, other): + """ + Check equality and either forward a NotImplemented or + return the result negated. + """ + result = self.__eq__(other) + if result is NotImplemented: + return NotImplemented + + return not result + + return __ne__ + + +def _make_eq(cls, attrs): + """ + Create __eq__ method for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.eq] + + unique_filename = _generate_unique_filename(cls, "eq") + lines = [ + "def __eq__(self, other):", + " if other.__class__ is not self.__class__:", + " return NotImplemented", + ] + + # We can't just do a big self.x = other.x and... clause due to + # irregularities like nan == nan is false but (nan,) == (nan,) is true. + globs = {} + if attrs: + lines.append(" return (") + others = [" ) == ("] + for a in attrs: + if a.eq_key: + cmp_name = f"_{a.name}_key" + # Add the key function to the global namespace + # of the evaluated function. + globs[cmp_name] = a.eq_key + lines.append(f" {cmp_name}(self.{a.name}),") + others.append(f" {cmp_name}(other.{a.name}),") + else: + lines.append(f" self.{a.name},") + others.append(f" other.{a.name},") + + lines += [*others, " )"] + else: + lines.append(" return True") + + script = "\n".join(lines) + + return _make_method("__eq__", script, unique_filename, globs) + + +def _make_order(cls, attrs): + """ + Create ordering methods for *cls* with *attrs*. 
+ """ + attrs = [a for a in attrs if a.order] + + def attrs_to_tuple(obj): + """ + Save us some typing. + """ + return tuple( + key(value) if key else value + for value, key in ( + (getattr(obj, a.name), a.order_key) for a in attrs + ) + ) + + def __lt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) < attrs_to_tuple(other) + + return NotImplemented + + def __le__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) <= attrs_to_tuple(other) + + return NotImplemented + + def __gt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) > attrs_to_tuple(other) + + return NotImplemented + + def __ge__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) >= attrs_to_tuple(other) + + return NotImplemented + + return __lt__, __le__, __gt__, __ge__ + + +def _add_eq(cls, attrs=None): + """ + Add equality methods to *cls* with *attrs*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__eq__ = _make_eq(cls, attrs) + cls.__ne__ = _make_ne() + + return cls + + +def _make_repr(attrs, ns, cls): + unique_filename = _generate_unique_filename(cls, "repr") + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, (repr if a.repr is True else a.repr), a.init) + for a in attrs + if a.repr is not False + ) + globs = { + name + "_repr": r for name, r, _ in attr_names_with_reprs if r != repr + } + globs["_compat"] = _compat + globs["AttributeError"] = AttributeError + globs["NOTHING"] = NOTHING + attribute_fragments = [] + for name, r, i in attr_names_with_reprs: + accessor = ( + "self." + name if i else 'getattr(self, "' + name + '", NOTHING)' + ) + fragment = ( + "%s={%s!r}" % (name, accessor) + if r == repr + else "%s={%s_repr(%s)}" % (name, name, accessor) + ) + attribute_fragments.append(fragment) + repr_fragment = ", ".join(attribute_fragments) + + if ns is None: + cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' + else: + cls_name_fragment = ns + ".{self.__class__.__name__}" + + lines = [ + "def __repr__(self):", + " try:", + " already_repring = _compat.repr_context.already_repring", + " except AttributeError:", + " already_repring = {id(self),}", + " _compat.repr_context.already_repring = already_repring", + " else:", + " if id(self) in already_repring:", + " return '...'", + " else:", + " already_repring.add(id(self))", + " try:", + f" return f'{cls_name_fragment}({repr_fragment})'", + " finally:", + " already_repring.remove(id(self))", + ] + + return _make_method( + "__repr__", "\n".join(lines), unique_filename, globs=globs + ) + + +def _add_repr(cls, ns=None, attrs=None): + """ + Add a repr method to *cls*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__repr__ = _make_repr(attrs, ns, cls) + return cls + + +def fields(cls): + """ + Return the tuple of *attrs* attributes for a class. + + The tuple also allows accessing the fields by their names (see below for + examples). + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs* + class. 
+ + :rtype: tuple (with name accessors) of `attrs.Attribute` + + .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields + by name. + .. versionchanged:: 23.1.0 Add support for generic classes. + """ + generic_base = get_generic_base(cls) + + if generic_base is None and not isinstance(cls, type): + msg = "Passed object must be a class." + raise TypeError(msg) + + attrs = getattr(cls, "__attrs_attrs__", None) + + if attrs is None: + if generic_base is not None: + attrs = getattr(generic_base, "__attrs_attrs__", None) + if attrs is not None: + # Even though this is global state, stick it on here to speed + # it up. We rely on `cls` being cached for this to be + # efficient. + cls.__attrs_attrs__ = attrs + return attrs + msg = f"{cls!r} is not an attrs-decorated class." + raise NotAnAttrsClassError(msg) + + return attrs + + +def fields_dict(cls): + """ + Return an ordered dictionary of *attrs* attributes for a class, whose + keys are the attribute names. + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs* + class. + + :rtype: dict + + .. versionadded:: 18.1.0 + """ + if not isinstance(cls, type): + msg = "Passed object must be a class." + raise TypeError(msg) + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + msg = f"{cls!r} is not an attrs-decorated class." + raise NotAnAttrsClassError(msg) + return {a.name: a for a in attrs} + + +def validate(inst): + """ + Validate all attributes on *inst* that have a validator. + + Leaves all exceptions through. + + :param inst: Instance of a class with *attrs* attributes. + """ + if _config._run_validators is False: + return + + for a in fields(inst.__class__): + v = a.validator + if v is not None: + v(inst, a, getattr(inst, a.name)) + + +def _is_slot_cls(cls): + return "__slots__" in cls.__dict__ + + +def _is_slot_attr(a_name, base_attr_map): + """ + Check if the attribute name comes from a slot class. + """ + return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) + + +def _make_init( + cls, + attrs, + pre_init, + pre_init_has_args, + post_init, + frozen, + slots, + cache_hash, + base_attr_map, + is_exc, + cls_on_setattr, + attrs_init, +): + has_cls_on_setattr = ( + cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP + ) + + if frozen and has_cls_on_setattr: + msg = "Frozen classes can't use on_setattr." + raise ValueError(msg) + + needs_cached_setattr = cache_hash or frozen + filtered_attrs = [] + attr_dict = {} + for a in attrs: + if not a.init and a.default is NOTHING: + continue + + filtered_attrs.append(a) + attr_dict[a.name] = a + + if a.on_setattr is not None: + if frozen is True: + msg = "Frozen classes can't use on_setattr." + raise ValueError(msg) + + needs_cached_setattr = True + elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: + needs_cached_setattr = True + + unique_filename = _generate_unique_filename(cls, "init") + + script, globs, annotations = _attrs_to_init_script( + filtered_attrs, + frozen, + slots, + pre_init, + pre_init_has_args, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + attrs_init, + ) + if cls.__module__ in sys.modules: + # This makes typing.get_type_hints(CLS.__init__) resolve string types. 
+ globs.update(sys.modules[cls.__module__].__dict__) + + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) + + if needs_cached_setattr: + # Save the lookup overhead in __init__ if we need to circumvent + # setattr hooks. + globs["_cached_setattr_get"] = _obj_setattr.__get__ + + init = _make_method( + "__attrs_init__" if attrs_init else "__init__", + script, + unique_filename, + globs, + ) + init.__annotations__ = annotations + + return init + + +def _setattr(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*. + """ + return f"_setattr('{attr_name}', {value_var})" + + +def _setattr_with_converter(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*, but run + its converter first. + """ + return "_setattr('%s', %s(%s))" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _assign(attr_name, value, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise + relegate to _setattr. + """ + if has_on_setattr: + return _setattr(attr_name, value, True) + + return f"self.{attr_name} = {value}" + + +def _assign_with_converter(attr_name, value_var, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment after + conversion. Otherwise relegate to _setattr_with_converter. + """ + if has_on_setattr: + return _setattr_with_converter(attr_name, value_var, True) + + return "self.%s = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _attrs_to_init_script( + attrs, + frozen, + slots, + pre_init, + pre_init_has_args, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + attrs_init, +): + """ + Return a script of an initializer for *attrs* and a dict of globals. + + The globals are expected by the generated script. + + If *frozen* is True, we cannot set the attributes directly so we use + a cached ``object.__setattr__``. + """ + lines = [] + if pre_init: + lines.append("self.__attrs_pre_init__()") + + if needs_cached_setattr: + lines.append( + # Circumvent the __setattr__ descriptor to save one lookup per + # assignment. + # Note _setattr will be used again below if cache_hash is True + "_setattr = _cached_setattr_get(self)" + ) + + if frozen is True: + if slots is True: + fmt_setter = _setattr + fmt_setter_with_converter = _setattr_with_converter + else: + # Dict frozen classes assign directly to __dict__. + # But only if the attribute doesn't come from an ancestor slot + # class. + # Note _inst_dict will be used again below if cache_hash is True + lines.append("_inst_dict = self.__dict__") + + def fmt_setter(attr_name, value_var, has_on_setattr): + if _is_slot_attr(attr_name, base_attr_map): + return _setattr(attr_name, value_var, has_on_setattr) + + return f"_inst_dict['{attr_name}'] = {value_var}" + + def fmt_setter_with_converter( + attr_name, value_var, has_on_setattr + ): + if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): + return _setattr_with_converter( + attr_name, value_var, has_on_setattr + ) + + return "_inst_dict['%s'] = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + else: + # Not frozen. + fmt_setter = _assign + fmt_setter_with_converter = _assign_with_converter + + args = [] + kw_only_args = [] + attrs_to_validate = [] + + # This is a dictionary of names to validator and converter callables. 
+ # Injecting this into __init__ globals lets us avoid lookups. + names_for_globals = {} + annotations = {"return": None} + + for a in attrs: + if a.validator: + attrs_to_validate.append(a) + + attr_name = a.name + has_on_setattr = a.on_setattr is not None or ( + a.on_setattr is not setters.NO_OP and has_cls_on_setattr + ) + # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not + # explicitly provided + arg_name = a.alias + + has_factory = isinstance(a.default, Factory) + maybe_self = "self" if has_factory and a.default.takes_self else "" + + if a.init is False: + if has_factory: + init_factory_name = _init_factory_pat % (a.name,) + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + init_factory_name + f"({maybe_self})", + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + init_factory_name + f"({maybe_self})", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + elif a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + ) + ) + elif a.default is not NOTHING and not has_factory: + arg = f"{arg_name}=attr_dict['{attr_name}'].default" + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + elif has_factory: + arg = f"{arg_name}=NOTHING" + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + lines.append(f"if {arg_name} is not NOTHING:") + + init_factory_name = _init_factory_pat % (a.name,) + if a.converter is not None: + lines.append( + " " + + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter_with_converter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append( + " " + fmt_setter(attr_name, arg_name, has_on_setattr) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.kw_only: + kw_only_args.append(arg_name) + else: + args.append(arg_name) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + if a.init is True: + if a.type is not None and a.converter is None: + annotations[arg_name] = a.type + elif a.converter is not None: + # Try to get the type from the converter. + t = _AnnotationExtractor(a.converter).get_first_param_type() + if t: + annotations[arg_name] = t + + if attrs_to_validate: # we can skip this if there are no validators. 
+ names_for_globals["_config"] = _config + lines.append("if _config._run_validators is True:") + for a in attrs_to_validate: + val_name = "__attr_validator_" + a.name + attr_name = "__attr_" + a.name + lines.append(f" {val_name}(self, {attr_name}, self.{a.name})") + names_for_globals[val_name] = a.validator + names_for_globals[attr_name] = a + + if post_init: + lines.append("self.__attrs_post_init__()") + + # because this is set only after __attrs_post_init__ is called, a crash + # will result if post-init tries to access the hash code. This seemed + # preferable to setting this beforehand, in which case alteration to + # field values during post-init combined with post-init accessing the + # hash code would result in silent bugs. + if cache_hash: + if frozen: + if slots: # noqa: SIM108 + # if frozen and slots, then _setattr defined above + init_hash_cache = "_setattr('%s', %s)" + else: + # if frozen and not slots, then _inst_dict defined above + init_hash_cache = "_inst_dict['%s'] = %s" + else: + init_hash_cache = "self.%s = %s" + lines.append(init_hash_cache % (_hash_cache_field, "None")) + + # For exceptions we rely on BaseException.__init__ for proper + # initialization. + if is_exc: + vals = ",".join(f"self.{a.name}" for a in attrs if a.init) + + lines.append(f"BaseException.__init__(self, {vals})") + + args = ", ".join(args) + pre_init_args = args + if kw_only_args: + args += "%s*, %s" % ( + ", " if args else "", # leading comma + ", ".join(kw_only_args), # kw_only args + ) + pre_init_kw_only_args = ", ".join( + ["%s=%s" % (kw_arg, kw_arg) for kw_arg in kw_only_args] + ) + pre_init_args += ( + ", " if pre_init_args else "" + ) # handle only kwargs and no regular args + pre_init_args += pre_init_kw_only_args + + if pre_init and pre_init_has_args: + # If pre init method has arguments, pass same arguments as `__init__` + lines[0] = "self.__attrs_pre_init__(%s)" % pre_init_args + + return ( + "def %s(self, %s):\n %s\n" + % ( + ("__attrs_init__" if attrs_init else "__init__"), + args, + "\n ".join(lines) if lines else "pass", + ), + names_for_globals, + annotations, + ) + + +def _default_init_alias_for(name: str) -> str: + """ + The default __init__ parameter name for a field. + + This performs private-name adjustment via leading-unscore stripping, + and is the default value of Attribute.alias if not provided. + """ + + return name.lstrip("_") + + +class Attribute: + """ + *Read-only* representation of an attribute. + + .. warning:: + + You should never instantiate this class yourself. + + The class has *all* arguments of `attr.ib` (except for ``factory`` + which is only syntactic sugar for ``default=Factory(...)`` plus the + following: + + - ``name`` (`str`): The name of the attribute. + - ``alias`` (`str`): The __init__ parameter name of the attribute, after + any explicit overrides and default private-attribute-name handling. + - ``inherited`` (`bool`): Whether or not that attribute has been inherited + from a base class. + - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The callables + that are used for comparing and ordering objects by this attribute, + respectively. These are set by passing a callable to `attr.ib`'s ``eq``, + ``order``, or ``cmp`` arguments. See also :ref:`comparison customization + `. + + Instances of this class are frequently used for introspection purposes + like: + + - `fields` returns a tuple of them. + - Validators get them passed as the first argument. + - The :ref:`field transformer ` hook receives a list of + them. 
+ - The ``alias`` property exposes the __init__ parameter name of the field, + with any overrides and default private-attribute handling applied. + + + .. versionadded:: 20.1.0 *inherited* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.2.0 *inherited* is not taken into account for + equality checks and hashing anymore. + .. versionadded:: 21.1.0 *eq_key* and *order_key* + .. versionadded:: 22.2.0 *alias* + + For the full version history of the fields, see `attr.ib`. + """ + + __slots__ = ( + "name", + "default", + "validator", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "type", + "converter", + "kw_only", + "inherited", + "on_setattr", + "alias", + ) + + def __init__( + self, + name, + default, + validator, + repr, + cmp, # XXX: unused, remove along with other cmp code. + hash, + init, + inherited, + metadata=None, + type=None, + converter=None, + kw_only=False, + eq=None, + eq_key=None, + order=None, + order_key=None, + on_setattr=None, + alias=None, + ): + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq_key or eq, order_key or order, True + ) + + # Cache this descriptor here to speed things up later. + bound_setattr = _obj_setattr.__get__(self) + + # Despite the big red warning, people *do* instantiate `Attribute` + # themselves. + bound_setattr("name", name) + bound_setattr("default", default) + bound_setattr("validator", validator) + bound_setattr("repr", repr) + bound_setattr("eq", eq) + bound_setattr("eq_key", eq_key) + bound_setattr("order", order) + bound_setattr("order_key", order_key) + bound_setattr("hash", hash) + bound_setattr("init", init) + bound_setattr("converter", converter) + bound_setattr( + "metadata", + ( + types.MappingProxyType(dict(metadata)) # Shallow copy + if metadata + else _empty_metadata_singleton + ), + ) + bound_setattr("type", type) + bound_setattr("kw_only", kw_only) + bound_setattr("inherited", inherited) + bound_setattr("on_setattr", on_setattr) + bound_setattr("alias", alias) + + def __setattr__(self, name, value): + raise FrozenInstanceError() + + @classmethod + def from_counting_attr(cls, name, ca, type=None): + # type holds the annotated value. deal with conflicts: + if type is None: + type = ca.type + elif ca.type is not None: + msg = "Type annotation and type argument cannot both be present" + raise ValueError(msg) + inst_dict = { + k: getattr(ca, k) + for k in Attribute.__slots__ + if k + not in ( + "name", + "validator", + "default", + "type", + "inherited", + ) # exclude methods and deprecated alias + } + return cls( + name=name, + validator=ca._validator, + default=ca._default, + type=type, + cmp=None, + inherited=False, + **inst_dict, + ) + + # Don't use attrs.evolve since fields(Attribute) doesn't work + def evolve(self, **changes): + """ + Copy *self* and apply *changes*. + + This works similarly to `attrs.evolve` but that function does not work + with `Attribute`. + + It is mainly meant to be used for `transform-fields`. + + .. versionadded:: 20.3.0 + """ + new = copy.copy(self) + + new._setattrs(changes.items()) + + return new + + # Don't use _add_pickle since fields(Attribute) doesn't work + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple( + getattr(self, name) if name != "metadata" else dict(self.metadata) + for name in self.__slots__ + ) + + def __setstate__(self, state): + """ + Play nice with pickle. 
+ """ + self._setattrs(zip(self.__slots__, state)) + + def _setattrs(self, name_values_pairs): + bound_setattr = _obj_setattr.__get__(self) + for name, value in name_values_pairs: + if name != "metadata": + bound_setattr(name, value) + else: + bound_setattr( + name, + types.MappingProxyType(dict(value)) + if value + else _empty_metadata_singleton, + ) + + +_a = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=(name != "metadata"), + init=True, + inherited=False, + alias=_default_init_alias_for(name), + ) + for name in Attribute.__slots__ +] + +Attribute = _add_hash( + _add_eq( + _add_repr(Attribute, attrs=_a), + attrs=[a for a in _a if a.name != "inherited"], + ), + attrs=[a for a in _a if a.hash and a.name != "inherited"], +) + + +class _CountingAttr: + """ + Intermediate representation of attributes that uses a counter to preserve + the order in which the attributes have been defined. + + *Internal* data structure of the attrs library. Running into is most + likely the result of a bug like a forgotten `@attr.s` decorator. + """ + + __slots__ = ( + "counter", + "_default", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "_validator", + "converter", + "type", + "kw_only", + "on_setattr", + "alias", + ) + __attrs_attrs__ = ( + *tuple( + Attribute( + name=name, + alias=_default_init_alias_for(name), + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=True, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ) + for name in ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "on_setattr", + "alias", + ) + ), + Attribute( + name="metadata", + alias="metadata", + default=None, + validator=None, + repr=True, + cmp=None, + hash=False, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ), + ) + cls_counter = 0 + + def __init__( + self, + default, + validator, + repr, + cmp, + hash, + init, + converter, + metadata, + type, + kw_only, + eq, + eq_key, + order, + order_key, + on_setattr, + alias, + ): + _CountingAttr.cls_counter += 1 + self.counter = _CountingAttr.cls_counter + self._default = default + self._validator = validator + self.converter = converter + self.repr = repr + self.eq = eq + self.eq_key = eq_key + self.order = order + self.order_key = order_key + self.hash = hash + self.init = init + self.metadata = metadata + self.type = type + self.kw_only = kw_only + self.on_setattr = on_setattr + self.alias = alias + + def validator(self, meth): + """ + Decorator that adds *meth* to the list of validators. + + Returns *meth* unchanged. + + .. versionadded:: 17.1.0 + """ + if self._validator is None: + self._validator = meth + else: + self._validator = and_(self._validator, meth) + return meth + + def default(self, meth): + """ + Decorator that allows to set the default for an attribute. + + Returns *meth* unchanged. + + :raises DefaultAlreadySetError: If default has been set before. + + .. versionadded:: 17.1.0 + """ + if self._default is not NOTHING: + raise DefaultAlreadySetError() + + self._default = Factory(meth, takes_self=True) + + return meth + + +_CountingAttr = _add_eq(_add_repr(_CountingAttr)) + + +class Factory: + """ + Stores a factory callable. + + If passed as the default value to `attrs.field`, the factory is used to + generate a new value. 
+ + :param callable factory: A callable that takes either none or exactly one + mandatory positional argument depending on *takes_self*. + :param bool takes_self: Pass the partially initialized instance that is + being initialized as a positional argument. + + .. versionadded:: 17.1.0 *takes_self* + """ + + __slots__ = ("factory", "takes_self") + + def __init__(self, factory, takes_self=False): + self.factory = factory + self.takes_self = takes_self + + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple(getattr(self, name) for name in self.__slots__) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + for name, value in zip(self.__slots__, state): + setattr(self, name, value) + + +_f = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + ) + for name in Factory.__slots__ +] + +Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f) + + +def make_class( + name, attrs, bases=(object,), class_body=None, **attributes_arguments +): + r""" + A quick way to create a new class called *name* with *attrs*. + + :param str name: The name for the new class. + + :param attrs: A list of names or a dictionary of mappings of names to + `attr.ib`\ s / `attrs.field`\ s. + + The order is deduced from the order of the names or attributes inside + *attrs*. Otherwise the order of the definition of the attributes is + used. + :type attrs: `list` or `dict` + + :param tuple bases: Classes that the new class will subclass. + + :param dict class_body: An optional dictionary of class attributes for the new class. + + :param attributes_arguments: Passed unmodified to `attr.s`. + + :return: A new class with *attrs*. + :rtype: type + + .. versionadded:: 17.1.0 *bases* + .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. + .. versionchanged:: 23.2.0 *class_body* + """ + if isinstance(attrs, dict): + cls_dict = attrs + elif isinstance(attrs, (list, tuple)): + cls_dict = {a: attrib() for a in attrs} + else: + msg = "attrs argument must be a dict or a list." + raise TypeError(msg) + + pre_init = cls_dict.pop("__attrs_pre_init__", None) + post_init = cls_dict.pop("__attrs_post_init__", None) + user_init = cls_dict.pop("__init__", None) + + body = {} + if class_body is not None: + body.update(class_body) + if pre_init is not None: + body["__attrs_pre_init__"] = pre_init + if post_init is not None: + body["__attrs_post_init__"] = post_init + if user_init is not None: + body["__init__"] = user_init + + type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body)) + + # For pickling to work, the __module__ variable needs to be set to the + # frame where the class is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + with contextlib.suppress(AttributeError, ValueError): + type_.__module__ = sys._getframe(1).f_globals.get( + "__name__", "__main__" + ) + + # We do it here for proper warnings with meaningful stacklevel. 
+ cmp = attributes_arguments.pop("cmp", None) + ( + attributes_arguments["eq"], + attributes_arguments["order"], + ) = _determine_attrs_eq_order( + cmp, + attributes_arguments.get("eq"), + attributes_arguments.get("order"), + True, + ) + + return _attrs(these=cls_dict, **attributes_arguments)(type_) + + +# These are required by within this module so we define them here and merely +# import into .validators / .converters. + + +@attrs(slots=True, hash=True) +class _AndValidator: + """ + Compose many validators to a single one. + """ + + _validators = attrib() + + def __call__(self, inst, attr, value): + for v in self._validators: + v(inst, attr, value) + + +def and_(*validators): + """ + A validator that composes multiple validators into one. + + When called on a value, it runs all wrapped validators. + + :param callables validators: Arbitrary number of validators. + + .. versionadded:: 17.1.0 + """ + vals = [] + for validator in validators: + vals.extend( + validator._validators + if isinstance(validator, _AndValidator) + else [validator] + ) + + return _AndValidator(tuple(vals)) + + +def pipe(*converters): + """ + A converter that composes multiple converters into one. + + When called on a value, it runs all wrapped converters, returning the + *last* value. + + Type annotations will be inferred from the wrapped converters', if + they have any. + + :param callables converters: Arbitrary number of converters. + + .. versionadded:: 20.1.0 + """ + + def pipe_converter(val): + for converter in converters: + val = converter(val) + + return val + + if not converters: + # If the converter list is empty, pipe_converter is the identity. + A = typing.TypeVar("A") + pipe_converter.__annotations__ = {"val": A, "return": A} + else: + # Get parameter type from first converter. + t = _AnnotationExtractor(converters[0]).get_first_param_type() + if t: + pipe_converter.__annotations__["val"] = t + + # Get return type from last converter. + rt = _AnnotationExtractor(converters[-1]).get_return_type() + if rt: + pipe_converter.__annotations__["return"] = rt + + return pipe_converter diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_next_gen.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_next_gen.py new file mode 100644 index 000000000..1fb9f259b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_next_gen.py @@ -0,0 +1,229 @@ +# SPDX-License-Identifier: MIT + +""" +These are keyword-only APIs that call `attr.s` and `attr.ib` with different +default values. +""" + + +from functools import partial + +from . import setters +from ._funcs import asdict as _asdict +from ._funcs import astuple as _astuple +from ._make import ( + NOTHING, + _frozen_setattrs, + _ng_default_on_setattr, + attrib, + attrs, +) +from .exceptions import UnannotatedAttributeError + + +def define( + maybe_cls=None, + *, + these=None, + repr=None, + unsafe_hash=None, + hash=None, + init=None, + slots=True, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=None, + kw_only=False, + cache_hash=False, + auto_exc=True, + eq=None, + order=False, + auto_detect=True, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, +): + r""" + Define an *attrs* class. + + Differences to the classic `attr.s` that it uses underneath: + + - Automatically detect whether or not *auto_attribs* should be `True` (c.f. + *auto_attribs* parameter). + - Converters and validators run when attributes are set by default -- if + *frozen* is `False`. + - *slots=True* + + .. 
caution:: + + Usually this has only upsides and few visible effects in everyday + programming. But it *can* lead to some surprising behaviors, so please + make sure to read :term:`slotted classes`. + - *auto_exc=True* + - *auto_detect=True* + - *order=False* + - Some options that were only relevant on Python 2 or were kept around for + backwards-compatibility have been removed. + + Please note that these are all defaults and you can change them as you + wish. + + :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves + exactly like `attr.s`. If left `None`, `attr.s` will try to guess: + + 1. If any attributes are annotated and no unannotated `attrs.fields`\ s + are found, it assumes *auto_attribs=True*. + 2. Otherwise it assumes *auto_attribs=False* and tries to collect + `attrs.fields`\ s. + + For now, please refer to `attr.s` for the rest of the parameters. + + .. versionadded:: 20.1.0 + .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``. + .. versionadded:: 22.2.0 + *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance). + """ + + def do_it(cls, auto_attribs): + return attrs( + maybe_cls=cls, + these=these, + repr=repr, + hash=hash, + unsafe_hash=unsafe_hash, + init=init, + slots=slots, + frozen=frozen, + weakref_slot=weakref_slot, + str=str, + auto_attribs=auto_attribs, + kw_only=kw_only, + cache_hash=cache_hash, + auto_exc=auto_exc, + eq=eq, + order=order, + auto_detect=auto_detect, + collect_by_mro=True, + getstate_setstate=getstate_setstate, + on_setattr=on_setattr, + field_transformer=field_transformer, + match_args=match_args, + ) + + def wrap(cls): + """ + Making this a wrapper ensures this code runs during class creation. + + We also ensure that frozen-ness of classes is inherited. + """ + nonlocal frozen, on_setattr + + had_on_setattr = on_setattr not in (None, setters.NO_OP) + + # By default, mutable classes convert & validate on setattr. + if frozen is False and on_setattr is None: + on_setattr = _ng_default_on_setattr + + # However, if we subclass a frozen class, we inherit the immutability + # and disable on_setattr. + for base_cls in cls.__bases__: + if base_cls.__setattr__ is _frozen_setattrs: + if had_on_setattr: + msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)." + raise ValueError(msg) + + on_setattr = setters.NO_OP + break + + if auto_attribs is not None: + return do_it(cls, auto_attribs) + + try: + return do_it(cls, True) + except UnannotatedAttributeError: + return do_it(cls, False) + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + + return wrap(maybe_cls) + + +mutable = define +frozen = partial(define, frozen=True, on_setattr=None) + + +def field( + *, + default=NOTHING, + validator=None, + repr=True, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, + alias=None, +): + """ + Identical to `attr.ib`, except keyword-only and with some arguments + removed. + + .. versionadded:: 23.1.0 + The *type* parameter has been re-added; mostly for `attrs.make_class`. + Please note that type checkers ignore this metadata. + .. 
versionadded:: 20.1.0 + """ + return attrib( + default=default, + validator=validator, + repr=repr, + hash=hash, + init=init, + metadata=metadata, + type=type, + converter=converter, + factory=factory, + kw_only=kw_only, + eq=eq, + order=order, + on_setattr=on_setattr, + alias=alias, + ) + + +def asdict(inst, *, recurse=True, filter=None, value_serializer=None): + """ + Same as `attr.asdict`, except that collections types are always retained + and dict is always used as *dict_factory*. + + .. versionadded:: 21.3.0 + """ + return _asdict( + inst=inst, + recurse=recurse, + filter=filter, + value_serializer=value_serializer, + retain_collection_types=True, + ) + + +def astuple(inst, *, recurse=True, filter=None): + """ + Same as `attr.astuple`, except that collections types are always retained + and `tuple` is always used as the *tuple_factory*. + + .. versionadded:: 21.3.0 + """ + return _astuple( + inst=inst, recurse=recurse, filter=filter, retain_collection_types=True + ) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_typing_compat.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_typing_compat.pyi new file mode 100644 index 000000000..ca7b71e90 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_typing_compat.pyi @@ -0,0 +1,15 @@ +from typing import Any, ClassVar, Protocol + +# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`. +MYPY = False + +if MYPY: + # A protocol to be able to statically accept an attrs class. + class AttrsInstance_(Protocol): + __attrs_attrs__: ClassVar[Any] + +else: + # For type checkers without plug-in support use an empty protocol that + # will (hopefully) be combined into a union. + class AttrsInstance_(Protocol): + pass diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_version_info.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_version_info.py new file mode 100644 index 000000000..51a1312f9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_version_info.py @@ -0,0 +1,86 @@ +# SPDX-License-Identifier: MIT + + +from functools import total_ordering + +from ._funcs import astuple +from ._make import attrib, attrs + + +@total_ordering +@attrs(eq=False, order=False, slots=True, frozen=True) +class VersionInfo: + """ + A version object that can be compared to tuple of length 1--4: + + >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) + True + >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) + True + >>> vi = attr.VersionInfo(19, 2, 0, "final") + >>> vi < (19, 1, 1) + False + >>> vi < (19,) + False + >>> vi == (19, 2,) + True + >>> vi == (19, 2, 1) + False + + .. versionadded:: 19.2 + """ + + year = attrib(type=int) + minor = attrib(type=int) + micro = attrib(type=int) + releaselevel = attrib(type=str) + + @classmethod + def _from_version_string(cls, s): + """ + Parse *s* and return a _VersionInfo. + """ + v = s.split(".") + if len(v) == 3: + v.append("final") + + return cls( + year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] + ) + + def _ensure_tuple(self, other): + """ + Ensure *other* is a tuple of a valid length. + + Returns a possibly transformed *other* and ourselves as a tuple of + the same length as *other*. 
+ """ + + if self.__class__ is other.__class__: + other = astuple(other) + + if not isinstance(other, tuple): + raise NotImplementedError + + if not (1 <= len(other) <= 4): + raise NotImplementedError + + return astuple(self)[: len(other)], other + + def __eq__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + return us == them + + def __lt__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't + # have to do anything special with releaselevel for now. + return us < them diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_version_info.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_version_info.pyi new file mode 100644 index 000000000..45ced0863 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/_version_info.pyi @@ -0,0 +1,9 @@ +class VersionInfo: + @property + def year(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def micro(self) -> int: ... + @property + def releaselevel(self) -> str: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/converters.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/converters.py new file mode 100644 index 000000000..2bf4c902a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/converters.py @@ -0,0 +1,144 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful converters. +""" + + +import typing + +from ._compat import _AnnotationExtractor +from ._make import NOTHING, Factory, pipe + + +__all__ = [ + "default_if_none", + "optional", + "pipe", + "to_bool", +] + + +def optional(converter): + """ + A converter that allows an attribute to be optional. An optional attribute + is one which can be set to ``None``. + + Type annotations will be inferred from the wrapped converter's, if it + has any. + + :param callable converter: the converter that is used for non-``None`` + values. + + .. versionadded:: 17.1.0 + """ + + def optional_converter(val): + if val is None: + return None + return converter(val) + + xtr = _AnnotationExtractor(converter) + + t = xtr.get_first_param_type() + if t: + optional_converter.__annotations__["val"] = typing.Optional[t] + + rt = xtr.get_return_type() + if rt: + optional_converter.__annotations__["return"] = typing.Optional[rt] + + return optional_converter + + +def default_if_none(default=NOTHING, factory=None): + """ + A converter that allows to replace ``None`` values by *default* or the + result of *factory*. + + :param default: Value to be used if ``None`` is passed. Passing an instance + of `attrs.Factory` is supported, however the ``takes_self`` option + is *not*. + :param callable factory: A callable that takes no parameters whose result + is used if ``None`` is passed. + + :raises TypeError: If **neither** *default* or *factory* is passed. + :raises TypeError: If **both** *default* and *factory* are passed. + :raises ValueError: If an instance of `attrs.Factory` is passed with + ``takes_self=True``. + + .. versionadded:: 18.2.0 + """ + if default is NOTHING and factory is None: + msg = "Must pass either `default` or `factory`." + raise TypeError(msg) + + if default is not NOTHING and factory is not None: + msg = "Must pass either `default` or `factory` but not both." 
+ raise TypeError(msg) + + if factory is not None: + default = Factory(factory) + + if isinstance(default, Factory): + if default.takes_self: + msg = "`takes_self` is not supported by default_if_none." + raise ValueError(msg) + + def default_if_none_converter(val): + if val is not None: + return val + + return default.factory() + + else: + + def default_if_none_converter(val): + if val is not None: + return val + + return default + + return default_if_none_converter + + +def to_bool(val): + """ + Convert "boolean" strings (e.g., from env. vars.) to real booleans. + + Values mapping to :code:`True`: + + - :code:`True` + - :code:`"true"` / :code:`"t"` + - :code:`"yes"` / :code:`"y"` + - :code:`"on"` + - :code:`"1"` + - :code:`1` + + Values mapping to :code:`False`: + + - :code:`False` + - :code:`"false"` / :code:`"f"` + - :code:`"no"` / :code:`"n"` + - :code:`"off"` + - :code:`"0"` + - :code:`0` + + :raises ValueError: for any other value. + + .. versionadded:: 21.3.0 + """ + if isinstance(val, str): + val = val.lower() + truthy = {True, "true", "t", "yes", "y", "on", "1", 1} + falsy = {False, "false", "f", "no", "n", "off", "0", 0} + try: + if val in truthy: + return True + if val in falsy: + return False + except TypeError: + # Raised when "val" is not hashable (e.g., lists) + pass + msg = f"Cannot convert value to bool: {val}" + raise ValueError(msg) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/converters.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/converters.pyi new file mode 100644 index 000000000..5abb49f6d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/converters.pyi @@ -0,0 +1,13 @@ +from typing import Callable, TypeVar, overload + +from . import _ConverterType + +_T = TypeVar("_T") + +def pipe(*validators: _ConverterType) -> _ConverterType: ... +def optional(converter: _ConverterType) -> _ConverterType: ... +@overload +def default_if_none(default: _T) -> _ConverterType: ... +@overload +def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ... +def to_bool(val: str) -> bool: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/exceptions.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/exceptions.py new file mode 100644 index 000000000..3b7abb815 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/exceptions.py @@ -0,0 +1,95 @@ +# SPDX-License-Identifier: MIT + +from __future__ import annotations + +from typing import ClassVar + + +class FrozenError(AttributeError): + """ + A frozen/immutable instance or attribute have been attempted to be + modified. + + It mirrors the behavior of ``namedtuples`` by using the same error message + and subclassing `AttributeError`. + + .. versionadded:: 20.1.0 + """ + + msg = "can't set attribute" + args: ClassVar[tuple[str]] = [msg] + + +class FrozenInstanceError(FrozenError): + """ + A frozen instance has been attempted to be modified. + + .. versionadded:: 16.1.0 + """ + + +class FrozenAttributeError(FrozenError): + """ + A frozen attribute has been attempted to be modified. + + .. versionadded:: 20.1.0 + """ + + +class AttrsAttributeNotFoundError(ValueError): + """ + An *attrs* function couldn't find an attribute that the user asked for. + + .. versionadded:: 16.2.0 + """ + + +class NotAnAttrsClassError(ValueError): + """ + A non-*attrs* class has been passed into an *attrs* function. + + .. 
versionadded:: 16.2.0 + """ + + +class DefaultAlreadySetError(RuntimeError): + """ + A default has been set when defining the field and is attempted to be reset + using the decorator. + + .. versionadded:: 17.1.0 + """ + + +class UnannotatedAttributeError(RuntimeError): + """ + A class with ``auto_attribs=True`` has a field without a type annotation. + + .. versionadded:: 17.3.0 + """ + + +class PythonTooOldError(RuntimeError): + """ + It was attempted to use an *attrs* feature that requires a newer Python + version. + + .. versionadded:: 18.2.0 + """ + + +class NotCallableError(TypeError): + """ + A field requiring a callable has been set with a value that is not + callable. + + .. versionadded:: 19.2.0 + """ + + def __init__(self, msg, value): + super(TypeError, self).__init__(msg, value) + self.msg = msg + self.value = value + + def __str__(self): + return str(self.msg) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/exceptions.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/exceptions.pyi new file mode 100644 index 000000000..f2680118b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/exceptions.pyi @@ -0,0 +1,17 @@ +from typing import Any + +class FrozenError(AttributeError): + msg: str = ... + +class FrozenInstanceError(FrozenError): ... +class FrozenAttributeError(FrozenError): ... +class AttrsAttributeNotFoundError(ValueError): ... +class NotAnAttrsClassError(ValueError): ... +class DefaultAlreadySetError(RuntimeError): ... +class UnannotatedAttributeError(RuntimeError): ... +class PythonTooOldError(RuntimeError): ... + +class NotCallableError(TypeError): + msg: str = ... + value: Any = ... + def __init__(self, msg: str, value: Any) -> None: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/filters.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/filters.py new file mode 100644 index 000000000..a1e40c98d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/filters.py @@ -0,0 +1,66 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful filters for `attr.asdict`. +""" + +from ._make import Attribute + + +def _split_what(what): + """ + Returns a tuple of `frozenset`s of classes and attributes. + """ + return ( + frozenset(cls for cls in what if isinstance(cls, type)), + frozenset(cls for cls in what if isinstance(cls, str)), + frozenset(cls for cls in what if isinstance(cls, Attribute)), + ) + + +def include(*what): + """ + Include *what*. + + :param what: What to include. + :type what: `list` of classes `type`, field names `str` or + `attrs.Attribute`\\ s + + :rtype: `callable` + + .. versionchanged:: 23.1.0 Accept strings with field names. + """ + cls, names, attrs = _split_what(what) + + def include_(attribute, value): + return ( + value.__class__ in cls + or attribute.name in names + or attribute in attrs + ) + + return include_ + + +def exclude(*what): + """ + Exclude *what*. + + :param what: What to exclude. + :type what: `list` of classes `type`, field names `str` or + `attrs.Attribute`\\ s. + + :rtype: `callable` + + .. 
versionchanged:: 23.3.0 Accept field name string as input argument + """ + cls, names, attrs = _split_what(what) + + def exclude_(attribute, value): + return not ( + value.__class__ in cls + or attribute.name in names + or attribute in attrs + ) + + return exclude_ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/filters.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/filters.pyi new file mode 100644 index 000000000..8a02fa0fc --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/filters.pyi @@ -0,0 +1,6 @@ +from typing import Any, Union + +from . import Attribute, _FilterType + +def include(*what: Union[type, str, Attribute[Any]]) -> _FilterType[Any]: ... +def exclude(*what: Union[type, str, Attribute[Any]]) -> _FilterType[Any]: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/py.typed b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/setters.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/setters.py new file mode 100644 index 000000000..12ed6750d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/setters.py @@ -0,0 +1,73 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly used hooks for on_setattr. +""" + + +from . import _config +from .exceptions import FrozenAttributeError + + +def pipe(*setters): + """ + Run all *setters* and return the return value of the last one. + + .. versionadded:: 20.1.0 + """ + + def wrapped_pipe(instance, attrib, new_value): + rv = new_value + + for setter in setters: + rv = setter(instance, attrib, rv) + + return rv + + return wrapped_pipe + + +def frozen(_, __, ___): + """ + Prevent an attribute to be modified. + + .. versionadded:: 20.1.0 + """ + raise FrozenAttributeError() + + +def validate(instance, attrib, new_value): + """ + Run *attrib*'s validator on *new_value* if it has one. + + .. versionadded:: 20.1.0 + """ + if _config._run_validators is False: + return new_value + + v = attrib.validator + if not v: + return new_value + + v(instance, attrib, new_value) + + return new_value + + +def convert(instance, attrib, new_value): + """ + Run *attrib*'s converter -- if it has one -- on *new_value* and return the + result. + + .. versionadded:: 20.1.0 + """ + c = attrib.converter + if c: + return c(new_value) + + return new_value + + +# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. +# autodata stopped working, so the docstring is inlined in the API docs. +NO_OP = object() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/setters.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/setters.pyi new file mode 100644 index 000000000..72f7ce476 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/setters.pyi @@ -0,0 +1,19 @@ +from typing import Any, NewType, NoReturn, TypeVar + +from . import Attribute, _OnSetAttrType + +_T = TypeVar("_T") + +def frozen( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> NoReturn: ... +def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ... +def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ... + +# convert is allowed to return Any, because they can be chained using pipe. +def convert( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> Any: ... 
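# --- Hedged usage sketch (editorial, not part of the vendored files) ---
# The hooks defined in attr/setters.py above (pipe, convert, validate, NO_OP)
# are meant to be passed as on_setattr handlers.  This is a minimal sketch of
# how they might be combined; the class and field names are illustrative
# assumptions, not anything defined in this diff.
import attr
from attr import setters, validators


@attr.s
class Point:
    # Run the converter first, then the validator, on every attribute
    # assignment after __init__.
    x = attr.ib(
        converter=int,
        validator=validators.instance_of(int),
        on_setattr=setters.pipe(setters.convert, setters.validate),
    )
    # NO_OP opts this field out of any class-wide on_setattr hook.
    y = attr.ib(default=0, on_setattr=setters.NO_OP)


p = Point("1")   # the converter turns "1" into 1 during __init__
p.x = "2"        # on_setattr converts to 2, then validates it
# p.x = "not a number" would raise ValueError from the int() converter
# --- end sketch ---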
+ +_NoOpType = NewType("_NoOpType", object) +NO_OP: _NoOpType diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/validators.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/validators.py new file mode 100644 index 000000000..34d6b761d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/validators.py @@ -0,0 +1,681 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful validators. +""" + + +import operator +import re + +from contextlib import contextmanager +from re import Pattern + +from ._config import get_run_validators, set_run_validators +from ._make import _AndValidator, and_, attrib, attrs +from .converters import default_if_none +from .exceptions import NotCallableError + + +__all__ = [ + "and_", + "deep_iterable", + "deep_mapping", + "disabled", + "ge", + "get_disabled", + "gt", + "in_", + "instance_of", + "is_callable", + "le", + "lt", + "matches_re", + "max_len", + "min_len", + "not_", + "optional", + "provides", + "set_disabled", +] + + +def set_disabled(disabled): + """ + Globally disable or enable running validators. + + By default, they are run. + + :param disabled: If ``True``, disable running all validators. + :type disabled: bool + + .. warning:: + + This function is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(not disabled) + + +def get_disabled(): + """ + Return a bool indicating whether validators are currently disabled or not. + + :return: ``True`` if validators are currently disabled. + :rtype: bool + + .. versionadded:: 21.3.0 + """ + return not get_run_validators() + + +@contextmanager +def disabled(): + """ + Context manager that disables running validators within its context. + + .. warning:: + + This context manager is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(False) + try: + yield + finally: + set_run_validators(True) + + +@attrs(repr=False, slots=True, hash=True) +class _InstanceOfValidator: + type = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not isinstance(value, self.type): + msg = "'{name}' must be {type!r} (got {value!r} that is a {actual!r}).".format( + name=attr.name, + type=self.type, + actual=value.__class__, + value=value, + ) + raise TypeError( + msg, + attr, + self.type, + value, + ) + + def __repr__(self): + return f"" + + +def instance_of(type): + """ + A validator that raises a `TypeError` if the initializer is called + with a wrong type for this particular attribute (checks are performed using + `isinstance` therefore it's also valid to pass a tuple of types). + + :param type: The type to check for. + :type type: type or tuple of type + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected type, and the value it + got. + """ + return _InstanceOfValidator(type) + + +@attrs(repr=False, frozen=True, slots=True) +class _MatchesReValidator: + pattern = attrib() + match_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. 
+ """ + if not self.match_func(value): + msg = "'{name}' must match regex {pattern!r} ({value!r} doesn't)".format( + name=attr.name, pattern=self.pattern.pattern, value=value + ) + raise ValueError( + msg, + attr, + self.pattern, + value, + ) + + def __repr__(self): + return f"" + + +def matches_re(regex, flags=0, func=None): + r""" + A validator that raises `ValueError` if the initializer is called + with a string that doesn't match *regex*. + + :param regex: a regex string or precompiled pattern to match against + :param int flags: flags that will be passed to the underlying re function + (default 0) + :param callable func: which underlying `re` function to call. Valid options + are `re.fullmatch`, `re.search`, and `re.match`; the default ``None`` + means `re.fullmatch`. For performance reasons, the pattern is always + precompiled using `re.compile`. + + .. versionadded:: 19.2.0 + .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern. + """ + valid_funcs = (re.fullmatch, None, re.search, re.match) + if func not in valid_funcs: + msg = "'func' must be one of {}.".format( + ", ".join( + sorted(e and e.__name__ or "None" for e in set(valid_funcs)) + ) + ) + raise ValueError(msg) + + if isinstance(regex, Pattern): + if flags: + msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead" + raise TypeError(msg) + pattern = regex + else: + pattern = re.compile(regex, flags) + + if func is re.match: + match_func = pattern.match + elif func is re.search: + match_func = pattern.search + else: + match_func = pattern.fullmatch + + return _MatchesReValidator(pattern, match_func) + + +@attrs(repr=False, slots=True, hash=True) +class _ProvidesValidator: + interface = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.interface.providedBy(value): + msg = "'{name}' must provide {interface!r} which {value!r} doesn't.".format( + name=attr.name, interface=self.interface, value=value + ) + raise TypeError( + msg, + attr, + self.interface, + value, + ) + + def __repr__(self): + return f"" + + +def provides(interface): + """ + A validator that raises a `TypeError` if the initializer is called + with an object that does not provide the requested *interface* (checks are + performed using ``interface.providedBy(value)`` (see `zope.interface + `_). + + :param interface: The interface to check for. + :type interface: ``zope.interface.Interface`` + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected interface, and the + value it got. + + .. deprecated:: 23.1.0 + """ + import warnings + + warnings.warn( + "attrs's zope-interface support is deprecated and will be removed in, " + "or after, April 2024.", + DeprecationWarning, + stacklevel=2, + ) + return _ProvidesValidator(interface) + + +@attrs(repr=False, slots=True, hash=True) +class _OptionalValidator: + validator = attrib() + + def __call__(self, inst, attr, value): + if value is None: + return + + self.validator(inst, attr, value) + + def __repr__(self): + return f"" + + +def optional(validator): + """ + A validator that makes an attribute optional. An optional attribute is one + which can be set to ``None`` in addition to satisfying the requirements of + the sub-validator. + + :param Callable | tuple[Callable] | list[Callable] validator: A validator + (or validators) that is used for non-``None`` values. + + .. versionadded:: 15.1.0 + .. 
versionchanged:: 17.1.0 *validator* can be a list of validators. + .. versionchanged:: 23.1.0 *validator* can also be a tuple of validators. + """ + if isinstance(validator, (list, tuple)): + return _OptionalValidator(_AndValidator(validator)) + + return _OptionalValidator(validator) + + +@attrs(repr=False, slots=True, hash=True) +class _InValidator: + options = attrib() + + def __call__(self, inst, attr, value): + try: + in_options = value in self.options + except TypeError: # e.g. `1 in "abc"` + in_options = False + + if not in_options: + msg = f"'{attr.name}' must be in {self.options!r} (got {value!r})" + raise ValueError( + msg, + attr, + self.options, + value, + ) + + def __repr__(self): + return f"" + + +def in_(options): + """ + A validator that raises a `ValueError` if the initializer is called + with a value that does not belong in the options provided. The check is + performed using ``value in options``. + + :param options: Allowed options. + :type options: list, tuple, `enum.Enum`, ... + + :raises ValueError: With a human readable error message, the attribute (of + type `attrs.Attribute`), the expected options, and the value it + got. + + .. versionadded:: 17.1.0 + .. versionchanged:: 22.1.0 + The ValueError was incomplete until now and only contained the human + readable error message. Now it contains all the information that has + been promised since 17.1.0. + """ + return _InValidator(options) + + +@attrs(repr=False, slots=False, hash=True) +class _IsCallableValidator: + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not callable(value): + message = ( + "'{name}' must be callable " + "(got {value!r} that is a {actual!r})." + ) + raise NotCallableError( + msg=message.format( + name=attr.name, value=value, actual=value.__class__ + ), + value=value, + ) + + def __repr__(self): + return "" + + +def is_callable(): + """ + A validator that raises a `attrs.exceptions.NotCallableError` if the + initializer is called with a value for this particular attribute + that is not callable. + + .. versionadded:: 19.1.0 + + :raises attrs.exceptions.NotCallableError: With a human readable error + message containing the attribute (`attrs.Attribute`) name, + and the value it got. + """ + return _IsCallableValidator() + + +@attrs(repr=False, slots=True, hash=True) +class _DeepIterable: + member_validator = attrib(validator=is_callable()) + iterable_validator = attrib( + default=None, validator=optional(is_callable()) + ) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.iterable_validator is not None: + self.iterable_validator(inst, attr, value) + + for member in value: + self.member_validator(inst, attr, member) + + def __repr__(self): + iterable_identifier = ( + "" + if self.iterable_validator is None + else f" {self.iterable_validator!r}" + ) + return ( + f"" + ) + + +def deep_iterable(member_validator, iterable_validator=None): + """ + A validator that performs deep validation of an iterable. + + :param member_validator: Validator(s) to apply to iterable members + :param iterable_validator: Validator to apply to iterable itself + (optional) + + .. 
versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + if isinstance(member_validator, (list, tuple)): + member_validator = and_(*member_validator) + return _DeepIterable(member_validator, iterable_validator) + + +@attrs(repr=False, slots=True, hash=True) +class _DeepMapping: + key_validator = attrib(validator=is_callable()) + value_validator = attrib(validator=is_callable()) + mapping_validator = attrib(default=None, validator=optional(is_callable())) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.mapping_validator is not None: + self.mapping_validator(inst, attr, value) + + for key in value: + self.key_validator(inst, attr, key) + self.value_validator(inst, attr, value[key]) + + def __repr__(self): + return ( + "" + ).format(key=self.key_validator, value=self.value_validator) + + +def deep_mapping(key_validator, value_validator, mapping_validator=None): + """ + A validator that performs deep validation of a dictionary. + + :param key_validator: Validator to apply to dictionary keys + :param value_validator: Validator to apply to dictionary values + :param mapping_validator: Validator to apply to top-level mapping + attribute (optional) + + .. versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + return _DeepMapping(key_validator, value_validator, mapping_validator) + + +@attrs(repr=False, frozen=True, slots=True) +class _NumberValidator: + bound = attrib() + compare_op = attrib() + compare_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.compare_func(value, self.bound): + msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}" + raise ValueError(msg) + + def __repr__(self): + return f"" + + +def lt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number larger or equal to *val*. + + :param val: Exclusive upper bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<", operator.lt) + + +def le(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number greater than *val*. + + :param val: Inclusive upper bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<=", operator.le) + + +def ge(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number smaller than *val*. + + :param val: Inclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">=", operator.ge) + + +def gt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number smaller or equal to *val*. + + :param val: Exclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">", operator.gt) + + +@attrs(repr=False, frozen=True, slots=True) +class _MaxLengthValidator: + max_length = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if len(value) > self.max_length: + msg = f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}" + raise ValueError(msg) + + def __repr__(self): + return f"" + + +def max_len(length): + """ + A validator that raises `ValueError` if the initializer is called + with a string or iterable that is longer than *length*. + + :param int length: Maximum length of the string or iterable + + .. 
versionadded:: 21.3.0 + """ + return _MaxLengthValidator(length) + + +@attrs(repr=False, frozen=True, slots=True) +class _MinLengthValidator: + min_length = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if len(value) < self.min_length: + msg = f"Length of '{attr.name}' must be >= {self.min_length}: {len(value)}" + raise ValueError(msg) + + def __repr__(self): + return f"" + + +def min_len(length): + """ + A validator that raises `ValueError` if the initializer is called + with a string or iterable that is shorter than *length*. + + :param int length: Minimum length of the string or iterable + + .. versionadded:: 22.1.0 + """ + return _MinLengthValidator(length) + + +@attrs(repr=False, slots=True, hash=True) +class _SubclassOfValidator: + type = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not issubclass(value, self.type): + msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})." + raise TypeError( + msg, + attr, + self.type, + value, + ) + + def __repr__(self): + return f"" + + +def _subclass_of(type): + """ + A validator that raises a `TypeError` if the initializer is called + with a wrong type for this particular attribute (checks are performed using + `issubclass` therefore it's also valid to pass a tuple of types). + + :param type: The type to check for. + :type type: type or tuple of types + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected type, and the value it + got. + """ + return _SubclassOfValidator(type) + + +@attrs(repr=False, slots=True, hash=True) +class _NotValidator: + validator = attrib() + msg = attrib( + converter=default_if_none( + "not_ validator child '{validator!r}' " + "did not raise a captured error" + ) + ) + exc_types = attrib( + validator=deep_iterable( + member_validator=_subclass_of(Exception), + iterable_validator=instance_of(tuple), + ), + ) + + def __call__(self, inst, attr, value): + try: + self.validator(inst, attr, value) + except self.exc_types: + pass # suppress error to invert validity + else: + raise ValueError( + self.msg.format( + validator=self.validator, + exc_types=self.exc_types, + ), + attr, + self.validator, + value, + self.exc_types, + ) + + def __repr__(self): + return ( + "" + ).format( + what=self.validator, + exc_types=self.exc_types, + ) + + +def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)): + """ + A validator that wraps and logically 'inverts' the validator passed to it. + It will raise a `ValueError` if the provided validator *doesn't* raise a + `ValueError` or `TypeError` (by default), and will suppress the exception + if the provided validator *does*. + + Intended to be used with existing validators to compose logic without + needing to create inverted variants, for example, ``not_(in_(...))``. + + :param validator: A validator to be logically inverted. + :param msg: Message to raise if validator fails. + Formatted with keys ``exc_types`` and ``validator``. + :type msg: str + :param exc_types: Exception type(s) to capture. + Other types raised by child validators will not be intercepted and + pass through. + + :raises ValueError: With a human readable error message, + the attribute (of type `attrs.Attribute`), + the validator that failed to raise an exception, + the value it got, + and the expected exception types. + + .. 
versionadded:: 22.2.0 + """ + try: + exc_types = tuple(exc_types) + except TypeError: + exc_types = (exc_types,) + return _NotValidator(validator, msg, exc_types) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/validators.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/validators.pyi new file mode 100644 index 000000000..d194a75ab --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attr/validators.pyi @@ -0,0 +1,88 @@ +from typing import ( + Any, + AnyStr, + Callable, + Container, + ContextManager, + Iterable, + List, + Mapping, + Match, + Optional, + Pattern, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +from . import _ValidatorType +from . import _ValidatorArgType + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_I = TypeVar("_I", bound=Iterable) +_K = TypeVar("_K") +_V = TypeVar("_V") +_M = TypeVar("_M", bound=Mapping) + +def set_disabled(run: bool) -> None: ... +def get_disabled() -> bool: ... +def disabled() -> ContextManager[None]: ... + +# To be more precise on instance_of use some overloads. +# If there are more than 3 items in the tuple then we fall back to Any +@overload +def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ... +@overload +def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2]] +) -> _ValidatorType[Union[_T1, _T2]]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2], Type[_T3]] +) -> _ValidatorType[Union[_T1, _T2, _T3]]: ... +@overload +def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ... +def provides(interface: Any) -> _ValidatorType[Any]: ... +def optional( + validator: Union[ + _ValidatorType[_T], List[_ValidatorType[_T]], Tuple[_ValidatorType[_T]] + ] +) -> _ValidatorType[Optional[_T]]: ... +def in_(options: Container[_T]) -> _ValidatorType[_T]: ... +def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... +def matches_re( + regex: Union[Pattern[AnyStr], AnyStr], + flags: int = ..., + func: Optional[ + Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]] + ] = ..., +) -> _ValidatorType[AnyStr]: ... +def deep_iterable( + member_validator: _ValidatorArgType[_T], + iterable_validator: Optional[_ValidatorType[_I]] = ..., +) -> _ValidatorType[_I]: ... +def deep_mapping( + key_validator: _ValidatorType[_K], + value_validator: _ValidatorType[_V], + mapping_validator: Optional[_ValidatorType[_M]] = ..., +) -> _ValidatorType[_M]: ... +def is_callable() -> _ValidatorType[_T]: ... +def lt(val: _T) -> _ValidatorType[_T]: ... +def le(val: _T) -> _ValidatorType[_T]: ... +def ge(val: _T) -> _ValidatorType[_T]: ... +def gt(val: _T) -> _ValidatorType[_T]: ... +def max_len(length: int) -> _ValidatorType[_T]: ... +def min_len(length: int) -> _ValidatorType[_T]: ... +def not_( + validator: _ValidatorType[_T], + *, + msg: Optional[str] = None, + exc_types: Union[Type[Exception], Iterable[Type[Exception]]] = ..., +) -> _ValidatorType[_T]: ... 
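The validator helpers defined above (`instance_of`, `in_`, `optional`, `not_`, the numeric bounds, the length checks, and the `deep_*` variants) are meant to be attached to fields via `attrs.field(validator=...)`. A minimal sketch of how they compose on a class; the `ServerConfig` class and its field names are invented for illustration:

```python
# Illustrative sketch only -- exercises the validators defined in
# attr/validators.py above; ServerConfig and its fields are made up.
from attrs import define, field
from attrs import validators as v


@define
class ServerConfig:
    # value must be one of the allowed options (in_)
    scheme: str = field(validator=v.in_(("http", "https")))
    # a list of validators is AND-ed together (instance_of + ge + le)
    port: int = field(validator=[v.instance_of(int), v.ge(1), v.le(65535)])
    # length bounds (min_len / max_len)
    host: str = field(validator=[v.min_len(1), v.max_len(253)])
    # every member of the iterable is validated (deep_iterable)
    tags: list = field(
        factory=list,
        validator=v.deep_iterable(
            member_validator=v.instance_of(str),
            iterable_validator=v.instance_of(list),
        ),
    )
    # not_() inverts a validator: any name *except* "admin" is accepted
    name: str = field(default="app", validator=v.not_(v.in_(("admin",))))


ServerConfig(scheme="https", port=8443, host="example.org")      # ok
# ServerConfig(scheme="ftp", port=8443, host="example.org")      # ValueError
```

Passing a list of validators relies on the same `_AndValidator` composition used by `optional()` earlier in this file.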
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs-23.2.0.dist-info/INSTALLER b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs-23.2.0.dist-info/INSTALLER new file mode 100644 index 000000000..ce3133dc1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs-23.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +Poetry 2.2.1 \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs-23.2.0.dist-info/METADATA b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs-23.2.0.dist-info/METADATA new file mode 100644 index 000000000..c20be76c7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs-23.2.0.dist-info/METADATA @@ -0,0 +1,202 @@ +Metadata-Version: 2.1 +Name: attrs +Version: 23.2.0 +Summary: Classes Without Boilerplate +Project-URL: Documentation, https://www.attrs.org/ +Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html +Project-URL: GitHub, https://github.com/python-attrs/attrs +Project-URL: Funding, https://github.com/sponsors/hynek +Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi +Author-email: Hynek Schlawack +License-Expression: MIT +License-File: LICENSE +Keywords: attribute,boilerplate,class +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +Requires-Python: >=3.7 +Requires-Dist: importlib-metadata; python_version < '3.8' +Provides-Extra: cov +Requires-Dist: attrs[tests]; extra == 'cov' +Requires-Dist: coverage[toml]>=5.3; extra == 'cov' +Provides-Extra: dev +Requires-Dist: attrs[tests]; extra == 'dev' +Requires-Dist: pre-commit; extra == 'dev' +Provides-Extra: docs +Requires-Dist: furo; extra == 'docs' +Requires-Dist: myst-parser; extra == 'docs' +Requires-Dist: sphinx; extra == 'docs' +Requires-Dist: sphinx-notfound-page; extra == 'docs' +Requires-Dist: sphinxcontrib-towncrier; extra == 'docs' +Requires-Dist: towncrier; extra == 'docs' +Requires-Dist: zope-interface; extra == 'docs' +Provides-Extra: tests +Requires-Dist: attrs[tests-no-zope]; extra == 'tests' +Requires-Dist: zope-interface; extra == 'tests' +Provides-Extra: tests-mypy +Requires-Dist: mypy>=1.6; (platform_python_implementation == 'CPython' and python_version >= '3.8') and extra == 'tests-mypy' +Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.8') and extra == 'tests-mypy' +Provides-Extra: tests-no-zope +Requires-Dist: attrs[tests-mypy]; extra == 'tests-no-zope' +Requires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'tests-no-zope' +Requires-Dist: hypothesis; extra == 'tests-no-zope' +Requires-Dist: pympler; extra == 'tests-no-zope' +Requires-Dist: pytest-xdist[psutil]; extra == 'tests-no-zope' +Requires-Dist: pytest>=4.3.0; extra == 'tests-no-zope' +Description-Content-Type: text/markdown + +

+ [attrs logo]

    + + +*attrs* is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka [dunder methods](https://www.attrs.org/en/latest/glossary.html#term-dunder-methods)). +[Trusted by NASA](https://docs.github.com/en/account-and-profile/setting-up-and-managing-your-github-profile/customizing-your-profile/personalizing-your-profile#list-of-qualifying-repositories-for-mars-2020-helicopter-contributor-achievement) for Mars missions since 2020! + +Its main goal is to help you to write **concise** and **correct** software without slowing down your code. + + +## Sponsors + +*attrs* would not be possible without our [amazing sponsors](https://github.com/sponsors/hynek). +Especially those generously supporting us at the *The Organization* tier and higher: + +

+ [sponsor logos]

    + +

    + Please consider joining them to help make attrs’s maintenance more sustainable! +

    + + + +## Example + +*attrs* gives you a class decorator and a way to declaratively define the attributes on that class: + + + +```pycon +>>> from attrs import asdict, define, make_class, Factory + +>>> @define +... class SomeClass: +... a_number: int = 42 +... list_of_numbers: list[int] = Factory(list) +... +... def hard_math(self, another_number): +... return self.a_number + sum(self.list_of_numbers) * another_number + + +>>> sc = SomeClass(1, [1, 2, 3]) +>>> sc +SomeClass(a_number=1, list_of_numbers=[1, 2, 3]) + +>>> sc.hard_math(3) +19 +>>> sc == SomeClass(1, [1, 2, 3]) +True +>>> sc != SomeClass(2, [3, 2, 1]) +True + +>>> asdict(sc) +{'a_number': 1, 'list_of_numbers': [1, 2, 3]} + +>>> SomeClass() +SomeClass(a_number=42, list_of_numbers=[]) + +>>> C = make_class("C", ["a", "b"]) +>>> C("foo", "bar") +C(a='foo', b='bar') +``` + +After *declaring* your attributes, *attrs* gives you: + +- a concise and explicit overview of the class's attributes, +- a nice human-readable `__repr__`, +- equality-checking methods, +- an initializer, +- and much more, + +*without* writing dull boilerplate code again and again and *without* runtime performance penalties. + +**Hate type annotations**!? +No problem! +Types are entirely **optional** with *attrs*. +Simply assign `attrs.field()` to the attributes instead of annotating them with types. + +--- + +This example uses *attrs*'s modern APIs that have been introduced in version 20.1.0, and the *attrs* package import name that has been added in version 21.3.0. +The classic APIs (`@attr.s`, `attr.ib`, plus their serious-business aliases) and the `attr` package import name will remain **indefinitely**. + +Please check out [*On The Core API Names*](https://www.attrs.org/en/latest/names.html) for a more in-depth explanation. + + +## Data Classes + +On the tin, *attrs* might remind you of `dataclasses` (and indeed, `dataclasses` [are a descendant](https://hynek.me/articles/import-attrs/) of *attrs*). +In practice it does a lot more and is more flexible. +For instance it allows you to define [special handling of NumPy arrays for equality checks](https://www.attrs.org/en/stable/comparison.html#customization), allows more ways to [plug into the initialization process](https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization), and allows for stepping through the generated methods using a debugger. + +For more details, please refer to our [comparison page](https://www.attrs.org/en/stable/why.html#data-classes). + + +## Project Information + +- [**Changelog**](https://www.attrs.org/en/stable/changelog.html) +- [**Documentation**](https://www.attrs.org/) +- [**PyPI**](https://pypi.org/project/attrs/) +- [**Source Code**](https://github.com/python-attrs/attrs) +- [**Contributing**](https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md) +- [**Third-party Extensions**](https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs) +- **Get Help**: please use the `python-attrs` tag on [StackOverflow](https://stackoverflow.com/questions/tagged/python-attrs) + + +### *attrs* for Enterprise + +Available as part of the Tidelift Subscription. + +The maintainers of *attrs* and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications. +Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use. 
+[Learn more.](https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) + +## Release Information + +### Changes + +- The type annotation for `attrs.resolve_types()` is now correct. + [#1141](https://github.com/python-attrs/attrs/issues/1141) +- Type stubs now use `typing.dataclass_transform` to decorate dataclass-like decorators, instead of the non-standard `__dataclass_transform__` special form, which is only supported by Pyright. + [#1158](https://github.com/python-attrs/attrs/issues/1158) +- Fixed serialization of namedtuple fields using `attrs.asdict/astuple()` with `retain_collection_types=True`. + [#1165](https://github.com/python-attrs/attrs/issues/1165) +- `attrs.AttrsInstance` is now a `typing.Protocol` in both type hints and code. + This allows you to subclass it along with another `Protocol`. + [#1172](https://github.com/python-attrs/attrs/issues/1172) +- If *attrs* detects that `__attrs_pre_init__` accepts more than just `self`, it will call it with the same arguments as `__init__` was called. + This allows you to, for example, pass arguments to `super().__init__()`. + [#1187](https://github.com/python-attrs/attrs/issues/1187) +- Slotted classes now transform `functools.cached_property` decorated methods to support equivalent semantics. + [#1200](https://github.com/python-attrs/attrs/issues/1200) +- Added *class_body* argument to `attrs.make_class()` to provide additional attributes for newly created classes. + It is, for example, now possible to attach methods. + [#1203](https://github.com/python-attrs/attrs/issues/1203) + + +--- + +[Full changelog](https://www.attrs.org/en/stable/changelog.html) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs-23.2.0.dist-info/RECORD b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs-23.2.0.dist-info/RECORD new file mode 100644 index 000000000..84a947cfc --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs-23.2.0.dist-info/RECORD @@ -0,0 +1,36 @@ +attr/__init__.py,sha256=WlXJN6ICB0Y_HZ0lmuTUgia0kuSdn2p67d4N6cYxNZM,3307 +attr/__init__.pyi,sha256=u08EujYHy_rSyebNn-I9Xv2S_cXmtA9xWGc0cBsyl18,16976 +attr/_cmp.py,sha256=OQZlWdFX74z18adGEUp40Ojqm0NNu1Flqnv2JE8B2ng,4025 +attr/_cmp.pyi,sha256=sGQmOM0w3_K4-X8cTXR7g0Hqr290E8PTObA9JQxWQqc,399 +attr/_compat.py,sha256=QmRyxii295wcQfaugWqxuIumAPsNQ2-RUF82QZPqMKw,2540 +attr/_config.py,sha256=z81Vt-GeT_2taxs1XZfmHx9TWlSxjPb6eZH1LTGsS54,843 +attr/_funcs.py,sha256=VBTUFKLklsmqxys3qWSTK_Ac9Z4s0mAJWwgW9nA7Llk,17173 +attr/_make.py,sha256=LnVy2e0HygoqaZknhC19z7JmOt7qGkAadf2LZgWVJWI,101923 +attr/_next_gen.py,sha256=as1voi8siAI_o2OQG8YIiZvmn0G7-S3_j_774rnoZ_g,6203 +attr/_typing_compat.pyi,sha256=XDP54TUn-ZKhD62TOQebmzrwFyomhUCoGRpclb6alRA,469 +attr/_version_info.py,sha256=exSqb3b5E-fMSsgZAlEw9XcLpEgobPORCZpcaEglAM4,2121 +attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209 +attr/converters.py,sha256=Kyw5MY0yfnUR_RwN1Vydf0EiE---htDxOgSc_-NYL6A,3622 +attr/converters.pyi,sha256=jKlpHBEt6HVKJvgrMFJRrHq8p61GXg4-Nd5RZWKJX7M,406 +attr/exceptions.py,sha256=HRFq4iybmv7-DcZwyjl6M1euM2YeJVK_hFxuaBGAngI,1977 +attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539 +attr/filters.py,sha256=9pYvXqdg6mtLvKIIb56oALRMoHFnQTcGCO4EXTc1qyM,1470 +attr/filters.pyi,sha256=0mRCjLKxdcvAo0vD-Cr81HfRXXCp9j_cAXjOoAHtPGM,225 +attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attr/setters.py,sha256=pbCZQ-pE6ZxjDqZfWWUhUFefXtpekIU4qS_YDMLPQ50,1400 
+attr/setters.pyi,sha256=pyY8TVNBu8TWhOldv_RxHzmGvdgFQH981db70r0fn5I,567 +attr/validators.py,sha256=LGVpbiNg_KGzYrKUD5JPiZkx8TMfynDZGoQoLJNCIMo,19676 +attr/validators.pyi,sha256=167Dl9nt7NUhE9wht1I-buo039qyUT1nEUT_nKjSWr4,2580 +attrs/__init__.py,sha256=9_5waVbFs7rLqtXZ73tNDrxhezyZ8VZeX4BbvQ3EeJw,1039 +attrs/__init__.pyi,sha256=s_ajQ_U14DOsOz0JbmAKDOi46B3v2PcdO0UAV1MY6Ek,2168 +attrs/converters.py,sha256=8kQljrVwfSTRu8INwEk8SI0eGrzmWftsT7rM0EqyohM,76 +attrs/exceptions.py,sha256=ACCCmg19-vDFaDPY9vFl199SPXCQMN_bENs4DALjzms,76 +attrs/filters.py,sha256=VOUMZug9uEU6dUuA0dF1jInUK0PL3fLgP0VBS5d-CDE,73 +attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attrs/setters.py,sha256=eL1YidYQV3T2h9_SYIZSZR1FAcHGb1TuCTy0E0Lv2SU,73 +attrs/validators.py,sha256=xcy6wD5TtTkdCG1f4XWbocPSO0faBjk5IfVJfP6SUj0,76 +attrs-23.2.0.dist-info/METADATA,sha256=WwvG7OHyKjEPpyFUZCCYt1n0E_CcqdRb7bliGEdcm-A,9531 +attrs-23.2.0.dist-info/WHEEL,sha256=mRYSEL3Ih6g5a_CVMIcwiF__0Ae4_gLYh01YFNwiq1k,87 +attrs-23.2.0.dist-info/licenses/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109 +attrs-23.2.0.dist-info/INSTALLER,sha256=sz0Tnp99msIbbLB51q7U9nA6AvRKcsOeUKhrHH0Ulg4,12 +attrs-23.2.0.dist-info/RECORD,, diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs-23.2.0.dist-info/WHEEL b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs-23.2.0.dist-info/WHEEL new file mode 100644 index 000000000..2860816ab --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs-23.2.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.21.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs-23.2.0.dist-info/licenses/LICENSE b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs-23.2.0.dist-info/licenses/LICENSE new file mode 100644 index 000000000..2bd6453d2 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs-23.2.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Hynek Schlawack and the attrs contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
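Two of the 23.2.0 changelog entries quoted above are easier to see in code: `__attrs_pre_init__` is now called with the same arguments as `__init__` when it accepts more than `self`, and `attrs.make_class()` gained a *class_body* argument for attaching extra attributes such as methods. A rough sketch under those assumptions; `Base`, `Child`, and `total` are invented names:

```python
# Sketch of two attrs 23.2.0 changelog items; Base, Child and total()
# are invented for illustration.
from attrs import define, make_class


class Base:
    def __init__(self, a_number):
        self.base_number = a_number


@define
class Child(Base):
    a_number: int

    # Since 23.2.0, __attrs_pre_init__ may accept the same arguments as
    # __init__; here one of them is forwarded to the plain base class.
    def __attrs_pre_init__(self, a_number):
        super().__init__(a_number)


# class_body lets make_class() attach additional attributes, e.g. methods.
C = make_class(
    "C",
    ["x", "y"],
    class_body={"total": lambda self: self.x + self.y},
)

assert Child(41).base_number == 41
assert C(1, 2).total() == 3
```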
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/__init__.py new file mode 100644 index 000000000..0c2481561 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/__init__.py @@ -0,0 +1,65 @@ +# SPDX-License-Identifier: MIT + +from attr import ( + NOTHING, + Attribute, + AttrsInstance, + Factory, + _make_getattr, + assoc, + cmp_using, + define, + evolve, + field, + fields, + fields_dict, + frozen, + has, + make_class, + mutable, + resolve_types, + validate, +) +from attr._next_gen import asdict, astuple + +from . import converters, exceptions, filters, setters, validators + + +__all__ = [ + "__author__", + "__copyright__", + "__description__", + "__doc__", + "__email__", + "__license__", + "__title__", + "__url__", + "__version__", + "__version_info__", + "asdict", + "assoc", + "astuple", + "Attribute", + "AttrsInstance", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "Factory", + "field", + "fields_dict", + "fields", + "filters", + "frozen", + "has", + "make_class", + "mutable", + "NOTHING", + "resolve_types", + "setters", + "validate", + "validators", +] + +__getattr__ = _make_getattr(__name__) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/__init__.pyi b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/__init__.pyi new file mode 100644 index 000000000..9372cfea1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/__init__.pyi @@ -0,0 +1,67 @@ +from typing import ( + Any, + Callable, + Dict, + Mapping, + Optional, + Sequence, + Tuple, + Type, +) + +# Because we need to type our own stuff, we have to make everything from +# attr explicitly public too. +from attr import __author__ as __author__ +from attr import __copyright__ as __copyright__ +from attr import __description__ as __description__ +from attr import __email__ as __email__ +from attr import __license__ as __license__ +from attr import __title__ as __title__ +from attr import __url__ as __url__ +from attr import __version__ as __version__ +from attr import __version_info__ as __version_info__ +from attr import _FilterType +from attr import assoc as assoc +from attr import Attribute as Attribute +from attr import AttrsInstance as AttrsInstance +from attr import cmp_using as cmp_using +from attr import converters as converters +from attr import define as define +from attr import evolve as evolve +from attr import exceptions as exceptions +from attr import Factory as Factory +from attr import field as field +from attr import fields as fields +from attr import fields_dict as fields_dict +from attr import filters as filters +from attr import frozen as frozen +from attr import has as has +from attr import make_class as make_class +from attr import mutable as mutable +from attr import NOTHING as NOTHING +from attr import resolve_types as resolve_types +from attr import setters as setters +from attr import validate as validate +from attr import validators as validators + +# TODO: see definition of attr.asdict/astuple +def asdict( + inst: AttrsInstance, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: bool = ..., +) -> Dict[str, Any]: ... 
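The `asdict` stub above mirrors the runtime helper re-exported from `attr._next_gen`. A hedged usage sketch of its `filter` and `value_serializer` hooks; the `Account` class and its fields are invented for illustration:

```python
# Illustrative sketch of attrs.asdict with a filter and a value serializer.
import datetime

from attrs import asdict, define, field, fields, filters


@define
class Account:
    user: str
    password: str
    created: datetime.datetime = field(factory=datetime.datetime.utcnow)


acct = Account("lepture", "hunter2")

# Drop the password field entirely when serializing.
public = asdict(acct, filter=filters.exclude(fields(Account).password))


# Render datetime values as ISO strings instead of datetime objects.
def serialize(inst, attribute, value):
    if isinstance(value, datetime.datetime):
        return value.isoformat()
    return value


payload = asdict(acct, value_serializer=serialize)
```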
+ +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: AttrsInstance, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/converters.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/converters.py new file mode 100644 index 000000000..7821f6c02 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/converters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.converters import * # noqa: F403 diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/exceptions.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/exceptions.py new file mode 100644 index 000000000..3323f9d21 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/exceptions.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.exceptions import * # noqa: F403 diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/filters.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/filters.py new file mode 100644 index 000000000..3080f4839 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/filters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.filters import * # noqa: F403 diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/py.typed b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/setters.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/setters.py new file mode 100644 index 000000000..f3d73bb79 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/setters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.setters import * # noqa: F403 diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/validators.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/validators.py new file mode 100644 index 000000000..037e124f2 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/attrs/validators.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.validators import * # noqa: F403 diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/__init__.py new file mode 100644 index 000000000..2a2e5adc5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/__init__.py @@ -0,0 +1,17 @@ +""" + authlib + ~~~~~~~ + + The ultimate Python library in building OAuth 1.0, OAuth 2.0 and OpenID + Connect clients and providers. It covers from low level specification + implementation to high level framework integrations. + + :copyright: (c) 2017 by Hsiaoming Yang. + :license: BSD, see LICENSE for more details. 
+""" +from .consts import version, homepage, author + +__version__ = version +__homepage__ = homepage +__author__ = author +__license__ = 'BSD-3-Clause' diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/common/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/common/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/common/encoding.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/common/encoding.py new file mode 100644 index 000000000..31df0b039 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/common/encoding.py @@ -0,0 +1,88 @@ +import sys +import json +import base64 +import struct + +is_py2 = sys.version_info[0] == 2 + +if is_py2: + unicode_type = unicode # noqa: F821 + byte_type = str + text_types = (unicode, str) # noqa: F821 +else: + unicode_type = str + byte_type = bytes + text_types = (str, ) + + +def to_bytes(x, charset='utf-8', errors='strict'): + if x is None: + return None + if isinstance(x, byte_type): + return x + if isinstance(x, unicode_type): + return x.encode(charset, errors) + if isinstance(x, (int, float)): + return str(x).encode(charset, errors) + return byte_type(x) + + +def to_unicode(x, charset='utf-8', errors='strict'): + if x is None or isinstance(x, unicode_type): + return x + if isinstance(x, byte_type): + return x.decode(charset, errors) + return unicode_type(x) + + +def to_native(x, encoding='ascii'): + if isinstance(x, str): + return x + if is_py2: + return x.encode(encoding) + return x.decode(encoding) + + +def json_loads(s): + return json.loads(s) + + +def json_dumps(data, ensure_ascii=False): + return json.dumps(data, ensure_ascii=ensure_ascii, separators=(',', ':')) + + +def urlsafe_b64decode(s): + s += b'=' * (-len(s) % 4) + return base64.urlsafe_b64decode(s) + + +def urlsafe_b64encode(s): + return base64.urlsafe_b64encode(s).rstrip(b'=') + + +def base64_to_int(s): + data = urlsafe_b64decode(to_bytes(s, charset='ascii')) + buf = struct.unpack('%sB' % len(data), data) + return int(''.join(["%02x" % byte for byte in buf]), 16) + + +def int_to_base64(num): + if num < 0: + raise ValueError('Must be a positive integer') + + if hasattr(int, 'to_bytes'): + s = num.to_bytes((num.bit_length() + 7) // 8, 'big', signed=False) + else: + buf = [] + while num: + num, remainder = divmod(num, 256) + buf.append(remainder) + buf.reverse() + s = struct.pack('%sB' % len(buf), *buf) + return to_unicode(urlsafe_b64encode(s)) + + +def json_b64encode(text): + if isinstance(text, dict): + text = json_dumps(text) + return urlsafe_b64encode(to_bytes(text)) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/common/errors.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/common/errors.py new file mode 100644 index 000000000..015ab4beb --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/common/errors.py @@ -0,0 +1,76 @@ +#: coding: utf-8 +from authlib.consts import default_json_headers + + +class AuthlibBaseError(Exception): + """Base Exception for all errors in Authlib.""" + + #: short-string error code + error = None + #: long-string to describe this error + description = '' + #: web page that describes this error + uri = None + + def __init__(self, error=None, description=None, uri=None): + if error is not None: + self.error = error + if description is not None: + self.description = description + if uri is not None: + self.uri = uri + + message = '{}: 
{}'.format(self.error, self.description) + super(AuthlibBaseError, self).__init__(message) + + def __repr__(self): + return '<{} "{}">'.format(self.__class__.__name__, self.error) + + +class AuthlibHTTPError(AuthlibBaseError): + #: HTTP status code + status_code = 400 + + def __init__(self, error=None, description=None, uri=None, + status_code=None): + super(AuthlibHTTPError, self).__init__(error, description, uri) + if status_code is not None: + self.status_code = status_code + self._translations = None + self._error_uris = None + + def gettext(self, s): + if self._translations: + return self._translations.gettext(s) + return s + + def get_error_description(self): + return self.description + + def get_error_uri(self): + if self.uri: + return self.uri + if self._error_uris: + return self._error_uris.get(self.error) + + def get_body(self): + error = [('error', self.error)] + + description = self.get_error_description() + if description: + error.append(('error_description', description)) + + uri = self.get_error_uri() + if uri: + error.append(('error_uri', uri)) + return error + + def get_headers(self): + return default_json_headers[:] + + def __call__(self, translations=None, error_uris=None): + self._translations = translations + self._error_uris = error_uris + body = dict(self.get_body()) + headers = self.get_headers() + return self.status_code, body, headers diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/common/security.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/common/security.py new file mode 100644 index 000000000..b05ea1443 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/common/security.py @@ -0,0 +1,19 @@ +import os +import string +import random + +UNICODE_ASCII_CHARACTER_SET = string.ascii_letters + string.digits + + +def generate_token(length=30, chars=UNICODE_ASCII_CHARACTER_SET): + rand = random.SystemRandom() + return ''.join(rand.choice(chars) for _ in range(length)) + + +def is_secure_transport(uri): + """Check if the uri is over ssl.""" + if os.getenv('AUTHLIB_INSECURE_TRANSPORT'): + return True + + uri = uri.lower() + return uri.startswith(('https://', 'http://localhost:')) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/common/urls.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/common/urls.py new file mode 100644 index 000000000..d03b1735c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/common/urls.py @@ -0,0 +1,162 @@ +""" + authlib.util.urls + ~~~~~~~~~~~~~~~~~ + + Wrapper functions for URL encoding and decoding. 
+""" + +import re +try: + from urllib import quote as _quote + from urllib import unquote as _unquote + from urllib import urlencode as _urlencode +except ImportError: + from urllib.parse import quote as _quote + from urllib.parse import unquote as _unquote + from urllib.parse import urlencode as _urlencode + +try: + from urllib2 import parse_keqv_list # noqa: F401 + from urllib2 import parse_http_list # noqa: F401 +except ImportError: + from urllib.request import parse_keqv_list # noqa: F401 + from urllib.request import parse_http_list # noqa: F401 + +try: + import urlparse +except ImportError: + import urllib.parse as urlparse + +from .encoding import to_unicode, to_bytes + +always_safe = ( + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + 'abcdefghijklmnopqrstuvwxyz' + '0123456789_.-' +) +urlencoded = set(always_safe) | set('=&;:%+~,*@!()/?') +INVALID_HEX_PATTERN = re.compile(r'%[^0-9A-Fa-f]|%[0-9A-Fa-f][^0-9A-Fa-f]') + + +def url_encode(params): + encoded = [] + for k, v in params: + encoded.append((to_bytes(k), to_bytes(v))) + return to_unicode(_urlencode(encoded)) + + +def url_decode(query): + """Decode a query string in x-www-form-urlencoded format into a sequence + of two-element tuples. + + Unlike urlparse.parse_qsl(..., strict_parsing=True) urldecode will enforce + correct formatting of the query string by validation. If validation fails + a ValueError will be raised. urllib.parse_qsl will only raise errors if + any of name-value pairs omits the equals sign. + """ + # Check if query contains invalid characters + if query and not set(query) <= urlencoded: + error = ("Error trying to decode a non urlencoded string. " + "Found invalid characters: %s " + "in the string: '%s'. " + "Please ensure the request/response body is " + "x-www-form-urlencoded.") + raise ValueError(error % (set(query) - urlencoded, query)) + + # Check for correctly hex encoded values using a regular expression + # All encoded values begin with % followed by two hex characters + # correct = %00, %A0, %0A, %FF + # invalid = %G0, %5H, %PO + if INVALID_HEX_PATTERN.search(query): + raise ValueError('Invalid hex encoding in query string.') + + # We encode to utf-8 prior to parsing because parse_qsl behaves + # differently on unicode input in python 2 and 3. + # Python 2.7 + # >>> urlparse.parse_qsl(u'%E5%95%A6%E5%95%A6') + # u'\xe5\x95\xa6\xe5\x95\xa6' + # Python 2.7, non unicode input gives the same + # >>> urlparse.parse_qsl('%E5%95%A6%E5%95%A6') + # '\xe5\x95\xa6\xe5\x95\xa6' + # but now we can decode it to unicode + # >>> urlparse.parse_qsl('%E5%95%A6%E5%95%A6').decode('utf-8') + # u'\u5566\u5566' + # Python 3.3 however + # >>> urllib.parse.parse_qsl(u'%E5%95%A6%E5%95%A6') + # u'\u5566\u5566' + + # We want to allow queries such as "c2" whereas urlparse.parse_qsl + # with the strict_parsing flag will not. 
+ params = urlparse.parse_qsl(query, keep_blank_values=True) + + # unicode all the things + decoded = [] + for k, v in params: + decoded.append((to_unicode(k), to_unicode(v))) + return decoded + + +def add_params_to_qs(query, params): + """Extend a query with a list of two-tuples.""" + if isinstance(params, dict): + params = params.items() + + qs = urlparse.parse_qsl(query, keep_blank_values=True) + qs.extend(params) + return url_encode(qs) + + +def add_params_to_uri(uri, params, fragment=False): + """Add a list of two-tuples to the uri query components.""" + sch, net, path, par, query, fra = urlparse.urlparse(uri) + if fragment: + fra = add_params_to_qs(fra, params) + else: + query = add_params_to_qs(query, params) + return urlparse.urlunparse((sch, net, path, par, query, fra)) + + +def quote(s, safe=b'/'): + return to_unicode(_quote(to_bytes(s), safe)) + + +def unquote(s): + return to_unicode(_unquote(s)) + + +def quote_url(s): + return quote(s, b'~@#$&()*!+=:;,.?/\'') + + +def extract_params(raw): + """Extract parameters and return them as a list of 2-tuples. + + Will successfully extract parameters from urlencoded query strings, + dicts, or lists of 2-tuples. Empty strings/dicts/lists will return an + empty list of parameters. Any other input will result in a return + value of None. + """ + if isinstance(raw, (list, tuple)): + try: + raw = dict(raw) + except (TypeError, ValueError): + return None + + if isinstance(raw, dict): + params = [] + for k, v in raw.items(): + params.append((to_unicode(k), to_unicode(v))) + return params + + if not raw: + return None + + try: + return url_decode(raw) + except ValueError: + return None + + +def is_valid_url(url): + parsed = urlparse.urlparse(url) + return parsed.scheme and parsed.hostname diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/consts.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/consts.py new file mode 100644 index 000000000..649918992 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/consts.py @@ -0,0 +1,11 @@ +name = 'Authlib' +version = '0.15.6' +author = 'Hsiaoming Yang ' +homepage = 'https://authlib.org/' +default_user_agent = '{}/{} (+{})'.format(name, version, homepage) + +default_json_headers = [ + ('Content-Type', 'application/json'), + ('Cache-Control', 'no-store'), + ('Pragma', 'no-cache'), +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/deprecate.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/deprecate.py new file mode 100644 index 000000000..ba87f3c32 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/deprecate.py @@ -0,0 +1,16 @@ +import warnings + + +class AuthlibDeprecationWarning(DeprecationWarning): + pass + + +warnings.simplefilter('always', AuthlibDeprecationWarning) + + +def deprecate(message, version=None, link_uid=None, link_file=None): + if version: + message += '\nIt will be compatible before version {}.'.format(version) + if link_uid and link_file: + message += '\nRead more '.format(link_uid, link_file) + warnings.warn(AuthlibDeprecationWarning(message), stacklevel=2) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/__init__.py 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/__init__.py new file mode 100644 index 000000000..4fa35b8a3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/__init__.py @@ -0,0 +1,16 @@ +from .base_oauth import BaseOAuth +from .base_app import BaseApp +from .remote_app import RemoteApp +from .framework_integration import FrameworkIntegration +from .errors import ( + OAuthError, MissingRequestTokenError, MissingTokenError, + TokenExpiredError, InvalidTokenError, UnsupportedTokenTypeError, + MismatchingStateError, +) + +__all__ = [ + 'BaseOAuth', 'BaseApp', 'RemoteApp', 'FrameworkIntegration', + 'OAuthError', 'MissingRequestTokenError', 'MissingTokenError', + 'TokenExpiredError', 'InvalidTokenError', 'UnsupportedTokenTypeError', + 'MismatchingStateError', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/async_app.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/async_app.py new file mode 100644 index 000000000..b1b1d6e8b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/async_app.py @@ -0,0 +1,207 @@ +import time +import logging +from authlib.common.urls import urlparse +from authlib.jose import JsonWebToken, JsonWebKey +from authlib.oidc.core import UserInfo, CodeIDToken, ImplicitIDToken +from .base_app import BaseApp +from .errors import ( + MissingRequestTokenError, + MissingTokenError, +) + +__all__ = ['AsyncRemoteApp'] + +log = logging.getLogger(__name__) + + +class AsyncRemoteApp(BaseApp): + async def load_server_metadata(self): + if self._server_metadata_url and '_loaded_at' not in self.server_metadata: + metadata = await self._fetch_server_metadata(self._server_metadata_url) + metadata['_loaded_at'] = time.time() + self.server_metadata.update(metadata) + return self.server_metadata + + async def _on_update_token(self, token, refresh_token=None, access_token=None): + if self._update_token: + await self._update_token( + token, + refresh_token=refresh_token, + access_token=access_token, + ) + + async def _create_oauth1_authorization_url(self, client, authorization_endpoint, **kwargs): + params = {} + if self.request_token_params: + params.update(self.request_token_params) + token = await client.fetch_request_token( + self.request_token_url, **params + ) + log.debug('Fetch request token: {!r}'.format(token)) + url = client.create_authorization_url(authorization_endpoint, **kwargs) + return {'url': url, 'request_token': token} + + async def create_authorization_url(self, redirect_uri=None, **kwargs): + """Generate the authorization url and state for HTTP redirect. + + :param redirect_uri: Callback or redirect URI for authorization. + :param kwargs: Extra parameters to include. 
+ :return: dict + """ + metadata = await self.load_server_metadata() + authorization_endpoint = self.authorize_url + if not authorization_endpoint and not self.request_token_url: + authorization_endpoint = metadata.get('authorization_endpoint') + + if not authorization_endpoint: + raise RuntimeError('Missing "authorize_url" value') + + if self.authorize_params: + kwargs.update(self.authorize_params) + + async with self._get_oauth_client(**metadata) as client: + client.redirect_uri = redirect_uri + + if self.request_token_url: + return await self._create_oauth1_authorization_url( + client, authorization_endpoint, **kwargs) + else: + return self._create_oauth2_authorization_url( + client, authorization_endpoint, **kwargs) + + async def fetch_access_token(self, redirect_uri=None, request_token=None, **params): + """Fetch access token in one step. + + :param redirect_uri: Callback or Redirect URI that is used in + previous :meth:`authorize_redirect`. + :param request_token: A previous request token for OAuth 1. + :param params: Extra parameters to fetch access token. + :return: A token dict. + """ + metadata = await self.load_server_metadata() + token_endpoint = self.access_token_url + if not token_endpoint and not self.request_token_url: + token_endpoint = metadata.get('token_endpoint') + + async with self._get_oauth_client(**metadata) as client: + if self.request_token_url: + if redirect_uri is not None: + client.redirect_uri = redirect_uri + if request_token is None: + raise MissingRequestTokenError() + # merge request token with verifier + token = {} + token.update(request_token) + token.update(params) + client.token = token + kwargs = self.access_token_params or {} + token = await client.fetch_access_token(token_endpoint, **kwargs) + client.redirect_uri = None + else: + if redirect_uri is not None: + client.redirect_uri = redirect_uri + kwargs = {} + if self.access_token_params: + kwargs.update(self.access_token_params) + kwargs.update(params) + token = await client.fetch_token(token_endpoint, **kwargs) + return token + + async def request(self, method, url, token=None, **kwargs): + if self.api_base_url and not url.startswith(('https://', 'http://')): + url = urlparse.urljoin(self.api_base_url, url) + + withhold_token = kwargs.get('withhold_token') + if not withhold_token: + metadata = await self.load_server_metadata() + else: + metadata = {} + + async with self._get_oauth_client(**metadata) as client: + request = kwargs.pop('request', None) + + if withhold_token: + return await client.request(method, url, **kwargs) + + if token is None and request: + token = await self._fetch_token(request) + + if token is None: + raise MissingTokenError() + + client.token = token + return await client.request(method, url, **kwargs) + + async def userinfo(self, **kwargs): + """Fetch user info from ``userinfo_endpoint``.""" + metadata = await self.load_server_metadata() + resp = await self.get(metadata['userinfo_endpoint'], **kwargs) + data = resp.json() + + compliance_fix = metadata.get('userinfo_compliance_fix') + if compliance_fix: + data = await compliance_fix(self, data) + return UserInfo(data) + + async def _parse_id_token(self, token, nonce, claims_options=None): + """Return an instance of UserInfo from token's ``id_token``.""" + claims_params = dict( + nonce=nonce, + client_id=self.client_id, + ) + if 'access_token' in token: + claims_params['access_token'] = token['access_token'] + claims_cls = CodeIDToken + else: + claims_cls = ImplicitIDToken + + metadata = await self.load_server_metadata() + 
if claims_options is None and 'issuer' in metadata: + claims_options = {'iss': {'values': [metadata['issuer']]}} + + alg_values = metadata.get('id_token_signing_alg_values_supported') + if not alg_values: + alg_values = ['RS256'] + + jwt = JsonWebToken(alg_values) + + jwk_set = await self._fetch_jwk_set() + try: + claims = jwt.decode( + token['id_token'], + key=JsonWebKey.import_key_set(jwk_set), + claims_cls=claims_cls, + claims_options=claims_options, + claims_params=claims_params, + ) + except ValueError: + jwk_set = await self._fetch_jwk_set(force=True) + claims = jwt.decode( + token['id_token'], + key=JsonWebKey.import_key_set(jwk_set), + claims_cls=claims_cls, + claims_options=claims_options, + claims_params=claims_params, + ) + + claims.validate(leeway=120) + return UserInfo(claims) + + async def _fetch_jwk_set(self, force=False): + metadata = await self.load_server_metadata() + jwk_set = metadata.get('jwks') + if jwk_set and not force: + return jwk_set + + uri = metadata.get('jwks_uri') + if not uri: + raise RuntimeError('Missing "jwks_uri" in metadata') + + jwk_set = await self._fetch_server_metadata(uri) + self.server_metadata['jwks'] = jwk_set + return jwk_set + + async def _fetch_server_metadata(self, url): + async with self._get_oauth_client() as client: + resp = await client.request('GET', url, withhold_token=True) + return resp.json() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/base_app.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/base_app.py new file mode 100644 index 000000000..d6fb0a2f0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/base_app.py @@ -0,0 +1,241 @@ +import logging + +from authlib.common.security import generate_token +from authlib.consts import default_user_agent +from .errors import ( + MismatchingStateError, +) + +__all__ = ['BaseApp'] + +log = logging.getLogger(__name__) + + +class BaseApp(object): + """The remote application for OAuth 1 and OAuth 2. It is used together + with OAuth registry. + + :param name: The name of the OAuth client, like: github, twitter + :param fetch_token: A function to fetch access token from database + :param update_token: A function to update access token to database + :param client_id: Client key of OAuth 1, or Client ID of OAuth 2 + :param client_secret: Client secret of OAuth 2, or Client Secret of OAuth 2 + :param request_token_url: Request Token endpoint for OAuth 1 + :param request_token_params: Extra parameters for Request Token endpoint + :param access_token_url: Access Token endpoint for OAuth 1 and OAuth 2 + :param access_token_params: Extra parameters for Access Token endpoint + :param authorize_url: Endpoint for user authorization of OAuth 1 or OAuth 2 + :param authorize_params: Extra parameters for Authorization Endpoint + :param api_base_url: The base API endpoint to make requests simple + :param client_kwargs: Extra keyword arguments for session + :param server_metadata_url: Discover server metadata from this URL + :param user_agent: Define a custom user agent to be used in HTTP request + :param kwargs: Extra server metadata + + Create an instance of ``RemoteApp``. 
If ``request_token_url`` is configured, + it would be an OAuth 1 instance, otherwise it is OAuth 2 instance:: + + oauth1_client = RemoteApp( + client_id='Twitter Consumer Key', + client_secret='Twitter Consumer Secret', + request_token_url='https://api.twitter.com/oauth/request_token', + access_token_url='https://api.twitter.com/oauth/access_token', + authorize_url='https://api.twitter.com/oauth/authenticate', + api_base_url='https://api.twitter.com/1.1/', + ) + + oauth2_client = RemoteApp( + client_id='GitHub Client ID', + client_secret='GitHub Client Secret', + api_base_url='https://api.github.com/', + access_token_url='https://github.com/login/oauth/access_token', + authorize_url='https://github.com/login/oauth/authorize', + client_kwargs={'scope': 'user:email'}, + ) + """ + OAUTH_APP_CONFIG = None + + def __init__( + self, framework, name=None, fetch_token=None, update_token=None, + client_id=None, client_secret=None, + request_token_url=None, request_token_params=None, + access_token_url=None, access_token_params=None, + authorize_url=None, authorize_params=None, + api_base_url=None, client_kwargs=None, server_metadata_url=None, + compliance_fix=None, client_auth_methods=None, user_agent=None, **kwargs): + + self.framework = framework + self.name = name + self.client_id = client_id + self.client_secret = client_secret + self.request_token_url = request_token_url + self.request_token_params = request_token_params + self.access_token_url = access_token_url + self.access_token_params = access_token_params + self.authorize_url = authorize_url + self.authorize_params = authorize_params + self.api_base_url = api_base_url + self.client_kwargs = client_kwargs or {} + + self.compliance_fix = compliance_fix + self.client_auth_methods = client_auth_methods + self._fetch_token = fetch_token + self._update_token = update_token + self._user_agent = user_agent or default_user_agent + + self._server_metadata_url = server_metadata_url + self.server_metadata = kwargs + + def _on_update_token(self, token, refresh_token=None, access_token=None): + raise NotImplementedError() + + def _get_oauth_client(self, **kwargs): + client_kwargs = {} + client_kwargs.update(self.client_kwargs) + client_kwargs.update(kwargs) + if self.request_token_url: + session = self.framework.oauth1_client_cls( + self.client_id, self.client_secret, + **client_kwargs + ) + else: + if self.authorize_url: + client_kwargs['authorization_endpoint'] = self.authorize_url + if self.access_token_url: + client_kwargs['token_endpoint'] = self.access_token_url + session = self.framework.oauth2_client_cls( + client_id=self.client_id, + client_secret=self.client_secret, + update_token=self._on_update_token, + **client_kwargs + ) + if self.client_auth_methods: + for f in self.client_auth_methods: + session.register_client_auth_method(f) + # only OAuth2 has compliance_fix currently + if self.compliance_fix: + self.compliance_fix(session) + + session.headers['User-Agent'] = self._user_agent + return session + + def _retrieve_oauth2_access_token_params(self, request, params): + request_state = params.pop('state', None) + state = self.framework.get_session_data(request, 'state') + if state != request_state: + raise MismatchingStateError() + + code_verifier = self.framework.get_session_data(request, 'code_verifier') + if code_verifier: + params['code_verifier'] = code_verifier + return params + + def retrieve_access_token_params(self, request, request_token=None): + """Retrieve parameters for fetching access token, those parameters come + from request 
and previously saved temporary data in session. + """ + params = self.framework.generate_access_token_params(self.request_token_url, request) + if self.request_token_url: + if request_token is None: + request_token = self.framework.get_session_data(request, 'request_token') + params['request_token'] = request_token + else: + params = self._retrieve_oauth2_access_token_params(request, params) + redirect_uri = self.framework.get_session_data(request, 'redirect_uri') + if redirect_uri: + params['redirect_uri'] = redirect_uri + + log.debug('Retrieve temporary data: {!r}'.format(params)) + return params + + def save_authorize_data(self, request, **kwargs): + """Save temporary data into session for the authorization step. These + data can be retrieved later when fetching access token. + """ + log.debug('Saving authorize data: {!r}'.format(kwargs)) + keys = [ + 'redirect_uri', 'request_token', + 'state', 'code_verifier', 'nonce' + ] + for k in keys: + if k in kwargs: + self.framework.set_session_data(request, k, kwargs[k]) + + @staticmethod + def _create_oauth2_authorization_url(client, authorization_endpoint, **kwargs): + rv = {} + if client.code_challenge_method: + code_verifier = kwargs.get('code_verifier') + if not code_verifier: + code_verifier = generate_token(48) + kwargs['code_verifier'] = code_verifier + rv['code_verifier'] = code_verifier + log.debug('Using code_verifier: {!r}'.format(code_verifier)) + + scope = kwargs.get('scope', client.scope) + if scope and scope.startswith('openid'): + # this is an OpenID Connect service + nonce = kwargs.get('nonce') + if not nonce: + nonce = generate_token(20) + kwargs['nonce'] = nonce + rv['nonce'] = nonce + + url, state = client.create_authorization_url( + authorization_endpoint, **kwargs) + rv['url'] = url + rv['state'] = state + return rv + + def request(self, method, url, token=None, **kwargs): + raise NotImplementedError() + + def get(self, url, **kwargs): + """Invoke GET http request. + + If ``api_base_url`` configured, shortcut is available:: + + client.get('users/lepture') + """ + return self.request('GET', url, **kwargs) + + def post(self, url, **kwargs): + """Invoke POST http request. + + If ``api_base_url`` configured, shortcut is available:: + + client.post('timeline', json={'text': 'Hi'}) + """ + return self.request('POST', url, **kwargs) + + def patch(self, url, **kwargs): + """Invoke PATCH http request. + + If ``api_base_url`` configured, shortcut is available:: + + client.patch('profile', json={'name': 'Hsiaoming Yang'}) + """ + return self.request('PATCH', url, **kwargs) + + def put(self, url, **kwargs): + """Invoke PUT http request. + + If ``api_base_url`` configured, shortcut is available:: + + client.put('profile', json={'name': 'Hsiaoming Yang'}) + """ + return self.request('PUT', url, **kwargs) + + def delete(self, url, **kwargs): + """Invoke DELETE http request. 
+ + If ``api_base_url`` configured, shortcut is available:: + + client.delete('posts/123') + """ + return self.request('DELETE', url, **kwargs) + + def _fetch_server_metadata(self, url): + with self._get_oauth_client() as session: + resp = session.request('GET', url, withhold_token=True) + return resp.json() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/base_oauth.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/base_oauth.py new file mode 100644 index 000000000..36c027b01 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/base_oauth.py @@ -0,0 +1,120 @@ +import functools +from .framework_integration import FrameworkIntegration + +__all__ = ['BaseOAuth'] + + +OAUTH_CLIENT_PARAMS = ( + 'client_id', 'client_secret', + 'request_token_url', 'request_token_params', + 'access_token_url', 'access_token_params', + 'refresh_token_url', 'refresh_token_params', + 'authorize_url', 'authorize_params', + 'api_base_url', 'client_kwargs', + 'server_metadata_url', +) + + +class BaseOAuth(object): + """Registry for oauth clients. + + Create an instance for registry:: + + oauth = OAuth() + """ + framework_client_cls = None + framework_integration_cls = FrameworkIntegration + + def __init__(self, fetch_token=None, update_token=None): + self._registry = {} + self._clients = {} + self.fetch_token = fetch_token + self.update_token = update_token + + def create_client(self, name): + """Create or get the given named OAuth client. For instance, the + OAuth registry has ``.register`` a twitter client, developers may + access the client with:: + + client = oauth.create_client('twitter') + + :param: name: Name of the remote application + :return: OAuth remote app + """ + if name in self._clients: + return self._clients[name] + + if name not in self._registry: + return None + + overwrite, config = self._registry[name] + client_cls = config.pop('client_cls', self.framework_client_cls) + if client_cls.OAUTH_APP_CONFIG: + kwargs = client_cls.OAUTH_APP_CONFIG + kwargs.update(config) + else: + kwargs = config + kwargs = self.generate_client_kwargs(name, overwrite, **kwargs) + client = client_cls(self.framework_integration_cls(name), name, **kwargs) + self._clients[name] = client + return client + + def register(self, name, overwrite=False, **kwargs): + """Registers a new remote application. + + :param name: Name of the remote application. + :param overwrite: Overwrite existing config with framework settings. + :param kwargs: Parameters for :class:`RemoteApp`. + + Find parameters for the given remote app class. When a remote app is + registered, it can be accessed with *named* attribute:: + + oauth.register('twitter', client_id='', ...) 
+ oauth.twitter.get('timeline') + """ + self._registry[name] = (overwrite, kwargs) + return self.create_client(name) + + def generate_client_kwargs(self, name, overwrite, **kwargs): + fetch_token = kwargs.pop('fetch_token', None) + update_token = kwargs.pop('update_token', None) + + config = self.load_config(name, OAUTH_CLIENT_PARAMS) + if config: + kwargs = _config_client(config, kwargs, overwrite) + + if not fetch_token and self.fetch_token: + fetch_token = functools.partial(self.fetch_token, name) + + kwargs['fetch_token'] = fetch_token + + if not kwargs.get('request_token_url'): + if not update_token and self.update_token: + update_token = functools.partial(self.update_token, name) + + kwargs['update_token'] = update_token + return kwargs + + def load_config(self, name, params): + return self.framework_integration_cls.load_config(self, name, params) + + def __getattr__(self, key): + try: + return object.__getattribute__(self, key) + except AttributeError: + if key in self._registry: + return self.create_client(key) + raise AttributeError('No such client: %s' % key) + + +def _config_client(config, kwargs, overwrite): + for k in OAUTH_CLIENT_PARAMS: + v = config.get(k, None) + if k not in kwargs: + kwargs[k] = v + elif overwrite and v: + if isinstance(kwargs[k], dict): + kwargs[k].update(v) + else: + kwargs[k] = v + return kwargs diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/errors.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/errors.py new file mode 100644 index 000000000..bb4dd2b12 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/errors.py @@ -0,0 +1,30 @@ +from authlib.common.errors import AuthlibBaseError + + +class OAuthError(AuthlibBaseError): + error = 'oauth_error' + + +class MissingRequestTokenError(OAuthError): + error = 'missing_request_token' + + +class MissingTokenError(OAuthError): + error = 'missing_token' + + +class TokenExpiredError(OAuthError): + error = 'token_expired' + + +class InvalidTokenError(OAuthError): + error = 'token_invalid' + + +class UnsupportedTokenTypeError(OAuthError): + error = 'unsupported_token_type' + + +class MismatchingStateError(OAuthError): + error = 'mismatching_state' + description = 'CSRF Warning! State not equal in request and response.' 
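The error classes above are what the framework clients surface to application code when a callback cannot be completed (for example ``MismatchingStateError``, whose description flags the CSRF state check). A minimal sketch of catching them in a Flask callback view, assuming a client registered under the hypothetical name ``demo`` and placeholder provider URLs (not part of this package)::

    from flask import Flask, jsonify, url_for
    from authlib.integrations.flask_client import OAuth, OAuthError

    app = Flask(__name__)
    app.secret_key = 'dev-only'  # session storage for state / nonce / code_verifier
    oauth = OAuth(app)
    oauth.register(
        'demo',
        client_id='...',
        client_secret='...',
        access_token_url='https://provider.example/oauth/token',
        authorize_url='https://provider.example/oauth/authorize',
        client_kwargs={'scope': 'profile'},
    )

    @app.route('/login')
    def login():
        # authorize_redirect() saves the authorize data (state, and nonce or
        # code_verifier when used) into the session before redirecting.
        return oauth.demo.authorize_redirect(url_for('callback', _external=True))

    @app.route('/callback')
    def callback():
        try:
            token = oauth.demo.authorize_access_token()
        except OAuthError as err:  # e.g. a state mismatch or provider error response
            return jsonify(error=err.error, description=err.description), 400
        return jsonify(token)

This is only an illustrative sketch of the client registry and error classes defined in this patch, not code shipped by the package itself.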
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/framework_integration.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/framework_integration.py new file mode 100644 index 000000000..2f27689c2 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/framework_integration.py @@ -0,0 +1,25 @@ + +class FrameworkIntegration(object): + oauth1_client_cls = None + oauth2_client_cls = None + + def __init__(self, name): + self.name = name + + def set_session_data(self, request, key, value): + sess_key = '_{}_authlib_{}_'.format(self.name, key) + request.session[sess_key] = value + + def get_session_data(self, request, key): + sess_key = '_{}_authlib_{}_'.format(self.name, key) + return request.session.pop(sess_key, None) + + def update_token(self, token, refresh_token=None, access_token=None): + raise NotImplementedError() + + def generate_access_token_params(self, request_token_url, request): + raise NotImplementedError() + + @staticmethod + def load_config(oauth, name, params): + raise NotImplementedError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/remote_app.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/remote_app.py new file mode 100644 index 000000000..01b063589 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/base_client/remote_app.py @@ -0,0 +1,206 @@ +import time +import logging +from authlib.common.urls import urlparse +from authlib.jose import JsonWebToken, JsonWebKey +from authlib.oidc.core import UserInfo, CodeIDToken, ImplicitIDToken +from .base_app import BaseApp +from .errors import ( + MissingRequestTokenError, + MissingTokenError, +) + +__all__ = ['RemoteApp'] + +log = logging.getLogger(__name__) + + +class RemoteApp(BaseApp): + def load_server_metadata(self): + if self._server_metadata_url and '_loaded_at' not in self.server_metadata: + metadata = self._fetch_server_metadata(self._server_metadata_url) + metadata['_loaded_at'] = time.time() + self.server_metadata.update(metadata) + return self.server_metadata + + def _on_update_token(self, token, refresh_token=None, access_token=None): + if callable(self._update_token): + self._update_token( + token, + refresh_token=refresh_token, + access_token=access_token, + ) + self.framework.update_token( + token, + refresh_token=refresh_token, + access_token=access_token, + ) + + def _create_oauth1_authorization_url(self, client, authorization_endpoint, **kwargs): + params = {} + if self.request_token_params: + params.update(self.request_token_params) + token = client.fetch_request_token( + self.request_token_url, **params + ) + log.debug('Fetch request token: {!r}'.format(token)) + url = client.create_authorization_url(authorization_endpoint, **kwargs) + return {'url': url, 'request_token': token} + + def create_authorization_url(self, redirect_uri=None, **kwargs): + """Generate the authorization url and state for HTTP redirect. + + :param redirect_uri: Callback or redirect URI for authorization. + :param kwargs: Extra parameters to include. 
+ :return: dict + """ + metadata = self.load_server_metadata() + authorization_endpoint = self.authorize_url + if not authorization_endpoint and not self.request_token_url: + authorization_endpoint = metadata.get('authorization_endpoint') + + if not authorization_endpoint: + raise RuntimeError('Missing "authorize_url" value') + + if self.authorize_params: + kwargs.update(self.authorize_params) + + with self._get_oauth_client(**metadata) as client: + client.redirect_uri = redirect_uri + + if self.request_token_url: + return self._create_oauth1_authorization_url( + client, authorization_endpoint, **kwargs) + else: + return self._create_oauth2_authorization_url( + client, authorization_endpoint, **kwargs) + + def fetch_access_token(self, redirect_uri=None, request_token=None, **params): + """Fetch access token in one step. + + :param redirect_uri: Callback or Redirect URI that is used in + previous :meth:`authorize_redirect`. + :param request_token: A previous request token for OAuth 1. + :param params: Extra parameters to fetch access token. + :return: A token dict. + """ + metadata = self.load_server_metadata() + token_endpoint = self.access_token_url + if not token_endpoint and not self.request_token_url: + token_endpoint = metadata.get('token_endpoint') + + with self._get_oauth_client(**metadata) as client: + if self.request_token_url: + if redirect_uri is not None: + client.redirect_uri = redirect_uri + if request_token is None: + raise MissingRequestTokenError() + # merge request token with verifier + token = {} + token.update(request_token) + token.update(params) + client.token = token + kwargs = self.access_token_params or {} + token = client.fetch_access_token(token_endpoint, **kwargs) + client.redirect_uri = None + else: + if redirect_uri is not None: + client.redirect_uri = redirect_uri + kwargs = {} + if self.access_token_params: + kwargs.update(self.access_token_params) + kwargs.update(params) + token = client.fetch_token(token_endpoint, **kwargs) + return token + + def request(self, method, url, token=None, **kwargs): + if self.api_base_url and not url.startswith(('https://', 'http://')): + url = urlparse.urljoin(self.api_base_url, url) + + withhold_token = kwargs.get('withhold_token') + if not withhold_token: + metadata = self.load_server_metadata() + else: + metadata = {} + + with self._get_oauth_client(**metadata) as session: + request = kwargs.pop('request', None) + if withhold_token: + return session.request(method, url, **kwargs) + + if token is None and self._fetch_token and request: + token = self._fetch_token(request) + if token is None: + raise MissingTokenError() + + session.token = token + return session.request(method, url, **kwargs) + + def fetch_jwk_set(self, force=False): + metadata = self.load_server_metadata() + jwk_set = metadata.get('jwks') + if jwk_set and not force: + return jwk_set + uri = metadata.get('jwks_uri') + if not uri: + raise RuntimeError('Missing "jwks_uri" in metadata') + + jwk_set = self._fetch_server_metadata(uri) + self.server_metadata['jwks'] = jwk_set + return jwk_set + + def userinfo(self, **kwargs): + """Fetch user info from ``userinfo_endpoint``.""" + metadata = self.load_server_metadata() + resp = self.get(metadata['userinfo_endpoint'], **kwargs) + data = resp.json() + + compliance_fix = metadata.get('userinfo_compliance_fix') + if compliance_fix: + data = compliance_fix(self, data) + return UserInfo(data) + + def _parse_id_token(self, request, token, claims_options=None, leeway=120): + """Return an instance of UserInfo from token's 
``id_token``.""" + if 'id_token' not in token: + return None + + def load_key(header, payload): + jwk_set = JsonWebKey.import_key_set(self.fetch_jwk_set()) + try: + return jwk_set.find_by_kid(header.get('kid')) + except ValueError: + # re-try with new jwk set + jwk_set = JsonWebKey.import_key_set(self.fetch_jwk_set(force=True)) + return jwk_set.find_by_kid(header.get('kid')) + + nonce = self.framework.get_session_data(request, 'nonce') + claims_params = dict( + nonce=nonce, + client_id=self.client_id, + ) + if 'access_token' in token: + claims_params['access_token'] = token['access_token'] + claims_cls = CodeIDToken + else: + claims_cls = ImplicitIDToken + + metadata = self.load_server_metadata() + if claims_options is None and 'issuer' in metadata: + claims_options = {'iss': {'values': [metadata['issuer']]}} + + alg_values = metadata.get('id_token_signing_alg_values_supported') + if not alg_values: + alg_values = ['RS256'] + + jwt = JsonWebToken(alg_values) + claims = jwt.decode( + token['id_token'], key=load_key, + claims_cls=claims_cls, + claims_options=claims_options, + claims_params=claims_params, + ) + # https://github.com/lepture/authlib/issues/259 + if claims.get('nonce_supported') is False: + claims.params['nonce'] = None + claims.validate(leeway=leeway) + return UserInfo(claims) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_client/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_client/__init__.py new file mode 100644 index 000000000..18a30ca4a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_client/__init__.py @@ -0,0 +1,15 @@ +# flake8: noqa + +from .integration import DjangoIntegration, DjangoRemoteApp, token_update +from ..base_client import BaseOAuth, OAuthError + + +class OAuth(BaseOAuth): + framework_integration_cls = DjangoIntegration + framework_client_cls = DjangoRemoteApp + + +__all__ = [ + 'OAuth', 'DjangoRemoteApp', 'DjangoIntegration', + 'token_update', 'OAuthError', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_client/integration.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_client/integration.py new file mode 100644 index 000000000..0665172f7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_client/integration.py @@ -0,0 +1,76 @@ +from django.conf import settings +from django.dispatch import Signal +from django.http import HttpResponseRedirect +from ..base_client import FrameworkIntegration, RemoteApp, OAuthError +from ..requests_client import OAuth1Session, OAuth2Session + + +token_update = Signal() + + +class DjangoIntegration(FrameworkIntegration): + oauth1_client_cls = OAuth1Session + oauth2_client_cls = OAuth2Session + + def update_token(self, token, refresh_token=None, access_token=None): + token_update.send( + sender=self.__class__, + name=self.name, + token=token, + refresh_token=refresh_token, + access_token=access_token, + ) + + def generate_access_token_params(self, request_token_url, request): + if request_token_url: + return request.GET.dict() + + if request.method == 'GET': + error = request.GET.get('error') + if error: + description = request.GET.get('error_description') + raise OAuthError(error=error, description=description) + + params = { + 'code': request.GET.get('code'), + 'state': request.GET.get('state'), + } + else: + params = { + 'code': request.POST.get('code'), + 'state': 
request.POST.get('state'), + } + return params + + @staticmethod + def load_config(oauth, name, params): + config = getattr(settings, 'AUTHLIB_OAUTH_CLIENTS', None) + if config: + return config.get(name) + + +class DjangoRemoteApp(RemoteApp): + def authorize_redirect(self, request, redirect_uri=None, **kwargs): + """Create a HTTP Redirect for Authorization Endpoint. + + :param request: HTTP request instance from Django view. + :param redirect_uri: Callback or redirect URI for authorization. + :param kwargs: Extra parameters to include. + :return: A HTTP redirect response. + """ + rv = self.create_authorization_url(redirect_uri, **kwargs) + self.save_authorize_data(request, redirect_uri=redirect_uri, **rv) + return HttpResponseRedirect(rv['url']) + + def authorize_access_token(self, request, **kwargs): + """Fetch access token in one step. + + :param request: HTTP request instance from Django view. + :return: A token dict. + """ + params = self.retrieve_access_token_params(request) + params.update(kwargs) + return self.fetch_access_token(**params) + + def parse_id_token(self, request, token, claims_options=None, leeway=120): + return self._parse_id_token(request, token, claims_options, leeway) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_helpers.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_helpers.py new file mode 100644 index 000000000..a41499da7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_helpers.py @@ -0,0 +1,72 @@ +try: + from collections.abc import MutableMapping as DictMixin +except ImportError: + from collections import MutableMapping as DictMixin +from authlib.common.encoding import to_unicode, json_loads + + +def create_oauth_request(request, request_cls, use_json=False): + if isinstance(request, request_cls): + return request + + if request.method == 'POST': + if use_json: + body = json_loads(request.body) + else: + body = request.POST.dict() + else: + body = None + + headers = parse_request_headers(request) + url = request.build_absolute_uri() + return request_cls(request.method, url, body, headers) + + +def parse_request_headers(request): + return WSGIHeaderDict(request.META) + + +class WSGIHeaderDict(DictMixin): + CGI_KEYS = ('CONTENT_TYPE', 'CONTENT_LENGTH') + + def __init__(self, environ): + self.environ = environ + + def keys(self): + return [x for x in self] + + def _ekey(self, key): + key = key.replace('-', '_').upper() + if key in self.CGI_KEYS: + return key + return 'HTTP_' + key + + def __getitem__(self, key): + return _unicode_value(self.environ[self._ekey(key)]) + + def __delitem__(self, key): # pragma: no cover + raise ValueError('Can not delete item') + + def __setitem__(self, key, value): # pragma: no cover + raise ValueError('Can not set item') + + def __iter__(self): + for key in self.environ: + if key[:5] == 'HTTP_': + yield _unify_key(key[5:]) + elif key in self.CGI_KEYS: + yield _unify_key(key) + + def __len__(self): + return len(self.keys()) + + def __contains__(self, key): + return self._ekey(key) in self.environ + + +def _unicode_value(value): + return to_unicode(value, 'latin-1') + + +def _unify_key(key): + return key.replace('_', '-').title() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth1/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth1/__init__.py new file mode 100644 index 000000000..39f0e1307 --- /dev/null +++ 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth1/__init__.py @@ -0,0 +1,9 @@ +# flake8: noqa + +from .authorization_server import ( + BaseServer, CacheAuthorizationServer +) +from .resource_protector import ResourceProtector + + +__all__ = ['BaseServer', 'CacheAuthorizationServer', 'ResourceProtector'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth1/authorization_server.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth1/authorization_server.py new file mode 100644 index 000000000..0ac8b5c1f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth1/authorization_server.py @@ -0,0 +1,121 @@ +import logging +from authlib.oauth1 import ( + OAuth1Request, + AuthorizationServer as _AuthorizationServer, +) +from authlib.oauth1 import TemporaryCredential +from authlib.common.security import generate_token +from authlib.common.urls import url_encode +from django.core.cache import cache +from django.conf import settings +from django.http import HttpResponse +from .nonce import exists_nonce_in_cache +from ..django_helpers import create_oauth_request + +log = logging.getLogger(__name__) + + +class BaseServer(_AuthorizationServer): + def __init__(self, client_model, token_model, token_generator=None): + self.client_model = client_model + self.token_model = token_model + + if token_generator is None: + def token_generator(): + return { + 'oauth_token': generate_token(42), + 'oauth_token_secret': generate_token(48) + } + + self.token_generator = token_generator + self._config = getattr(settings, 'AUTHLIB_OAUTH1_PROVIDER', {}) + self._nonce_expires_in = self._config.get('nonce_expires_in', 86400) + methods = self._config.get('signature_methods') + if methods: + self.SUPPORTED_SIGNATURE_METHODS = methods + + def get_client_by_id(self, client_id): + try: + return self.client_model.objects.get(client_id=client_id) + except self.client_model.DoesNotExist: + return None + + def exists_nonce(self, nonce, request): + return exists_nonce_in_cache(nonce, request, self._nonce_expires_in) + + def create_token_credential(self, request): + temporary_credential = request.credential + token = self.token_generator() + item = self.token_model( + oauth_token=token['oauth_token'], + oauth_token_secret=token['oauth_token_secret'], + user_id=temporary_credential.get_user_id(), + client_id=temporary_credential.get_client_id() + ) + item.save() + return item + + def check_authorization_request(self, request): + req = self.create_oauth1_request(request) + self.validate_authorization_request(req) + return req + + def create_oauth1_request(self, request): + return create_oauth_request(request, OAuth1Request) + + def handle_response(self, status_code, payload, headers): + resp = HttpResponse(url_encode(payload), status=status_code) + for k, v in headers: + resp[k] = v + return resp + + +class CacheAuthorizationServer(BaseServer): + def __init__(self, client_model, token_model, token_generator=None): + super(CacheAuthorizationServer, self).__init__( + client_model, token_model, token_generator) + self._temporary_expires_in = self._config.get( + 'temporary_credential_expires_in', 86400) + self._temporary_credential_key_prefix = self._config.get( + 'temporary_credential_key_prefix', 'temporary_credential:') + + def create_temporary_credential(self, request): + key_prefix = self._temporary_credential_key_prefix + token = self.token_generator() + + client_id = 
request.client_id + redirect_uri = request.redirect_uri + key = key_prefix + token['oauth_token'] + token['client_id'] = client_id + if redirect_uri: + token['oauth_callback'] = redirect_uri + + cache.set(key, token, timeout=self._temporary_expires_in) + return TemporaryCredential(token) + + def get_temporary_credential(self, request): + if not request.token: + return None + + key_prefix = self._temporary_credential_key_prefix + key = key_prefix + request.token + value = cache.get(key) + if value: + return TemporaryCredential(value) + + def delete_temporary_credential(self, request): + if request.token: + key_prefix = self._temporary_credential_key_prefix + key = key_prefix + request.token + cache.delete(key) + + def create_authorization_verifier(self, request): + key_prefix = self._temporary_credential_key_prefix + verifier = generate_token(36) + credential = request.credential + user = request.user + key = key_prefix + credential.get_oauth_token() + credential['oauth_verifier'] = verifier + credential['user_id'] = user.pk + cache.set(key, credential, timeout=self._temporary_expires_in) + return verifier diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth1/nonce.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth1/nonce.py new file mode 100644 index 000000000..535bf7e6b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth1/nonce.py @@ -0,0 +1,15 @@ +from django.core.cache import cache + + +def exists_nonce_in_cache(nonce, request, timeout): + key_prefix = 'nonce:' + timestamp = request.timestamp + client_id = request.client_id + token = request.token + key = '{}{}-{}-{}'.format(key_prefix, nonce, timestamp, client_id) + if token: + key = '{}-{}'.format(key, token) + + rv = bool(cache.get(key)) + cache.set(key, 1, timeout=timeout) + return rv diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth1/resource_protector.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth1/resource_protector.py new file mode 100644 index 000000000..05b6c4c83 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth1/resource_protector.py @@ -0,0 +1,66 @@ +import functools +from authlib.oauth1.errors import OAuth1Error +from authlib.oauth1 import ResourceProtector as _ResourceProtector +from django.http import JsonResponse +from django.conf import settings +from .nonce import exists_nonce_in_cache +from ..django_helpers import parse_request_headers + + +class ResourceProtector(_ResourceProtector): + def __init__(self, client_model, token_model): + self.client_model = client_model + self.token_model = token_model + + config = getattr(settings, 'AUTHLIB_OAUTH1_PROVIDER', {}) + methods = config.get('signature_methods', []) + if methods and isinstance(methods, (list, tuple)): + self.SUPPORTED_SIGNATURE_METHODS = methods + + self._nonce_expires_in = config.get('nonce_expires_in', 86400) + + def get_client_by_id(self, client_id): + try: + return self.client_model.objects.get(client_id=client_id) + except self.client_model.DoesNotExist: + return None + + def get_token_credential(self, request): + try: + return self.token_model.objects.get( + client_id=request.client_id, + oauth_token=request.token + ) + except self.token_model.DoesNotExist: + return None + + def exists_nonce(self, nonce, request): + return exists_nonce_in_cache(nonce, request, 
self._nonce_expires_in) + + def acquire_credential(self, request): + if request.method in ['POST', 'PUT']: + body = request.POST.dict() + else: + body = None + + headers = parse_request_headers(request) + url = request.build_absolute_uri() + req = self.validate_request(request.method, url, body, headers) + return req.credential + + def __call__(self, realm=None): + def wrapper(f): + @functools.wraps(f) + def decorated(request, *args, **kwargs): + try: + credential = self.acquire_credential(request) + request.oauth1_credential = credential + except OAuth1Error as error: + body = dict(error.get_body()) + resp = JsonResponse(body, status=error.status_code) + resp['Cache-Control'] = 'no-store' + resp['Pragma'] = 'no-cache' + return resp + return f(request, *args, **kwargs) + return decorated + return wrapper diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth2/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth2/__init__.py new file mode 100644 index 000000000..05c1fdfe9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth2/__init__.py @@ -0,0 +1,10 @@ +# flake8: noqa + +from .authorization_server import AuthorizationServer +from .resource_protector import ResourceProtector, BearerTokenValidator +from .endpoints import RevocationEndpoint +from .signals import ( + client_authenticated, + token_authenticated, + token_revoked +) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth2/authorization_server.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth2/authorization_server.py new file mode 100644 index 000000000..fae60aa42 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth2/authorization_server.py @@ -0,0 +1,152 @@ +import json +from django.http import HttpResponse +from django.utils.module_loading import import_string +from django.conf import settings +from authlib.oauth2 import ( + OAuth2Request, + HttpRequest, + AuthorizationServer as _AuthorizationServer, +) +from authlib.oauth2.rfc6750 import BearerToken +from authlib.oauth2.rfc8414 import AuthorizationServerMetadata +from authlib.common.security import generate_token as _generate_token +from authlib.common.encoding import json_dumps +from .signals import client_authenticated, token_revoked +from ..django_helpers import create_oauth_request + + +class AuthorizationServer(_AuthorizationServer): + """Django implementation of :class:`authlib.oauth2.rfc6749.AuthorizationServer`. 
+ Initialize it with client model and token model:: + + from authlib.integrations.django_oauth2 import AuthorizationServer + from your_project.models import OAuth2Client, OAuth2Token + + server = AuthorizationServer(OAuth2Client, OAuth2Token) + """ + metadata_class = AuthorizationServerMetadata + + def __init__(self, client_model, token_model, generate_token=None, metadata=None): + self.config = getattr(settings, 'AUTHLIB_OAUTH2_PROVIDER', {}) + self.client_model = client_model + self.token_model = token_model + if generate_token is None: + generate_token = self.create_bearer_token_generator() + + if metadata is None: + metadata_file = self.config.get('metadata_file') + if metadata_file: + with open(metadata_file) as f: + metadata = json.load(f) + + if metadata: + metadata = self.metadata_class(metadata) + metadata.validate() + + super(AuthorizationServer, self).__init__( + query_client=self.get_client_by_id, + save_token=self.save_oauth2_token, + generate_token=generate_token, + metadata=metadata, + ) + + def get_client_by_id(self, client_id): + """Default method for ``AuthorizationServer.query_client``. Developers MAY + rewrite this function to meet their own needs. + """ + try: + return self.client_model.objects.get(client_id=client_id) + except self.client_model.DoesNotExist: + return None + + def save_oauth2_token(self, token, request): + """Default method for ``AuthorizationServer.save_token``. Developers MAY + rewrite this function to meet their own needs. + """ + client = request.client + if request.user: + user_id = request.user.pk + else: + user_id = client.user_id + item = self.token_model( + client_id=client.client_id, + user_id=user_id, + **token + ) + item.save() + return item + + def create_oauth2_request(self, request): + return create_oauth_request(request, OAuth2Request) + + def create_json_request(self, request): + req = create_oauth_request(request, HttpRequest, True) + req.user = request.user + return req + + def handle_response(self, status_code, payload, headers): + if isinstance(payload, dict): + payload = json_dumps(payload) + resp = HttpResponse(payload, status=status_code) + for k, v in headers: + resp[k] = v + return resp + + def send_signal(self, name, *args, **kwargs): + if name == 'after_authenticate_client': + client_authenticated.send(sender=self.__class__, *args, **kwargs) + elif name == 'after_revoke_token': + token_revoked.send(sender=self.__class__, *args, **kwargs) + + def create_bearer_token_generator(self): + """Default method to create BearerToken generator.""" + conf = self.config.get('access_token_generator', True) + access_token_generator = create_token_generator(conf, 42) + + conf = self.config.get('refresh_token_generator', False) + refresh_token_generator = create_token_generator(conf, 48) + + conf = self.config.get('token_expires_in') + expires_generator = create_token_expires_in_generator(conf) + + return BearerToken( + access_token_generator=access_token_generator, + refresh_token_generator=refresh_token_generator, + expires_generator=expires_generator, + ) + + def get_consent_grant(self, request): + grant = self.get_authorization_grant(request) + grant.validate_consent_request() + if not hasattr(grant, 'prompt'): + grant.prompt = None + return grant + + def validate_consent_request(self, request, end_user=None): + req = self.create_oauth2_request(request) + req.user = end_user + return self.get_consent_grant(req) + + +def create_token_generator(token_generator_conf, length=42): + if callable(token_generator_conf): + return 
token_generator_conf + + if isinstance(token_generator_conf, str): + return import_string(token_generator_conf) + elif token_generator_conf is True: + def token_generator(*args, **kwargs): + return _generate_token(length) + return token_generator + + +def create_token_expires_in_generator(expires_in_conf=None): + data = {} + data.update(BearerToken.GRANT_TYPES_EXPIRES_IN) + if expires_in_conf: + data.update(expires_in_conf) + + def expires_in(client, grant_type): + return data.get(grant_type, BearerToken.DEFAULT_EXPIRES_IN) + + return expires_in diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth2/endpoints.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth2/endpoints.py new file mode 100644 index 000000000..b3a8ccd37 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth2/endpoints.py @@ -0,0 +1,59 @@ +from authlib.oauth2.rfc7009 import RevocationEndpoint as _RevocationEndpoint + + +class RevocationEndpoint(_RevocationEndpoint): + """The revocation endpoint for OAuth authorization servers allows clients + to notify the authorization server that a previously obtained refresh or + access token is no longer needed. + + Register it into authorization server, and create token endpoint response + for token revocation:: + + from django.views.decorators.http import require_http_methods + + # see register into authorization server instance + server.register_endpoint(RevocationEndpoint) + + @require_http_methods(["POST"]) + def revoke_token(request): + return server.create_endpoint_response( + RevocationEndpoint.ENDPOINT_NAME, + request + ) + """ + + def query_token(self, token, token_type_hint, client): + """Query requested token from database.""" + token_model = self.server.token_model + if token_type_hint == 'access_token': + rv = _query_access_token(token_model, token) + elif token_type_hint == 'refresh_token': + rv = _query_refresh_token(token_model, token) + else: + rv = _query_access_token(token_model, token) + if not rv: + rv = _query_refresh_token(token_model, token) + + client_id = client.get_client_id() + if rv and rv.client_id == client_id: + return rv + return None + + def revoke_token(self, token): + """Mark the give token as revoked.""" + token.revoked = True + token.save() + + +def _query_access_token(token_model, token): + try: + return token_model.objects.get(access_token=token) + except token_model.DoesNotExist: + return None + + +def _query_refresh_token(token_model, token): + try: + return token_model.objects.get(refresh_token=token) + except token_model.DoesNotExist: + return None diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth2/resource_protector.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth2/resource_protector.py new file mode 100644 index 000000000..51b6ebca0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth2/resource_protector.py @@ -0,0 +1,79 @@ +import functools +from django.http import JsonResponse +from authlib.oauth2 import ( + OAuth2Error, + ResourceProtector as _ResourceProtector, +) +from authlib.oauth2.rfc6749 import ( + MissingAuthorizationError, + HttpRequest, +) +from authlib.oauth2.rfc6750 import ( + BearerTokenValidator as _BearerTokenValidator +) +from .signals import token_authenticated +from ..django_helpers import parse_request_headers + + +class 
ResourceProtector(_ResourceProtector): + def acquire_token(self, request, scope=None, operator='AND'): + """A method to acquire current valid token with the given scope. + + :param request: Django HTTP request instance + :param scope: string or list of scope values + :param operator: value of "AND" or "OR" + :return: token object + """ + headers = parse_request_headers(request) + url = request.build_absolute_uri() + req = HttpRequest(request.method, url, request.body, headers) + if not callable(operator): + operator = operator.upper() + token = self.validate_request(scope, req, operator) + token_authenticated.send(sender=self.__class__, token=token) + return token + + def __call__(self, scope=None, operator='AND', optional=False): + def wrapper(f): + @functools.wraps(f) + def decorated(request, *args, **kwargs): + try: + token = self.acquire_token(request, scope, operator) + request.oauth_token = token + except MissingAuthorizationError as error: + if optional: + request.oauth_token = None + return f(request, *args, **kwargs) + return return_error_response(error) + except OAuth2Error as error: + return return_error_response(error) + return f(request, *args, **kwargs) + return decorated + return wrapper + + +class BearerTokenValidator(_BearerTokenValidator): + def __init__(self, token_model, realm=None): + self.token_model = token_model + super(BearerTokenValidator, self).__init__(realm) + + def authenticate_token(self, token_string): + try: + return self.token_model.objects.get(access_token=token_string) + except self.token_model.DoesNotExist: + return None + + def request_invalid(self, request): + return False + + def token_revoked(self, token): + return token.revoked + + +def return_error_response(error): + body = dict(error.get_body()) + resp = JsonResponse(body, status=error.status_code) + headers = error.get_headers() + for k, v in headers: + resp[k] = v + return resp diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth2/signals.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth2/signals.py new file mode 100644 index 000000000..0e9c2659b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/django_oauth2/signals.py @@ -0,0 +1,11 @@ +from django.dispatch import Signal + + +#: signal when client is authenticated +client_authenticated = Signal() + +#: signal when token is revoked +token_revoked = Signal() + +#: signal when token is authenticated +token_authenticated = Signal() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_client/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_client/__init__.py new file mode 100644 index 000000000..9aa6f7138 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_client/__init__.py @@ -0,0 +1,11 @@ +# flake8: noqa + +from .oauth_registry import OAuth +from .remote_app import FlaskRemoteApp +from .integration import token_update, FlaskIntegration +from ..base_client import OAuthError + +__all__ = [ + 'OAuth', 'FlaskRemoteApp', 'FlaskIntegration', + 'token_update', 'OAuthError', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_client/integration.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_client/integration.py new file mode 100644 index 000000000..875e2ddce --- /dev/null +++ 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_client/integration.py @@ -0,0 +1,61 @@ +from flask import current_app, session +from flask.signals import Namespace +from ..base_client import FrameworkIntegration, OAuthError +from ..requests_client import OAuth1Session, OAuth2Session + +_signal = Namespace() +#: signal when token is updated +token_update = _signal.signal('token_update') + + +class FlaskIntegration(FrameworkIntegration): + oauth1_client_cls = OAuth1Session + oauth2_client_cls = OAuth2Session + + def set_session_data(self, request, key, value): + sess_key = '_{}_authlib_{}_'.format(self.name, key) + session[sess_key] = value + + def get_session_data(self, request, key): + sess_key = '_{}_authlib_{}_'.format(self.name, key) + return session.pop(sess_key, None) + + def update_token(self, token, refresh_token=None, access_token=None): + token_update.send( + current_app, + name=self.name, + token=token, + refresh_token=refresh_token, + access_token=access_token, + ) + + def generate_access_token_params(self, request_token_url, request): + if request_token_url: + return request.args.to_dict(flat=True) + + if request.method == 'GET': + error = request.args.get('error') + if error: + description = request.args.get('error_description') + raise OAuthError(error=error, description=description) + + params = { + 'code': request.args['code'], + 'state': request.args.get('state'), + } + else: + params = { + 'code': request.form['code'], + 'state': request.form.get('state'), + } + return params + + @staticmethod + def load_config(oauth, name, params): + rv = {} + for k in params: + conf_key = '{}_{}'.format(name, k).upper() + v = oauth.app.config.get(conf_key, None) + if v is not None: + rv[k] = v + return rv diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_client/oauth_registry.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_client/oauth_registry.py new file mode 100644 index 000000000..8f5d1fe3e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_client/oauth_registry.py @@ -0,0 +1,118 @@ +import uuid +from flask import session +from werkzeug.local import LocalProxy +from .integration import FlaskIntegration +from .remote_app import FlaskRemoteApp +from ..base_client import BaseOAuth + +__all__ = ['OAuth'] +_req_token_tpl = '_{}_authlib_req_token_' + + +class OAuth(BaseOAuth): + """A Flask OAuth registry for oauth clients. + + Create an instance with Flask:: + + oauth = OAuth(app, cache=cache) + + You can also pass the instance of Flask later:: + + oauth = OAuth() + oauth.init_app(app, cache=cache) + + :param app: Flask application instance + :param cache: A cache instance that has .get .set and .delete methods + :param fetch_token: a shared function to get current user's token + :param update_token: a share function to update current user's token + """ + framework_client_cls = FlaskRemoteApp + framework_integration_cls = FlaskIntegration + + def __init__(self, app=None, cache=None, fetch_token=None, update_token=None): + super(OAuth, self).__init__(fetch_token, update_token) + + self.app = app + self.cache = cache + if app: + self.init_app(app) + + def init_app(self, app, cache=None, fetch_token=None, update_token=None): + """Initialize lazy for Flask app. This is usually used for Flask application + factory pattern. 
+ """ + self.app = app + if cache is not None: + self.cache = cache + + if fetch_token: + self.fetch_token = fetch_token + if update_token: + self.update_token = update_token + + app.extensions = getattr(app, 'extensions', {}) + app.extensions['authlib.integrations.flask_client'] = self + + def create_client(self, name): + if not self.app: + raise RuntimeError('OAuth is not init with Flask app.') + return super(OAuth, self).create_client(name) + + def register(self, name, overwrite=False, **kwargs): + self._registry[name] = (overwrite, kwargs) + if self.app: + return self.create_client(name) + return LocalProxy(lambda: self.create_client(name)) + + def generate_client_kwargs(self, name, overwrite, **kwargs): + kwargs = super(OAuth, self).generate_client_kwargs(name, overwrite, **kwargs) + + if kwargs.get('request_token_url'): + if self.cache: + _add_cache_request_token(self.cache, name, kwargs) + else: + _add_session_request_token(name, kwargs) + return kwargs + + +def _add_cache_request_token(cache, name, kwargs): + if not kwargs.get('fetch_request_token'): + def fetch_request_token(): + key = _req_token_tpl.format(name) + sid = session.pop(key, None) + if not sid: + return None + + token = cache.get(sid) + cache.delete(sid) + return token + + kwargs['fetch_request_token'] = fetch_request_token + + if not kwargs.get('save_request_token'): + def save_request_token(token): + key = _req_token_tpl.format(name) + sid = uuid.uuid4().hex + session[key] = sid + cache.set(sid, token, 600) + + kwargs['save_request_token'] = save_request_token + return kwargs + + +def _add_session_request_token(name, kwargs): + if not kwargs.get('fetch_request_token'): + def fetch_request_token(): + key = _req_token_tpl.format(name) + return session.pop(key, None) + + kwargs['fetch_request_token'] = fetch_request_token + + if not kwargs.get('save_request_token'): + def save_request_token(token): + key = _req_token_tpl.format(name) + session[key] = token + + kwargs['save_request_token'] = save_request_token + + return kwargs diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_client/remote_app.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_client/remote_app.py new file mode 100644 index 000000000..0d8eecb73 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_client/remote_app.py @@ -0,0 +1,81 @@ +from flask import redirect +from flask import request as flask_req +from flask import _app_ctx_stack +from ..base_client import RemoteApp + + +class FlaskRemoteApp(RemoteApp): + """Flask integrated RemoteApp of :class:`~authlib.client.OAuthClient`. + It has built-in hooks for OAuthClient. The only required configuration + is token model. 
+ """ + + def __init__(self, framework, name=None, fetch_token=None, **kwargs): + fetch_request_token = kwargs.pop('fetch_request_token', None) + save_request_token = kwargs.pop('save_request_token', None) + super(FlaskRemoteApp, self).__init__(framework, name, fetch_token, **kwargs) + + self._fetch_request_token = fetch_request_token + self._save_request_token = save_request_token + + def _on_update_token(self, token, refresh_token=None, access_token=None): + self.token = token + super(FlaskRemoteApp, self)._on_update_token( + token, refresh_token, access_token + ) + + @property + def token(self): + ctx = _app_ctx_stack.top + attr = 'authlib_oauth_token_{}'.format(self.name) + token = getattr(ctx, attr, None) + if token: + return token + if self._fetch_token: + token = self._fetch_token() + self.token = token + return token + + @token.setter + def token(self, token): + ctx = _app_ctx_stack.top + attr = 'authlib_oauth_token_{}'.format(self.name) + setattr(ctx, attr, token) + + def request(self, method, url, token=None, **kwargs): + if token is None and not kwargs.get('withhold_token'): + token = self.token + return super(FlaskRemoteApp, self).request( + method, url, token=token, **kwargs) + + def authorize_redirect(self, redirect_uri=None, **kwargs): + """Create a HTTP Redirect for Authorization Endpoint. + + :param redirect_uri: Callback or redirect URI for authorization. + :param kwargs: Extra parameters to include. + :return: A HTTP redirect response. + """ + rv = self.create_authorization_url(redirect_uri, **kwargs) + + if self.request_token_url: + request_token = rv.pop('request_token', None) + self._save_request_token(request_token) + + self.save_authorize_data(flask_req, redirect_uri=redirect_uri, **rv) + return redirect(rv['url']) + + def authorize_access_token(self, **kwargs): + """Authorize access token.""" + if self.request_token_url: + request_token = self._fetch_request_token() + else: + request_token = None + + params = self.retrieve_access_token_params(flask_req, request_token) + params.update(kwargs) + token = self.fetch_access_token(**params) + self.token = token + return token + + def parse_id_token(self, token, claims_options=None, leeway=120): + return self._parse_id_token(flask_req, token, claims_options, leeway) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_helpers.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_helpers.py new file mode 100644 index 000000000..6883e4b68 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_helpers.py @@ -0,0 +1,25 @@ +from flask import request as flask_req +from authlib.common.encoding import to_unicode + + +def create_oauth_request(request, request_cls, use_json=False): + if isinstance(request, request_cls): + return request + + if not request: + request = flask_req + + if request.method == 'POST': + if use_json: + body = request.get_json() + else: + body = request.form.to_dict(flat=True) + else: + body = None + + # query string in werkzeug Request.url is very weird + # scope=profile%20email will be scope=profile email + url = request.base_url + if request.query_string: + url = url + '?' 
+ to_unicode(request.query_string) + return request_cls(request.method, url, body, request.headers) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth1/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth1/__init__.py new file mode 100644 index 000000000..780b05945 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth1/__init__.py @@ -0,0 +1,9 @@ +# flake8: noqa + +from .authorization_server import AuthorizationServer +from .resource_protector import ResourceProtector, current_credential +from .cache import ( + register_nonce_hooks, + register_temporary_credential_hooks, + create_exists_nonce_func, +) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth1/authorization_server.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth1/authorization_server.py new file mode 100644 index 000000000..1062a7b17 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth1/authorization_server.py @@ -0,0 +1,180 @@ +import logging +from werkzeug.utils import import_string +from flask import Response +from authlib.oauth1 import ( + OAuth1Request, + AuthorizationServer as _AuthorizationServer, +) +from authlib.common.security import generate_token +from authlib.common.urls import url_encode +from ..flask_helpers import create_oauth_request + +log = logging.getLogger(__name__) + + +class AuthorizationServer(_AuthorizationServer): + """Flask implementation of :class:`authlib.rfc5849.AuthorizationServer`. + Initialize it with Flask app instance, client model class and cache:: + + server = AuthorizationServer(app=app, query_client=query_client) + # or initialize lazily + server = AuthorizationServer() + server.init_app(app, query_client=query_client) + + :param app: A Flask app instance + :param query_client: A function to get client by client_id. The client + model class MUST implement the methods described by + :class:`~authlib.oauth1.rfc5849.ClientMixin`. 
+ :param token_generator: A function to generate token + """ + + def __init__(self, app=None, query_client=None, token_generator=None): + self.app = app + self.query_client = query_client + self.token_generator = token_generator + + self._hooks = { + 'exists_nonce': None, + 'create_temporary_credential': None, + 'get_temporary_credential': None, + 'delete_temporary_credential': None, + 'create_authorization_verifier': None, + 'create_token_credential': None, + } + if app is not None: + self.init_app(app) + + def init_app(self, app, query_client=None, token_generator=None): + if query_client is not None: + self.query_client = query_client + if token_generator is not None: + self.token_generator = token_generator + + if self.token_generator is None: + self.token_generator = self.create_token_generator(app) + + methods = app.config.get('OAUTH1_SUPPORTED_SIGNATURE_METHODS') + if methods and isinstance(methods, (list, tuple)): + self.SUPPORTED_SIGNATURE_METHODS = methods + + self.app = app + + def register_hook(self, name, func): + if name not in self._hooks: + raise ValueError('Invalid "name" of hook') + self._hooks[name] = func + + def create_token_generator(self, app): + token_generator = app.config.get('OAUTH1_TOKEN_GENERATOR') + + if isinstance(token_generator, str): + token_generator = import_string(token_generator) + else: + length = app.config.get('OAUTH1_TOKEN_LENGTH', 42) + + def token_generator(): + return generate_token(length) + + secret_generator = app.config.get('OAUTH1_TOKEN_SECRET_GENERATOR') + if isinstance(secret_generator, str): + secret_generator = import_string(secret_generator) + else: + length = app.config.get('OAUTH1_TOKEN_SECRET_LENGTH', 48) + + def secret_generator(): + return generate_token(length) + + def create_token(): + return { + 'oauth_token': token_generator(), + 'oauth_token_secret': secret_generator() + } + return create_token + + def get_client_by_id(self, client_id): + return self.query_client(client_id) + + def exists_nonce(self, nonce, request): + func = self._hooks['exists_nonce'] + if callable(func): + timestamp = request.timestamp + client_id = request.client_id + token = request.token + return func(nonce, timestamp, client_id, token) + + raise RuntimeError('"exists_nonce" hook is required.') + + def create_temporary_credential(self, request): + func = self._hooks['create_temporary_credential'] + if callable(func): + token = self.token_generator() + return func(token, request.client_id, request.redirect_uri) + raise RuntimeError( + '"create_temporary_credential" hook is required.' + ) + + def get_temporary_credential(self, request): + func = self._hooks['get_temporary_credential'] + if callable(func): + return func(request.token) + + raise RuntimeError( + '"get_temporary_credential" hook is required.' + ) + + def delete_temporary_credential(self, request): + func = self._hooks['delete_temporary_credential'] + if callable(func): + return func(request.token) + + raise RuntimeError( + '"delete_temporary_credential" hook is required.' + ) + + def create_authorization_verifier(self, request): + func = self._hooks['create_authorization_verifier'] + if callable(func): + verifier = generate_token(36) + func(request.credential, request.user, verifier) + return verifier + + raise RuntimeError( + '"create_authorization_verifier" hook is required.' 
+ ) + + def create_token_credential(self, request): + func = self._hooks['create_token_credential'] + if callable(func): + temporary_credential = request.credential + token = self.token_generator() + return func(token, temporary_credential) + + raise RuntimeError( + '"create_token_credential" hook is required.' + ) + + def create_temporary_credentials_response(self, request=None): + return super(AuthorizationServer, self)\ + .create_temporary_credentials_response(request) + + def check_authorization_request(self): + req = self.create_oauth1_request(None) + self.validate_authorization_request(req) + return req + + def create_authorization_response(self, request=None, grant_user=None): + return super(AuthorizationServer, self)\ + .create_authorization_response(request, grant_user) + + def create_token_response(self, request=None): + return super(AuthorizationServer, self).create_token_response(request) + + def create_oauth1_request(self, request): + return create_oauth_request(request, OAuth1Request) + + def handle_response(self, status_code, payload, headers): + return Response( + url_encode(payload), + status=status_code, + headers=headers + ) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth1/cache.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth1/cache.py new file mode 100644 index 000000000..c22211baf --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth1/cache.py @@ -0,0 +1,80 @@ +from authlib.oauth1 import TemporaryCredential + + +def register_temporary_credential_hooks( + authorization_server, cache, key_prefix='temporary_credential:'): + """Register temporary credential related hooks to authorization server. + + :param authorization_server: AuthorizationServer instance + :param cache: Cache instance + :param key_prefix: key prefix for temporary credential + """ + + def create_temporary_credential(token, client_id, redirect_uri): + key = key_prefix + token['oauth_token'] + token['client_id'] = client_id + if redirect_uri: + token['oauth_callback'] = redirect_uri + + cache.set(key, token, timeout=86400) # cache for one day + return TemporaryCredential(token) + + def get_temporary_credential(oauth_token): + if not oauth_token: + return None + key = key_prefix + oauth_token + value = cache.get(key) + if value: + return TemporaryCredential(value) + + def delete_temporary_credential(oauth_token): + if oauth_token: + key = key_prefix + oauth_token + cache.delete(key) + + def create_authorization_verifier(credential, grant_user, verifier): + key = key_prefix + credential.get_oauth_token() + credential['oauth_verifier'] = verifier + credential['user_id'] = grant_user.get_user_id() + cache.set(key, credential, timeout=86400) + return credential + + authorization_server.register_hook( + 'create_temporary_credential', create_temporary_credential) + authorization_server.register_hook( + 'get_temporary_credential', get_temporary_credential) + authorization_server.register_hook( + 'delete_temporary_credential', delete_temporary_credential) + authorization_server.register_hook( + 'create_authorization_verifier', create_authorization_verifier) + + +def create_exists_nonce_func(cache, key_prefix='nonce:', expires=86400): + """Create an ``exists_nonce`` function that can be used in hooks and + resource protector. 
+ + :param cache: Cache instance + :param key_prefix: key prefix for temporary credential + :param expires: Expire time for nonce + """ + def exists_nonce(nonce, timestamp, client_id, oauth_token): + key = '{}{}-{}-{}'.format(key_prefix, nonce, timestamp, client_id) + if oauth_token: + key = '{}-{}'.format(key, oauth_token) + rv = cache.has(key) + cache.set(key, 1, timeout=expires) + return rv + return exists_nonce + + +def register_nonce_hooks( + authorization_server, cache, key_prefix='nonce:', expires=86400): + """Register nonce related hooks to authorization server. + + :param authorization_server: AuthorizationServer instance + :param cache: Cache instance + :param key_prefix: key prefix for temporary credential + :param expires: Expire time for nonce + """ + exists_nonce = create_exists_nonce_func(cache, key_prefix, expires) + authorization_server.register_hook('exists_nonce', exists_nonce) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth1/resource_protector.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth1/resource_protector.py new file mode 100644 index 000000000..9f3361e15 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth1/resource_protector.py @@ -0,0 +1,114 @@ +import functools +from flask import json, Response +from flask import request as _req +from flask import _app_ctx_stack +from werkzeug.local import LocalProxy +from authlib.consts import default_json_headers +from authlib.oauth1 import ResourceProtector as _ResourceProtector +from authlib.oauth1.errors import OAuth1Error + + +class ResourceProtector(_ResourceProtector): + """A protecting method for resource servers. Initialize a resource + protector with the query_token method:: + + from authlib.integrations.flask_oauth1 import ResourceProtector, current_credential + from authlib.integrations.flask_oauth1 import create_exists_nonce_func + from authlib.integrations.sqla_oauth1 import ( + create_query_client_func, + create_query_token_func, + ) + from your_project.models import Token, User, cache + + # you need to define a ``cache`` instance yourself + + require_oauth= ResourceProtector( + app, + query_client=create_query_client_func(db.session, OAuth1Client), + query_token=create_query_token_func(db.session, OAuth1Token), + exists_nonce=create_exists_nonce_func(cache) + ) + # or initialize it lazily + require_oauth = ResourceProtector() + require_oauth.init_app( + app, + query_client=create_query_client_func(db.session, OAuth1Client), + query_token=create_query_token_func(db.session, OAuth1Token), + exists_nonce=create_exists_nonce_func(cache) + ) + """ + def __init__(self, app=None, query_client=None, + query_token=None, exists_nonce=None): + self.query_client = query_client + self.query_token = query_token + self._exists_nonce = exists_nonce + + self.app = app + if app: + self.init_app(app) + + def init_app(self, app, query_client=None, query_token=None, + exists_nonce=None): + if query_client is not None: + self.query_client = query_client + if query_token is not None: + self.query_token = query_token + if exists_nonce is not None: + self._exists_nonce = exists_nonce + + methods = app.config.get('OAUTH1_SUPPORTED_SIGNATURE_METHODS') + if methods and isinstance(methods, (list, tuple)): + self.SUPPORTED_SIGNATURE_METHODS = methods + + self.app = app + + def get_client_by_id(self, client_id): + return self.query_client(client_id) + + def get_token_credential(self, request): + return 
self.query_token(request.client_id, request.token) + + def exists_nonce(self, nonce, request): + if not self._exists_nonce: + raise RuntimeError('"exists_nonce" function is required.') + + timestamp = request.timestamp + client_id = request.client_id + token = request.token + return self._exists_nonce(nonce, timestamp, client_id, token) + + def acquire_credential(self): + req = self.validate_request( + _req.method, + _req.url, + _req.form.to_dict(flat=True), + _req.headers + ) + ctx = _app_ctx_stack.top + ctx.authlib_server_oauth1_credential = req.credential + return req.credential + + def __call__(self, scope=None): + def wrapper(f): + @functools.wraps(f) + def decorated(*args, **kwargs): + try: + self.acquire_credential() + except OAuth1Error as error: + body = dict(error.get_body()) + return Response( + json.dumps(body), + status=error.status_code, + headers=default_json_headers, + ) + return f(*args, **kwargs) + return decorated + return wrapper + + +def _get_current_credential(): + ctx = _app_ctx_stack.top + return getattr(ctx, 'authlib_server_oauth1_credential', None) + + +current_credential = LocalProxy(_get_current_credential) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth2/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth2/__init__.py new file mode 100644 index 000000000..170a7190a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth2/__init__.py @@ -0,0 +1,12 @@ +# flake8: noqa + +from .authorization_server import AuthorizationServer +from .resource_protector import ( + ResourceProtector, + current_token, +) +from .signals import ( + client_authenticated, + token_authenticated, + token_revoked, +) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth2/authorization_server.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth2/authorization_server.py new file mode 100644 index 000000000..7eb411c6c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth2/authorization_server.py @@ -0,0 +1,212 @@ +from werkzeug.utils import import_string +from flask import Response, json +from authlib.deprecate import deprecate +from authlib.oauth2 import ( + OAuth2Request, + HttpRequest, + AuthorizationServer as _AuthorizationServer, +) +from authlib.oauth2.rfc6750 import BearerToken +from authlib.oauth2.rfc8414 import AuthorizationServerMetadata +from authlib.common.security import generate_token +from authlib.common.encoding import to_unicode +from .signals import client_authenticated, token_revoked +from ..flask_helpers import create_oauth_request + + +class AuthorizationServer(_AuthorizationServer): + """Flask implementation of :class:`authlib.oauth2.rfc6749.AuthorizationServer`. 
+ Initialize it with ``query_client``, ``save_token`` methods and Flask + app instance:: + + def query_client(client_id): + return Client.query.filter_by(client_id=client_id).first() + + def save_token(token, request): + if request.user: + user_id = request.user.get_user_id() + else: + user_id = None + client = request.client + tok = Token( + client_id=client.client_id, + user_id=user.get_user_id(), + **token + ) + db.session.add(tok) + db.session.commit() + + server = AuthorizationServer(app, query_client, save_token) + # or initialize lazily + server = AuthorizationServer() + server.init_app(app, query_client, save_token) + """ + metadata_class = AuthorizationServerMetadata + + def __init__(self, app=None, query_client=None, save_token=None): + super(AuthorizationServer, self).__init__( + query_client=query_client, + save_token=save_token, + ) + self.config = {} + if app is not None: + self.init_app(app) + + def init_app(self, app, query_client=None, save_token=None): + """Initialize later with Flask app instance.""" + if query_client is not None: + self.query_client = query_client + if save_token is not None: + self.save_token = save_token + + self.generate_token = self.create_bearer_token_generator(app.config) + + metadata_file = app.config.get('OAUTH2_METADATA_FILE') + if metadata_file: + with open(metadata_file) as f: + metadata = self.metadata_class(json.load(f)) + metadata.validate() + self.metadata = metadata + + self.config.setdefault('error_uris', app.config.get('OAUTH2_ERROR_URIS')) + if app.config.get('OAUTH2_JWT_ENABLED'): + deprecate('Define "get_jwt_config" in OpenID Connect grants', '1.0') + self.init_jwt_config(app.config) + + def init_jwt_config(self, config): + """Initialize JWT related configuration.""" + jwt_iss = config.get('OAUTH2_JWT_ISS') + if not jwt_iss: + raise RuntimeError('Missing "OAUTH2_JWT_ISS" configuration.') + + jwt_key_path = config.get('OAUTH2_JWT_KEY_PATH') + if jwt_key_path: + with open(jwt_key_path, 'r') as f: + if jwt_key_path.endswith('.json'): + jwt_key = json.load(f) + else: + jwt_key = to_unicode(f.read()) + else: + jwt_key = config.get('OAUTH2_JWT_KEY') + + if not jwt_key: + raise RuntimeError('Missing "OAUTH2_JWT_KEY" configuration.') + + jwt_alg = config.get('OAUTH2_JWT_ALG') + if not jwt_alg: + raise RuntimeError('Missing "OAUTH2_JWT_ALG" configuration.') + + jwt_exp = config.get('OAUTH2_JWT_EXP', 3600) + self.config.setdefault('jwt_iss', jwt_iss) + self.config.setdefault('jwt_key', jwt_key) + self.config.setdefault('jwt_alg', jwt_alg) + self.config.setdefault('jwt_exp', jwt_exp) + + def get_error_uris(self, request): + error_uris = self.config.get('error_uris') + if error_uris: + return dict(error_uris) + + def create_oauth2_request(self, request): + return create_oauth_request(request, OAuth2Request) + + def create_json_request(self, request): + return create_oauth_request(request, HttpRequest, True) + + def handle_response(self, status_code, payload, headers): + if isinstance(payload, dict): + payload = json.dumps(payload) + return Response(payload, status=status_code, headers=headers) + + def send_signal(self, name, *args, **kwargs): + if name == 'after_authenticate_client': + client_authenticated.send(self, *args, **kwargs) + elif name == 'after_revoke_token': + token_revoked.send(self, *args, **kwargs) + + def create_token_expires_in_generator(self, config): + """Create a generator function for generating ``expires_in`` value. + Developers can re-implement this method with a subclass if other means + required. 
The default expires_in value is defined by ``grant_type``, + different ``grant_type`` has different value. It can be configured + with:: + + OAUTH2_TOKEN_EXPIRES_IN = { + 'authorization_code': 864000, + 'urn:ietf:params:oauth:grant-type:jwt-bearer': 3600, + } + """ + expires_conf = config.get('OAUTH2_TOKEN_EXPIRES_IN') + return create_token_expires_in_generator(expires_conf) + + def create_bearer_token_generator(self, config): + """Create a generator function for generating ``token`` value. This + method will create a Bearer Token generator with + :class:`authlib.oauth2.rfc6750.BearerToken`. By default, it will not + generate ``refresh_token``, which can be turn on by configuration + ``OAUTH2_REFRESH_TOKEN_GENERATOR=True``. + """ + conf = config.get('OAUTH2_ACCESS_TOKEN_GENERATOR', True) + access_token_generator = create_token_generator(conf, 42) + + conf = config.get('OAUTH2_REFRESH_TOKEN_GENERATOR', False) + refresh_token_generator = create_token_generator(conf, 48) + + expires_generator = self.create_token_expires_in_generator(config) + return BearerToken( + access_token_generator, + refresh_token_generator, + expires_generator + ) + + def validate_consent_request(self, request=None, end_user=None): + """Validate current HTTP request for authorization page. This page + is designed for resource owner to grant or deny the authorization:: + + @app.route('/authorize', methods=['GET']) + def authorize(): + try: + grant = server.validate_consent_request(end_user=current_user) + return render_template( + 'authorize.html', + grant=grant, + user=current_user + ) + except OAuth2Error as error: + return render_template( + 'error.html', + error=error + ) + """ + req = self.create_oauth2_request(request) + req.user = end_user + + grant = self.get_authorization_grant(req) + grant.validate_consent_request() + if not hasattr(grant, 'prompt'): + grant.prompt = None + return grant + + +def create_token_expires_in_generator(expires_in_conf=None): + data = {} + data.update(BearerToken.GRANT_TYPES_EXPIRES_IN) + if expires_in_conf: + data.update(expires_in_conf) + + def expires_in(client, grant_type): + return data.get(grant_type, BearerToken.DEFAULT_EXPIRES_IN) + + return expires_in + + +def create_token_generator(token_generator_conf, length=42): + if callable(token_generator_conf): + return token_generator_conf + + if isinstance(token_generator_conf, str): + return import_string(token_generator_conf) + elif token_generator_conf is True: + def token_generator(*args, **kwargs): + return generate_token(length) + return token_generator diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth2/errors.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth2/errors.py new file mode 100644 index 000000000..2217d99da --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth2/errors.py @@ -0,0 +1,37 @@ +import werkzeug +from werkzeug.exceptions import HTTPException + +_version = werkzeug.__version__.split('.')[0] + +if _version in ('0', '1'): + class _HTTPException(HTTPException): + def __init__(self, code, body, headers, response=None): + super(_HTTPException, self).__init__(None, response) + self.code = code + + self.body = body + self.headers = headers + + def get_body(self, environ=None): + return self.body + + def get_headers(self, environ=None): + return self.headers +else: + class _HTTPException(HTTPException): + def __init__(self, code, body, headers, response=None): + super(_HTTPException, 
self).__init__(None, response) + self.code = code + + self.body = body + self.headers = headers + + def get_body(self, environ=None, scope=None): + return self.body + + def get_headers(self, environ=None, scope=None): + return self.headers + + +def raise_http_exception(status, body, headers): + raise _HTTPException(status, body, headers) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth2/resource_protector.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth2/resource_protector.py new file mode 100644 index 000000000..41535f35f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth2/resource_protector.py @@ -0,0 +1,123 @@ +import functools +from contextlib import contextmanager +from flask import json +from flask import request as _req +from flask import _app_ctx_stack +from werkzeug.local import LocalProxy +from authlib.oauth2 import ( + OAuth2Error, + ResourceProtector as _ResourceProtector +) +from authlib.oauth2.rfc6749 import ( + MissingAuthorizationError, + HttpRequest, +) +from .signals import token_authenticated +from .errors import raise_http_exception + + +class ResourceProtector(_ResourceProtector): + """A protecting method for resource servers. Creating a ``require_oauth`` + decorator easily with ResourceProtector:: + + from authlib.integrations.flask_oauth2 import ResourceProtector + + require_oauth = ResourceProtector() + + # add bearer token validator + from authlib.oauth2.rfc6750 import BearerTokenValidator + from project.models import Token + + class MyBearerTokenValidator(BearerTokenValidator): + def authenticate_token(self, token_string): + return Token.query.filter_by(access_token=token_string).first() + + def request_invalid(self, request): + return False + + def token_revoked(self, token): + return False + + require_oauth.register_token_validator(MyBearerTokenValidator()) + + # protect resource with require_oauth + + @app.route('/user') + @require_oauth('profile') + def user_profile(): + user = User.query.get(current_token.user_id) + return jsonify(user.to_dict()) + + """ + def raise_error_response(self, error): + """Raise HTTPException for OAuth2Error. Developers can re-implement + this method to customize the error response. + + :param error: OAuth2Error + :raise: HTTPException + """ + status = error.status_code + body = json.dumps(dict(error.get_body())) + headers = error.get_headers() + raise_http_exception(status, body, headers) + + def acquire_token(self, scope=None, operator='AND'): + """A method to acquire current valid token with the given scope. + + :param scope: string or list of scope values + :param operator: value of "AND" or "OR" + :return: token object + """ + request = HttpRequest( + _req.method, + _req.full_path, + _req.data, + _req.headers + ) + if not callable(operator): + operator = operator.upper() + token = self.validate_request(scope, request, operator) + token_authenticated.send(self, token=token) + ctx = _app_ctx_stack.top + ctx.authlib_server_oauth2_token = token + return token + + @contextmanager + def acquire(self, scope=None, operator='AND'): + """The with statement of ``require_oauth``. 
Instead of using a + decorator, you can use a with statement instead:: + + @app.route('/api/user') + def user_api(): + with require_oauth.acquire('profile') as token: + user = User.query.get(token.user_id) + return jsonify(user.to_dict()) + """ + try: + yield self.acquire_token(scope, operator) + except OAuth2Error as error: + self.raise_error_response(error) + + def __call__(self, scope=None, operator='AND', optional=False): + def wrapper(f): + @functools.wraps(f) + def decorated(*args, **kwargs): + try: + self.acquire_token(scope, operator) + except MissingAuthorizationError as error: + if optional: + return f(*args, **kwargs) + self.raise_error_response(error) + except OAuth2Error as error: + self.raise_error_response(error) + return f(*args, **kwargs) + return decorated + return wrapper + + +def _get_current_token(): + ctx = _app_ctx_stack.top + return getattr(ctx, 'authlib_server_oauth2_token', None) + + +current_token = LocalProxy(_get_current_token) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth2/signals.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth2/signals.py new file mode 100644 index 000000000..c61e0119d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/flask_oauth2/signals.py @@ -0,0 +1,12 @@ +from flask.signals import Namespace + +_signal = Namespace() + +#: signal when client is authenticated +client_authenticated = _signal.signal('client_authenticated') + +#: signal when token is revoked +token_revoked = _signal.signal('token_revoked') + +#: signal when token is authenticated +token_authenticated = _signal.signal('token_authenticated') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/httpx_client/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/httpx_client/__init__.py new file mode 100644 index 000000000..3b5437ccb --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/httpx_client/__init__.py @@ -0,0 +1,25 @@ +from authlib.oauth1 import ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_RSA_SHA1, + SIGNATURE_PLAINTEXT, + SIGNATURE_TYPE_HEADER, + SIGNATURE_TYPE_QUERY, + SIGNATURE_TYPE_BODY, +) +from .oauth1_client import OAuth1Auth, AsyncOAuth1Client, OAuth1Client +from .oauth2_client import ( + OAuth2Auth, OAuth2Client, OAuth2ClientAuth, + AsyncOAuth2Client, +) +from .assertion_client import AssertionClient, AsyncAssertionClient +from ..base_client import OAuthError + + +__all__ = [ + 'OAuthError', + 'OAuth1Auth', 'AsyncOAuth1Client', + 'SIGNATURE_HMAC_SHA1', 'SIGNATURE_RSA_SHA1', 'SIGNATURE_PLAINTEXT', + 'SIGNATURE_TYPE_HEADER', 'SIGNATURE_TYPE_QUERY', 'SIGNATURE_TYPE_BODY', + 'OAuth2Auth', 'OAuth2ClientAuth', 'OAuth2Client', 'AsyncOAuth2Client', + 'AssertionClient', 'AsyncAssertionClient', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/httpx_client/assertion_client.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/httpx_client/assertion_client.py new file mode 100644 index 000000000..c1ce570ff --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/httpx_client/assertion_client.py @@ -0,0 +1,89 @@ +from httpx import AsyncClient, Client, USE_CLIENT_DEFAULT +try: + from httpx._config import UNSET +except ImportError: + UNSET = None +from authlib.oauth2.rfc7521 import AssertionClient as _AssertionClient +from authlib.oauth2.rfc7523 import JWTBearerGrant 
+from authlib.oauth2 import OAuth2Error +from .utils import extract_client_kwargs +from .oauth2_client import OAuth2Auth + +__all__ = ['AsyncAssertionClient', 'AssertionClient'] + + +class AsyncAssertionClient(_AssertionClient, AsyncClient): + token_auth_class = OAuth2Auth + JWT_BEARER_GRANT_TYPE = JWTBearerGrant.GRANT_TYPE + ASSERTION_METHODS = { + JWT_BEARER_GRANT_TYPE: JWTBearerGrant.sign, + } + DEFAULT_GRANT_TYPE = JWT_BEARER_GRANT_TYPE + + def __init__(self, token_endpoint, issuer, subject, audience=None, grant_type=None, + claims=None, token_placement='header', scope=None, **kwargs): + + client_kwargs = extract_client_kwargs(kwargs) + AsyncClient.__init__(self, **client_kwargs) + + _AssertionClient.__init__( + self, session=None, + token_endpoint=token_endpoint, issuer=issuer, subject=subject, + audience=audience, grant_type=grant_type, claims=claims, + token_placement=token_placement, scope=scope, **kwargs + ) + + async def request(self, method, url, withhold_token=False, auth=None, **kwargs): + """Send request with auto refresh token feature.""" + if not withhold_token and auth is USE_CLIENT_DEFAULT: + if not self.token or self.token.is_expired(): + await self.refresh_token() + + auth = self.token_auth + return await super(AsyncAssertionClient, self).request( + method, url, auth=auth, **kwargs) + + async def _refresh_token(self, data): + resp = await self.request( + 'POST', self.token_endpoint, data=data, withhold_token=True) + + token = resp.json() + if 'error' in token: + raise OAuth2Error( + error=token['error'], + description=token.get('error_description') + ) + self.token = token + return self.token + + +class AssertionClient(_AssertionClient, Client): + token_auth_class = OAuth2Auth + JWT_BEARER_GRANT_TYPE = JWTBearerGrant.GRANT_TYPE + ASSERTION_METHODS = { + JWT_BEARER_GRANT_TYPE: JWTBearerGrant.sign, + } + DEFAULT_GRANT_TYPE = JWT_BEARER_GRANT_TYPE + + def __init__(self, token_endpoint, issuer, subject, audience=None, grant_type=None, + claims=None, token_placement='header', scope=None, **kwargs): + + client_kwargs = extract_client_kwargs(kwargs) + Client.__init__(self, **client_kwargs) + + _AssertionClient.__init__( + self, session=self, + token_endpoint=token_endpoint, issuer=issuer, subject=subject, + audience=audience, grant_type=grant_type, claims=claims, + token_placement=token_placement, scope=scope, **kwargs + ) + + def request(self, method, url, withhold_token=False, auth=None, **kwargs): + """Send request with auto refresh token feature.""" + if not withhold_token and auth is USE_CLIENT_DEFAULT: + if not self.token or self.token.is_expired(): + self.refresh_token() + + auth = self.token_auth + return super(AssertionClient, self).request( + method, url, auth=auth, **kwargs) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/httpx_client/oauth1_client.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/httpx_client/oauth1_client.py new file mode 100644 index 000000000..7aee4e5fd --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/httpx_client/oauth1_client.py @@ -0,0 +1,101 @@ +import typing +from httpx import AsyncClient, Auth, Client, Request, Response +from authlib.oauth1 import ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_TYPE_HEADER, +) +from authlib.common.encoding import to_unicode +from authlib.oauth1 import ClientAuth +from authlib.oauth1.client import OAuth1Client as _OAuth1Client +from .utils import extract_client_kwargs +from ..base_client import OAuthError + + +class 
OAuth1Auth(Auth, ClientAuth): + """Signs the httpx request using OAuth 1 (RFC5849)""" + requires_request_body = True + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + url, headers, body = self.prepare( + request.method, str(request.url), request.headers, request.content) + headers['Content-Length'] = str(len(body)) + yield Request(method=request.method, url=url, headers=headers, data=body) + + +class AsyncOAuth1Client(_OAuth1Client, AsyncClient): + auth_class = OAuth1Auth + + def __init__(self, client_id, client_secret=None, + token=None, token_secret=None, + redirect_uri=None, rsa_key=None, verifier=None, + signature_method=SIGNATURE_HMAC_SHA1, + signature_type=SIGNATURE_TYPE_HEADER, + force_include_body=False, **kwargs): + + _client_kwargs = extract_client_kwargs(kwargs) + AsyncClient.__init__(self, **_client_kwargs) + + _OAuth1Client.__init__( + self, None, + client_id=client_id, client_secret=client_secret, + token=token, token_secret=token_secret, + redirect_uri=redirect_uri, rsa_key=rsa_key, verifier=verifier, + signature_method=signature_method, signature_type=signature_type, + force_include_body=force_include_body, **kwargs) + + async def fetch_access_token(self, url, verifier=None, **kwargs): + """Method for fetching an access token from the token endpoint. + + This is the final step in the OAuth 1 workflow. An access token is + obtained using all previously obtained credentials, including the + verifier from the authorization step. + + :param url: Access Token endpoint. + :param verifier: A verifier string to prove authorization was granted. + :param kwargs: Extra parameters to include for fetching access token. + :return: A token dict. + """ + if verifier: + self.auth.verifier = verifier + if not self.auth.verifier: + self.handle_error('missing_verifier', 'Missing "verifier" value') + token = await self._fetch_token(url, **kwargs) + self.auth.verifier = None + return token + + async def _fetch_token(self, url, **kwargs): + resp = await self.post(url, **kwargs) + text = await resp.aread() + token = self.parse_response_token(resp.status_code, to_unicode(text)) + self.token = token + return token + + @staticmethod + def handle_error(error_type, error_description): + raise OAuthError(error_type, error_description) + + +class OAuth1Client(_OAuth1Client, Client): + auth_class = OAuth1Auth + + def __init__(self, client_id, client_secret=None, + token=None, token_secret=None, + redirect_uri=None, rsa_key=None, verifier=None, + signature_method=SIGNATURE_HMAC_SHA1, + signature_type=SIGNATURE_TYPE_HEADER, + force_include_body=False, **kwargs): + + _client_kwargs = extract_client_kwargs(kwargs) + Client.__init__(self, **_client_kwargs) + + _OAuth1Client.__init__( + self, self, + client_id=client_id, client_secret=client_secret, + token=token, token_secret=token_secret, + redirect_uri=redirect_uri, rsa_key=rsa_key, verifier=verifier, + signature_method=signature_method, signature_type=signature_type, + force_include_body=force_include_body, **kwargs) + + @staticmethod + def handle_error(error_type, error_description): + raise OAuthError(error_type, error_description) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/httpx_client/oauth2_client.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/httpx_client/oauth2_client.py new file mode 100644 index 000000000..1d1175748 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/httpx_client/oauth2_client.py @@ 
-0,0 +1,212 @@ +import asyncio +import typing +from httpx import AsyncClient, Auth, Client, Request, Response, USE_CLIENT_DEFAULT +from authlib.common.urls import url_decode +from authlib.oauth2.client import OAuth2Client as _OAuth2Client +from authlib.oauth2.auth import ClientAuth, TokenAuth +from .utils import HTTPX_CLIENT_KWARGS +from ..base_client import ( + OAuthError, + InvalidTokenError, + MissingTokenError, + UnsupportedTokenTypeError, +) + +__all__ = [ + 'OAuth2Auth', 'OAuth2ClientAuth', + 'AsyncOAuth2Client', +] + + +class OAuth2Auth(Auth, TokenAuth): + """Sign requests for OAuth 2.0, currently only bearer token is supported.""" + requires_request_body = True + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + try: + url, headers, body = self.prepare( + str(request.url), request.headers, request.content) + headers['Content-Length'] = str(len(body)) + yield Request(method=request.method, url=url, headers=headers, data=body) + except KeyError as error: + description = 'Unsupported token_type: {}'.format(str(error)) + raise UnsupportedTokenTypeError(description=description) + + +class OAuth2ClientAuth(Auth, ClientAuth): + requires_request_body = True + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + url, headers, body = self.prepare( + request.method, str(request.url), request.headers, request.content) + headers['Content-Length'] = str(len(body)) + yield Request(method=request.method, url=url, headers=headers, data=body) + + +class AsyncOAuth2Client(_OAuth2Client, AsyncClient): + SESSION_REQUEST_PARAMS = HTTPX_CLIENT_KWARGS + + client_auth_class = OAuth2ClientAuth + token_auth_class = OAuth2Auth + + def __init__(self, client_id=None, client_secret=None, + token_endpoint_auth_method=None, + revocation_endpoint_auth_method=None, + scope=None, redirect_uri=None, + token=None, token_placement='header', + update_token=None, **kwargs): + + # extract httpx.Client kwargs + client_kwargs = self._extract_session_request_params(kwargs) + AsyncClient.__init__(self, **client_kwargs) + + # We use a "reverse" Event to synchronize coroutines to prevent + # multiple concurrent attempts to refresh the same token + self._token_refresh_event = asyncio.Event() + self._token_refresh_event.set() + + _OAuth2Client.__init__( + self, session=None, + client_id=client_id, client_secret=client_secret, + token_endpoint_auth_method=token_endpoint_auth_method, + revocation_endpoint_auth_method=revocation_endpoint_auth_method, + scope=scope, redirect_uri=redirect_uri, + token=token, token_placement=token_placement, + update_token=update_token, **kwargs + ) + + @staticmethod + def handle_error(error_type, error_description): + raise OAuthError(error_type, error_description) + + async def request(self, method, url, withhold_token=False, auth=USE_CLIENT_DEFAULT, **kwargs): + if not withhold_token and auth is USE_CLIENT_DEFAULT: + if not self.token: + raise MissingTokenError() + + if self.token.is_expired(): + await self.ensure_active_token() + + auth = self.token_auth + + return await super(AsyncOAuth2Client, self).request( + method, url, auth=auth, **kwargs) + + async def ensure_active_token(self): + if self._token_refresh_event.is_set(): + # Unset the event so other coroutines don't try to update the token + self._token_refresh_event.clear() + refresh_token = self.token.get('refresh_token') + url = self.metadata.get('token_endpoint') + if refresh_token and url: + await self.refresh_token(url, refresh_token=refresh_token) + elif 
self.metadata.get('grant_type') == 'client_credentials': + access_token = self.token['access_token'] + token = await self.fetch_token(url, grant_type='client_credentials') + if self.update_token: + await self.update_token(token, access_token=access_token) + else: + raise InvalidTokenError() + # Notify coroutines that token is refreshed + self._token_refresh_event.set() + return + await self._token_refresh_event.wait() # wait until the token is ready + + async def _fetch_token(self, url, body='', headers=None, auth=USE_CLIENT_DEFAULT, + method='POST', **kwargs): + if method.upper() == 'POST': + resp = await self.post( + url, data=dict(url_decode(body)), headers=headers, + auth=auth, **kwargs) + else: + if '?' in url: + url = '&'.join([url, body]) + else: + url = '?'.join([url, body]) + resp = await self.get(url, headers=headers, auth=auth, **kwargs) + + for hook in self.compliance_hook['access_token_response']: + resp = hook(resp) + + return self.parse_response_token(resp.json()) + + async def _refresh_token(self, url, refresh_token=None, body='', + headers=None, auth=USE_CLIENT_DEFAULT, **kwargs): + resp = await self.post( + url, data=dict(url_decode(body)), headers=headers, + auth=auth, **kwargs) + + for hook in self.compliance_hook['refresh_token_response']: + resp = hook(resp) + + token = self.parse_response_token(resp.json()) + if 'refresh_token' not in token: + self.token['refresh_token'] = refresh_token + + if self.update_token: + await self.update_token(self.token, refresh_token=refresh_token) + + return self.token + + def _http_post(self, url, body=None, auth=USE_CLIENT_DEFAULT, headers=None, **kwargs): + return self.post( + url, data=dict(url_decode(body)), + headers=headers, auth=auth, **kwargs) + + +class OAuth2Client(_OAuth2Client, Client): + SESSION_REQUEST_PARAMS = HTTPX_CLIENT_KWARGS + + client_auth_class = OAuth2ClientAuth + token_auth_class = OAuth2Auth + + def __init__(self, client_id=None, client_secret=None, + token_endpoint_auth_method=None, + revocation_endpoint_auth_method=None, + scope=None, redirect_uri=None, + token=None, token_placement='header', + update_token=None, **kwargs): + + # extract httpx.Client kwargs + client_kwargs = self._extract_session_request_params(kwargs) + Client.__init__(self, **client_kwargs) + + _OAuth2Client.__init__( + self, session=self, + client_id=client_id, client_secret=client_secret, + token_endpoint_auth_method=token_endpoint_auth_method, + revocation_endpoint_auth_method=revocation_endpoint_auth_method, + scope=scope, redirect_uri=redirect_uri, + token=token, token_placement=token_placement, + update_token=update_token, **kwargs + ) + + @staticmethod + def handle_error(error_type, error_description): + raise OAuthError(error_type, error_description) + + def request(self, method, url, withhold_token=False, auth=USE_CLIENT_DEFAULT, **kwargs): + if not withhold_token and auth is USE_CLIENT_DEFAULT: + if not self.token: + raise MissingTokenError() + + if self.token.is_expired(): + self.ensure_active_token() + + auth = self.token_auth + + return super(OAuth2Client, self).request( + method, url, auth=auth, **kwargs) + + def ensure_active_token(self): + refresh_token = self.token.get('refresh_token') + url = self.metadata.get('token_endpoint') + if refresh_token and url: + self.refresh_token(url, refresh_token=refresh_token) + elif self.metadata.get('grant_type') == 'client_credentials': + access_token = self.token['access_token'] + token = self.fetch_token(url, grant_type='client_credentials') + if self.update_token: + 
self.update_token(token, access_token=access_token) + else: + raise InvalidTokenError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/httpx_client/utils.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/httpx_client/utils.py new file mode 100644 index 000000000..907aa6c8d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/httpx_client/utils.py @@ -0,0 +1,14 @@ +HTTPX_CLIENT_KWARGS = [ + 'headers', 'cookies', 'verify', 'cert', 'http_versions', + 'proxies', 'timeout', 'pool_limits', 'max_redirects', + 'base_url', 'dispatch', 'app', 'backend', 'trust_env', + 'json', +] + + +def extract_client_kwargs(kwargs): + client_kwargs = {} + for k in HTTPX_CLIENT_KWARGS: + if k in kwargs: + client_kwargs[k] = kwargs.pop(k) + return client_kwargs diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/requests_client/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/requests_client/__init__.py new file mode 100644 index 000000000..fcbdec320 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/requests_client/__init__.py @@ -0,0 +1,22 @@ +from .oauth1_session import OAuth1Session, OAuth1Auth +from .oauth2_session import OAuth2Session, OAuth2Auth +from .assertion_session import AssertionSession +from ..base_client import OAuthError +from authlib.oauth1 import ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_RSA_SHA1, + SIGNATURE_PLAINTEXT, + SIGNATURE_TYPE_HEADER, + SIGNATURE_TYPE_QUERY, + SIGNATURE_TYPE_BODY, +) + + +__all__ = [ + 'OAuthError', + 'OAuth1Session', 'OAuth1Auth', + 'SIGNATURE_HMAC_SHA1', 'SIGNATURE_RSA_SHA1', 'SIGNATURE_PLAINTEXT', + 'SIGNATURE_TYPE_HEADER', 'SIGNATURE_TYPE_QUERY', 'SIGNATURE_TYPE_BODY', + 'OAuth2Session', 'OAuth2Auth', + 'AssertionSession', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/requests_client/assertion_session.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/requests_client/assertion_session.py new file mode 100644 index 000000000..1b95ea2f4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/requests_client/assertion_session.py @@ -0,0 +1,48 @@ +from requests import Session +from authlib.deprecate import deprecate +from authlib.oauth2.rfc7521 import AssertionClient +from authlib.oauth2.rfc7523 import JWTBearerGrant +from .oauth2_session import OAuth2Auth + + +class AssertionAuth(OAuth2Auth): + def ensure_active_token(self): + if not self.token or self.token.is_expired() and self.client: + return self.client.refresh_token() + + +class AssertionSession(AssertionClient, Session): + """Constructs a new Assertion Framework for OAuth 2.0 Authorization Grants + per RFC7521_. + + .. 
_RFC7521: https://tools.ietf.org/html/rfc7521 + """ + token_auth_class = AssertionAuth + JWT_BEARER_GRANT_TYPE = JWTBearerGrant.GRANT_TYPE + ASSERTION_METHODS = { + JWT_BEARER_GRANT_TYPE: JWTBearerGrant.sign, + } + DEFAULT_GRANT_TYPE = JWT_BEARER_GRANT_TYPE + + def __init__(self, token_endpoint, issuer, subject, audience=None, grant_type=None, + claims=None, token_placement='header', scope=None, **kwargs): + Session.__init__(self) + + token_url = kwargs.pop('token_url', None) + if token_url: + deprecate('Use "token_endpoint" instead of "token_url"', '1.0') + token_endpoint = token_url + + AssertionClient.__init__( + self, session=self, + token_endpoint=token_endpoint, issuer=issuer, subject=subject, + audience=audience, grant_type=grant_type, claims=claims, + token_placement=token_placement, scope=scope, **kwargs + ) + + def request(self, method, url, withhold_token=False, auth=None, **kwargs): + """Send request with auto refresh token feature.""" + if not withhold_token and auth is None: + auth = self.token_auth + return super(AssertionSession, self).request( + method, url, auth=auth, **kwargs) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/requests_client/oauth1_session.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/requests_client/oauth1_session.py new file mode 100644 index 000000000..26a12ac58 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/requests_client/oauth1_session.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +from requests import Session +from requests.auth import AuthBase +from authlib.oauth1 import ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_TYPE_HEADER, +) +from authlib.common.encoding import to_native +from authlib.oauth1 import ClientAuth +from authlib.oauth1.client import OAuth1Client +from ..base_client import OAuthError + + +class OAuth1Auth(AuthBase, ClientAuth): + """Signs the request using OAuth 1 (RFC5849)""" + + def __call__(self, req): + url, headers, body = self.prepare( + req.method, req.url, req.headers, req.body) + + req.url = to_native(url) + req.prepare_headers(headers) + if body: + req.body = body + return req + + +class OAuth1Session(OAuth1Client, Session): + auth_class = OAuth1Auth + + def __init__(self, client_id, client_secret=None, + token=None, token_secret=None, + redirect_uri=None, rsa_key=None, verifier=None, + signature_method=SIGNATURE_HMAC_SHA1, + signature_type=SIGNATURE_TYPE_HEADER, + force_include_body=False, **kwargs): + Session.__init__(self) + OAuth1Client.__init__( + self, session=self, + client_id=client_id, client_secret=client_secret, + token=token, token_secret=token_secret, + redirect_uri=redirect_uri, rsa_key=rsa_key, verifier=verifier, + signature_method=signature_method, signature_type=signature_type, + force_include_body=force_include_body, **kwargs) + + def rebuild_auth(self, prepared_request, response): + """When being redirected we should always strip Authorization + header, since nonce may not be reused as per OAuth spec. + """ + if 'Authorization' in prepared_request.headers: + # If we get redirected to a new host, we should strip out + # any authentication headers. 
+ prepared_request.headers.pop('Authorization', True) + prepared_request.prepare_auth(self.auth) + + @staticmethod + def handle_error(error_type, error_description): + raise OAuthError(error_type, error_description) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/requests_client/oauth2_session.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/requests_client/oauth2_session.py new file mode 100644 index 000000000..835487d20 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/requests_client/oauth2_session.py @@ -0,0 +1,117 @@ +from requests import Session +from requests.auth import AuthBase +from authlib.oauth2.client import OAuth2Client +from authlib.oauth2.auth import ClientAuth, TokenAuth +from ..base_client import ( + OAuthError, + InvalidTokenError, + MissingTokenError, + UnsupportedTokenTypeError, +) + +__all__ = ['OAuth2Session', 'OAuth2Auth'] + + +class OAuth2Auth(AuthBase, TokenAuth): + """Sign requests for OAuth 2.0, currently only bearer token is supported.""" + + def ensure_active_token(self): + if self.client and self.token.is_expired(): + refresh_token = self.token.get('refresh_token') + client = self.client + url = client.metadata.get('token_endpoint') + if refresh_token and url: + client.refresh_token(url, refresh_token=refresh_token) + elif client.metadata.get('grant_type') == 'client_credentials': + access_token = self.token['access_token'] + token = client.fetch_token(url, grant_type='client_credentials') + if client.update_token: + client.update_token(token, access_token=access_token) + else: + raise InvalidTokenError() + + def __call__(self, req): + self.ensure_active_token() + try: + req.url, req.headers, req.body = self.prepare( + req.url, req.headers, req.body) + except KeyError as error: + description = 'Unsupported token_type: {}'.format(str(error)) + raise UnsupportedTokenTypeError(description=description) + return req + + +class OAuth2ClientAuth(AuthBase, ClientAuth): + """Attaches OAuth Client Authentication to the given Request object. + """ + def __call__(self, req): + req.url, req.headers, req.body = self.prepare( + req.method, req.url, req.headers, req.body + ) + return req + + +class OAuth2Session(OAuth2Client, Session): + """Construct a new OAuth 2 client requests session. + + :param client_id: Client ID, which you get from client registration. + :param client_secret: Client Secret, which you get from registration. + :param authorization_endpoint: URL of the authorization server's + authorization endpoint. + :param token_endpoint: URL of the authorization server's token endpoint. + :param token_endpoint_auth_method: client authentication method for + token endpoint. + :param revocation_endpoint: URL of the authorization server's OAuth 2.0 + revocation endpoint. + :param revocation_endpoint_auth_method: client authentication method for + revocation endpoint. + :param scope: Scope that you needed to access user resources. + :param redirect_uri: Redirect URI you registered as callback. + :param token: A dict of token attributes such as ``access_token``, + ``token_type`` and ``expires_at``. + :param token_placement: The place to put token in HTTP request. Available + values: "header", "body", "uri". + :param update_token: A function for you to update token. It accept a + :class:`OAuth2Token` as parameter. 
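+
+    A minimal authorization-code sketch (the endpoints and credentials below
+    are placeholders, not real values)::
+
+        session = OAuth2Session(
+            'client_id', 'client_secret',
+            scope='profile', redirect_uri='https://example.com/callback')
+        uri, state = session.create_authorization_url(
+            'https://provider.example/oauth/authorize')
+        # redirect the user to ``uri``; ``callback_url`` is the full redirect
+        # URL received back from the provider in the callback view
+        token = session.fetch_token(
+            'https://provider.example/oauth/token',
+            authorization_response=callback_url)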
+ """ + client_auth_class = OAuth2ClientAuth + token_auth_class = OAuth2Auth + SESSION_REQUEST_PARAMS = ( + 'allow_redirects', 'timeout', 'cookies', 'files', + 'proxies', 'hooks', 'stream', 'verify', 'cert', 'json' + ) + + def __init__(self, client_id=None, client_secret=None, + token_endpoint_auth_method=None, + revocation_endpoint_auth_method=None, + scope=None, redirect_uri=None, + token=None, token_placement='header', + update_token=None, **kwargs): + + Session.__init__(self) + OAuth2Client.__init__( + self, session=self, + client_id=client_id, client_secret=client_secret, + token_endpoint_auth_method=token_endpoint_auth_method, + revocation_endpoint_auth_method=revocation_endpoint_auth_method, + scope=scope, redirect_uri=redirect_uri, + token=token, token_placement=token_placement, + update_token=update_token, **kwargs + ) + + def fetch_access_token(self, url=None, **kwargs): + """Alias for fetch_token.""" + return self.fetch_token(url, **kwargs) + + def request(self, method, url, withhold_token=False, auth=None, **kwargs): + """Send request with auto refresh token feature (if available).""" + if not withhold_token and auth is None: + if not self.token: + raise MissingTokenError() + auth = self.token_auth + return super(OAuth2Session, self).request( + method, url, auth=auth, **kwargs) + + @staticmethod + def handle_error(error_type, error_description): + raise OAuthError(error_type, error_description) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth1/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth1/__init__.py new file mode 100644 index 000000000..75f7730bd --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth1/__init__.py @@ -0,0 +1,17 @@ +# flake8: noqa + +from .mixins import ( + OAuth1ClientMixin, + OAuth1TemporaryCredentialMixin, + OAuth1TimestampNonceMixin, + OAuth1TokenCredentialMixin, +) +from .functions import ( + create_query_client_func, + create_query_token_func, + register_temporary_credential_hooks, + create_exists_nonce_func, + register_nonce_hooks, + register_token_credential_hooks, + register_authorization_hooks, +) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth1/functions.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth1/functions.py new file mode 100644 index 000000000..31bb48e87 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth1/functions.py @@ -0,0 +1,154 @@ +def create_query_client_func(session, model_class): + """Create an ``query_client`` function that can be used in authorization + server and resource protector. + + :param session: SQLAlchemy session + :param model_class: Client class + """ + def query_client(client_id): + q = session.query(model_class) + return q.filter_by(client_id=client_id).first() + return query_client + + +def create_query_token_func(session, model_class): + """Create an ``query_token`` function that can be used in + resource protector. + + :param session: SQLAlchemy session + :param model_class: TokenCredential class + """ + def query_token(client_id, oauth_token): + q = session.query(model_class) + return q.filter_by( + client_id=client_id, oauth_token=oauth_token).first() + return query_token + + +def register_temporary_credential_hooks( + authorization_server, session, model_class): + """Register temporary credential related hooks to authorization server. 
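+
+    A minimal wiring sketch, assuming an illustrative ``db.session`` and a
+    declarative ``TemporaryCredential`` model built on
+    ``OAuth1TemporaryCredentialMixin``::
+
+        register_temporary_credential_hooks(
+            server, db.session, TemporaryCredential)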
+ + :param authorization_server: AuthorizationServer instance + :param session: SQLAlchemy session + :param model_class: TemporaryCredential class + """ + + def create_temporary_credential(token, client_id, redirect_uri): + item = model_class( + client_id=client_id, + oauth_token=token['oauth_token'], + oauth_token_secret=token['oauth_token_secret'], + oauth_callback=redirect_uri, + ) + session.add(item) + session.commit() + return item + + def get_temporary_credential(oauth_token): + q = session.query(model_class).filter_by(oauth_token=oauth_token) + return q.first() + + def delete_temporary_credential(oauth_token): + q = session.query(model_class).filter_by(oauth_token=oauth_token) + q.delete(synchronize_session=False) + session.commit() + + def create_authorization_verifier(credential, grant_user, verifier): + credential.set_user_id(grant_user.get_user_id()) + credential.oauth_verifier = verifier + session.add(credential) + session.commit() + return credential + + authorization_server.register_hook( + 'create_temporary_credential', create_temporary_credential) + authorization_server.register_hook( + 'get_temporary_credential', get_temporary_credential) + authorization_server.register_hook( + 'delete_temporary_credential', delete_temporary_credential) + authorization_server.register_hook( + 'create_authorization_verifier', create_authorization_verifier) + + +def create_exists_nonce_func(session, model_class): + """Create an ``exists_nonce`` function that can be used in hooks and + resource protector. + + :param session: SQLAlchemy session + :param model_class: TimestampNonce class + """ + def exists_nonce(nonce, timestamp, client_id, oauth_token): + q = session.query(model_class.nonce).filter_by( + nonce=nonce, + timestamp=timestamp, + client_id=client_id, + ) + if oauth_token: + q = q.filter_by(oauth_token=oauth_token) + rv = q.first() + if rv: + return True + + item = model_class( + nonce=nonce, + timestamp=timestamp, + client_id=client_id, + oauth_token=oauth_token, + ) + session.add(item) + session.commit() + return False + return exists_nonce + + +def register_nonce_hooks(authorization_server, session, model_class): + """Register nonce related hooks to authorization server. + + :param authorization_server: AuthorizationServer instance + :param session: SQLAlchemy session + :param model_class: TimestampNonce class + """ + exists_nonce = create_exists_nonce_func(session, model_class) + authorization_server.register_hook('exists_nonce', exists_nonce) + + +def register_token_credential_hooks( + authorization_server, session, model_class): + """Register token credential related hooks to authorization server. 
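+
+    In practice this hook is usually registered together with the other
+    OAuth 1.0 hooks through ``register_authorization_hooks`` defined later in
+    this module (the model names are illustrative)::
+
+        register_authorization_hooks(
+            server, db.session,
+            token_credential_model=TokenCredential,
+            temporary_credential_model=TemporaryCredential,
+            timestamp_nonce_model=TimestampNonce,
+        )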
+ + :param authorization_server: AuthorizationServer instance + :param session: SQLAlchemy session + :param model_class: TokenCredential class + """ + def create_token_credential(token, temporary_credential): + item = model_class( + oauth_token=token['oauth_token'], + oauth_token_secret=token['oauth_token_secret'], + client_id=temporary_credential.get_client_id() + ) + item.set_user_id(temporary_credential.get_user_id()) + session.add(item) + session.commit() + return item + + authorization_server.register_hook( + 'create_token_credential', create_token_credential) + + +def register_authorization_hooks( + authorization_server, session, + token_credential_model, + temporary_credential_model=None, + timestamp_nonce_model=None): + + register_token_credential_hooks( + authorization_server, session, token_credential_model) + + if temporary_credential_model is not None: + register_temporary_credential_hooks( + authorization_server, session, temporary_credential_model) + + if timestamp_nonce_model is not None: + register_nonce_hooks( + authorization_server, session, timestamp_nonce_model) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth1/mixins.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth1/mixins.py new file mode 100644 index 000000000..a72dd012e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth1/mixins.py @@ -0,0 +1,97 @@ +from sqlalchemy import Column, UniqueConstraint +from sqlalchemy import String, Integer, Text +from authlib.oauth1 import ( + ClientMixin, + TemporaryCredentialMixin, + TokenCredentialMixin, +) + + +class OAuth1ClientMixin(ClientMixin): + client_id = Column(String(48), index=True) + client_secret = Column(String(120), nullable=False) + default_redirect_uri = Column(Text, nullable=False, default='') + + def get_default_redirect_uri(self): + return self.default_redirect_uri + + def get_client_secret(self): + return self.client_secret + + def get_rsa_public_key(self): + return None + + +class OAuth1TemporaryCredentialMixin(TemporaryCredentialMixin): + client_id = Column(String(48), index=True) + oauth_token = Column(String(84), unique=True, index=True) + oauth_token_secret = Column(String(84)) + oauth_verifier = Column(String(84)) + oauth_callback = Column(Text, default='') + + def get_user_id(self): + """A method to get the grant user information of this temporary + credential. 
For instance, grant user is stored in database on + ``user_id`` column:: + + def get_user_id(self): + return self.user_id + + :return: User ID + """ + if hasattr(self, 'user_id'): + return self.user_id + else: + raise NotImplementedError() + + def set_user_id(self, user_id): + if hasattr(self, 'user_id'): + setattr(self, 'user_id', user_id) + else: + raise NotImplementedError() + + def get_client_id(self): + return self.client_id + + def get_redirect_uri(self): + return self.oauth_callback + + def check_verifier(self, verifier): + return self.oauth_verifier == verifier + + def get_oauth_token(self): + return self.oauth_token + + def get_oauth_token_secret(self): + return self.oauth_token_secret + + +class OAuth1TimestampNonceMixin(object): + __table_args__ = ( + UniqueConstraint( + 'client_id', 'timestamp', 'nonce', 'oauth_token', + name='unique_nonce' + ), + ) + client_id = Column(String(48), nullable=False) + timestamp = Column(Integer, nullable=False) + nonce = Column(String(48), nullable=False) + oauth_token = Column(String(84)) + + +class OAuth1TokenCredentialMixin(TokenCredentialMixin): + client_id = Column(String(48), index=True) + oauth_token = Column(String(84), unique=True, index=True) + oauth_token_secret = Column(String(84)) + + def set_user_id(self, user_id): + if hasattr(self, 'user_id'): + setattr(self, 'user_id', user_id) + else: + raise NotImplementedError() + + def get_oauth_token(self): + return self.oauth_token + + def get_oauth_token_secret(self): + return self.oauth_token_secret diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth2/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth2/__init__.py new file mode 100644 index 000000000..1964aa1a5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth2/__init__.py @@ -0,0 +1,17 @@ +from .client_mixin import OAuth2ClientMixin +from .tokens_mixins import OAuth2AuthorizationCodeMixin, OAuth2TokenMixin +from .functions import ( + create_query_client_func, + create_save_token_func, + create_query_token_func, + create_revocation_endpoint, + create_bearer_token_validator, +) + + +__all__ = [ + 'OAuth2ClientMixin', 'OAuth2AuthorizationCodeMixin', 'OAuth2TokenMixin', + 'create_query_client_func', 'create_save_token_func', + 'create_query_token_func', 'create_revocation_endpoint', + 'create_bearer_token_validator', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth2/client_mixin.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth2/client_mixin.py new file mode 100644 index 000000000..b88b4ad8c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth2/client_mixin.py @@ -0,0 +1,134 @@ +from sqlalchemy import Column, String, Text, Integer +from authlib.common.encoding import json_loads, json_dumps +from authlib.oauth2.rfc6749 import ClientMixin +from authlib.oauth2.rfc6749.util import scope_to_list, list_to_scope + + +class OAuth2ClientMixin(ClientMixin): + client_id = Column(String(48), index=True) + client_secret = Column(String(120)) + client_id_issued_at = Column(Integer, nullable=False, default=0) + client_secret_expires_at = Column(Integer, nullable=False, default=0) + _client_metadata = Column('client_metadata', Text) + + @property + def client_info(self): + """Implementation for Client Info in OAuth 2.0 Dynamic Client + Registration Protocol via `Section 3.2.1`_. + + .. 
_`Section 3.2.1`: https://tools.ietf.org/html/rfc7591#section-3.2.1 + """ + return dict( + client_id=self.client_id, + client_secret=self.client_secret, + client_id_issued_at=self.client_id_issued_at, + client_secret_expires_at=self.client_secret_expires_at, + ) + + @property + def client_metadata(self): + if 'client_metadata' in self.__dict__: + return self.__dict__['client_metadata'] + if self._client_metadata: + data = json_loads(self._client_metadata) + self.__dict__['client_metadata'] = data + return data + return {} + + def set_client_metadata(self, value): + self._client_metadata = json_dumps(value) + + @property + def redirect_uris(self): + return self.client_metadata.get('redirect_uris', []) + + @property + def token_endpoint_auth_method(self): + return self.client_metadata.get( + 'token_endpoint_auth_method', + 'client_secret_basic' + ) + + @property + def grant_types(self): + return self.client_metadata.get('grant_types', []) + + @property + def response_types(self): + return self.client_metadata.get('response_types', []) + + @property + def client_name(self): + return self.client_metadata.get('client_name') + + @property + def client_uri(self): + return self.client_metadata.get('client_uri') + + @property + def logo_uri(self): + return self.client_metadata.get('logo_uri') + + @property + def scope(self): + return self.client_metadata.get('scope', '') + + @property + def contacts(self): + return self.client_metadata.get('contacts', []) + + @property + def tos_uri(self): + return self.client_metadata.get('tos_uri') + + @property + def policy_uri(self): + return self.client_metadata.get('policy_uri') + + @property + def jwks_uri(self): + return self.client_metadata.get('jwks_uri') + + @property + def jwks(self): + return self.client_metadata.get('jwks', []) + + @property + def software_id(self): + return self.client_metadata.get('software_id') + + @property + def software_version(self): + return self.client_metadata.get('software_version') + + def get_client_id(self): + return self.client_id + + def get_default_redirect_uri(self): + if self.redirect_uris: + return self.redirect_uris[0] + + def get_allowed_scope(self, scope): + if not scope: + return '' + allowed = set(self.scope.split()) + scopes = scope_to_list(scope) + return list_to_scope([s for s in scopes if s in allowed]) + + def check_redirect_uri(self, redirect_uri): + return redirect_uri in self.redirect_uris + + def has_client_secret(self): + return bool(self.client_secret) + + def check_client_secret(self, client_secret): + return self.client_secret == client_secret + + def check_token_endpoint_auth_method(self, method): + return self.token_endpoint_auth_method == method + + def check_response_type(self, response_type): + return response_type in self.response_types + + def check_grant_type(self, grant_type): + return grant_type in self.grant_types diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth2/functions.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth2/functions.py new file mode 100644 index 000000000..f79337bfa --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth2/functions.py @@ -0,0 +1,101 @@ +def create_query_client_func(session, client_model): + """Create an ``query_client`` function that can be used in authorization + server. 
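+
+    A minimal wiring sketch with the Flask ``AuthorizationServer`` (the
+    ``db.session``, ``Client`` and ``Token`` names are illustrative)::
+
+        query_client = create_query_client_func(db.session, Client)
+        save_token = create_save_token_func(db.session, Token)
+        server = AuthorizationServer(app, query_client, save_token)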
+ + :param session: SQLAlchemy session + :param client_model: Client model class + """ + def query_client(client_id): + q = session.query(client_model) + return q.filter_by(client_id=client_id).first() + return query_client + + +def create_save_token_func(session, token_model): + """Create an ``save_token`` function that can be used in authorization + server. + + :param session: SQLAlchemy session + :param token_model: Token model class + """ + def save_token(token, request): + if request.user: + user_id = request.user.get_user_id() + else: + user_id = None + client = request.client + item = token_model( + client_id=client.client_id, + user_id=user_id, + **token + ) + session.add(item) + session.commit() + return save_token + + +def create_query_token_func(session, token_model): + """Create an ``query_token`` function for revocation, introspection + token endpoints. + + :param session: SQLAlchemy session + :param token_model: Token model class + """ + def query_token(token, token_type_hint, client): + q = session.query(token_model) + q = q.filter_by(client_id=client.client_id, revoked=False) + if token_type_hint == 'access_token': + return q.filter_by(access_token=token).first() + elif token_type_hint == 'refresh_token': + return q.filter_by(refresh_token=token).first() + # without token_type_hint + item = q.filter_by(access_token=token).first() + if item: + return item + return q.filter_by(refresh_token=token).first() + return query_token + + +def create_revocation_endpoint(session, token_model): + """Create a revocation endpoint class with SQLAlchemy session + and token model. + + :param session: SQLAlchemy session + :param token_model: Token model class + """ + from authlib.oauth2.rfc7009 import RevocationEndpoint + query_token = create_query_token_func(session, token_model) + + class _RevocationEndpoint(RevocationEndpoint): + def query_token(self, token, token_type_hint, client): + return query_token(token, token_type_hint, client) + + def revoke_token(self, token): + token.revoked = True + session.add(token) + session.commit() + + return _RevocationEndpoint + + +def create_bearer_token_validator(session, token_model): + """Create an bearer token validator class with SQLAlchemy session + and token model. 
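+
+    A minimal usage sketch together with the Flask ``ResourceProtector``
+    (``db.session`` and ``Token`` are illustrative)::
+
+        require_oauth = ResourceProtector()
+        bearer_cls = create_bearer_token_validator(db.session, Token)
+        require_oauth.register_token_validator(bearer_cls())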
+ + :param session: SQLAlchemy session + :param token_model: Token model class + """ + from authlib.oauth2.rfc6750 import BearerTokenValidator + + class _BearerTokenValidator(BearerTokenValidator): + def authenticate_token(self, token_string): + q = session.query(token_model) + return q.filter_by(access_token=token_string).first() + + def request_invalid(self, request): + return False + + def token_revoked(self, token): + return token.revoked + + return _BearerTokenValidator diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth2/tokens_mixins.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth2/tokens_mixins.py new file mode 100644 index 000000000..fcd28e26b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/sqla_oauth2/tokens_mixins.py @@ -0,0 +1,62 @@ +import time +from sqlalchemy import Column, String, Boolean, Text, Integer +from authlib.oauth2.rfc6749 import ( + TokenMixin, + AuthorizationCodeMixin, +) + + +class OAuth2AuthorizationCodeMixin(AuthorizationCodeMixin): + code = Column(String(120), unique=True, nullable=False) + client_id = Column(String(48)) + redirect_uri = Column(Text, default='') + response_type = Column(Text, default='') + scope = Column(Text, default='') + nonce = Column(Text) + auth_time = Column( + Integer, nullable=False, + default=lambda: int(time.time()) + ) + + code_challenge = Column(Text) + code_challenge_method = Column(String(48)) + + def is_expired(self): + return self.auth_time + 300 < time.time() + + def get_redirect_uri(self): + return self.redirect_uri + + def get_scope(self): + return self.scope + + def get_auth_time(self): + return self.auth_time + + def get_nonce(self): + return self.nonce + + +class OAuth2TokenMixin(TokenMixin): + client_id = Column(String(48)) + token_type = Column(String(40)) + access_token = Column(String(255), unique=True, nullable=False) + refresh_token = Column(String(255), index=True) + scope = Column(Text, default='') + revoked = Column(Boolean, default=False) + issued_at = Column( + Integer, nullable=False, default=lambda: int(time.time()) + ) + expires_in = Column(Integer, nullable=False, default=0) + + def get_client_id(self): + return self.client_id + + def get_scope(self): + return self.scope + + def get_expires_in(self): + return self.expires_in + + def get_expires_at(self): + return self.issued_at + self.expires_in diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/starlette_client/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/starlette_client/__init__.py new file mode 100644 index 000000000..c4dbe9fc6 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/starlette_client/__init__.py @@ -0,0 +1,20 @@ +# flake8: noqa + +from ..base_client import BaseOAuth, OAuthError +from .integration import StartletteIntegration, StarletteRemoteApp + + +class OAuth(BaseOAuth): + framework_client_cls = StarletteRemoteApp + framework_integration_cls = StartletteIntegration + + def __init__(self, config=None, cache=None, fetch_token=None, update_token=None): + super(OAuth, self).__init__(fetch_token, update_token) + self.cache = cache + self.config = config + + +__all__ = [ + 'OAuth', 'StartletteIntegration', 'StarletteRemoteApp', + 'OAuthError', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/starlette_client/integration.py 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/starlette_client/integration.py new file mode 100644 index 000000000..ef2ff47a9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/integrations/starlette_client/integration.py @@ -0,0 +1,72 @@ +from starlette.responses import RedirectResponse +from ..httpx_client import AsyncOAuth1Client, AsyncOAuth2Client +from ..base_client import FrameworkIntegration, OAuthError +from ..base_client.async_app import AsyncRemoteApp + + +class StartletteIntegration(FrameworkIntegration): + oauth1_client_cls = AsyncOAuth1Client + oauth2_client_cls = AsyncOAuth2Client + + def update_token(self, token, refresh_token=None, access_token=None): + pass + + def generate_access_token_params(self, request_token_url, request): + if request_token_url: + return dict(request.query_params) + + error = request.query_params.get('error') + if error: + description = request.query_params.get('error_description') + raise OAuthError(error=error, description=description) + + return { + 'code': request.query_params.get('code'), + 'state': request.query_params.get('state'), + } + + @staticmethod + def load_config(oauth, name, params): + if not oauth.config: + return {} + + rv = {} + for k in params: + conf_key = '{}_{}'.format(name, k).upper() + v = oauth.config.get(conf_key, default=None) + if v is not None: + rv[k] = v + return rv + + +class StarletteRemoteApp(AsyncRemoteApp): + + async def authorize_redirect(self, request, redirect_uri=None, **kwargs): + """Create a HTTP Redirect for Authorization Endpoint. + + :param request: Starlette Request instance. + :param redirect_uri: Callback or redirect URI for authorization. + :param kwargs: Extra parameters to include. + :return: Starlette ``RedirectResponse`` instance. + """ + rv = await self.create_authorization_url(redirect_uri, **kwargs) + self.save_authorize_data(request, redirect_uri=redirect_uri, **rv) + return RedirectResponse(rv['url'], status_code=302) + + async def authorize_access_token(self, request, **kwargs): + """Fetch an access token. + + :param request: Starlette Request instance. + :return: A token dict. + """ + params = self.retrieve_access_token_params(request) + params.update(kwargs) + return await self.fetch_access_token(**params) + + async def parse_id_token(self, request, token, claims_options=None): + """Return an instance of UserInfo from token's ``id_token``.""" + if 'id_token' not in token: + return None + + nonce = self.framework.get_session_data(request, 'nonce') + return await self._parse_id_token(token, nonce, claims_options) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/__init__.py new file mode 100644 index 000000000..c023ae2e8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/__init__.py @@ -0,0 +1,74 @@ +""" + authlib.jose + ~~~~~~~~~~~~ + + JOSE implementation in Authlib. 
Tracking the status of JOSE specs at + https://tools.ietf.org/wg/jose/ +""" +from .rfc7515 import ( + JsonWebSignature, JWSAlgorithm, JWSHeader, JWSObject, +) +from .rfc7516 import ( + JsonWebEncryption, JWEAlgorithm, JWEEncAlgorithm, JWEZipAlgorithm, +) +from .rfc7517 import Key, KeySet, JsonWebKey +from .rfc7518 import ( + register_jws_rfc7518, + register_jwe_rfc7518, + ECDHAlgorithm, + OctKey, + RSAKey, + ECKey, +) +from .rfc7519 import JsonWebToken, BaseClaims, JWTClaims +from .rfc8037 import OKPKey, register_jws_rfc8037 +from .drafts import register_jwe_draft + +from .errors import JoseError + +# register algorithms +register_jws_rfc7518() +register_jwe_rfc7518() +register_jws_rfc8037() +register_jwe_draft() + +# attach algorithms +ECDHAlgorithm.ALLOWED_KEY_CLS = (ECKey, OKPKey) + +# register supported keys +JsonWebKey.JWK_KEY_CLS = { + OctKey.kty: OctKey, + RSAKey.kty: RSAKey, + ECKey.kty: ECKey, + OKPKey.kty: OKPKey, +} + +# compatible constants +JWS_ALGORITHMS = list(JsonWebSignature.ALGORITHMS_REGISTRY.keys()) +JWE_ALG_ALGORITHMS = list(JsonWebEncryption.ALG_REGISTRY.keys()) +JWE_ENC_ALGORITHMS = list(JsonWebEncryption.ENC_REGISTRY.keys()) +JWE_ZIP_ALGORITHMS = list(JsonWebEncryption.ZIP_REGISTRY.keys()) +JWE_ALGORITHMS = JWE_ALG_ALGORITHMS + JWE_ENC_ALGORITHMS + JWE_ZIP_ALGORITHMS + +# compatible imports +JWS = JsonWebSignature +JWE = JsonWebEncryption +JWK = JsonWebKey +JWT = JsonWebToken + +jwt = JsonWebToken() + + +__all__ = [ + 'JoseError', + + 'JWS', 'JsonWebSignature', 'JWSAlgorithm', 'JWSHeader', 'JWSObject', + 'JWE', 'JsonWebEncryption', 'JWEAlgorithm', 'JWEEncAlgorithm', 'JWEZipAlgorithm', + + 'JWK', 'JsonWebKey', 'Key', 'KeySet', + + 'OctKey', 'RSAKey', 'ECKey', 'OKPKey', + + 'JWT', 'JsonWebToken', 'BaseClaims', 'JWTClaims', + 'jwt', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/drafts/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/drafts/__init__.py new file mode 100644 index 000000000..b16013874 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/drafts/__init__.py @@ -0,0 +1,3 @@ +from ._jwe_enc_cryptography import register_jwe_draft + +__all__ = ['register_jwe_draft'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/drafts/_jwe_enc_cryptography.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/drafts/_jwe_enc_cryptography.py new file mode 100644 index 000000000..806eab93e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/drafts/_jwe_enc_cryptography.py @@ -0,0 +1,54 @@ +""" + authlib.jose.draft + ~~~~~~~~~~~~~~~~~~~~ + + Content Encryption per `Section 4`_. + + .. _`Section 4`: https://tools.ietf.org/html/draft-amringer-jose-chacha-02#section-4 +""" +from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305 +from authlib.jose.rfc7516 import JWEEncAlgorithm, JsonWebEncryption + + +class C20PEncAlgorithm(JWEEncAlgorithm): + # Use of an IV of size 96 bits is REQUIRED with this algorithm. 
+ # https://tools.ietf.org/html/draft-amringer-jose-chacha-02#section-2.2.1 + IV_SIZE = 96 + + def __init__(self, key_size): + self.name = 'C20P' + self.description = 'ChaCha20-Poly1305' + self.key_size = key_size + self.CEK_SIZE = key_size + + def encrypt(self, msg, aad, iv, key): + """Key Encryption with AES GCM + + :param msg: text to be encrypt in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param key: encrypted key in bytes + :return: (ciphertext, tag) + """ + self.check_iv(iv) + chacha = ChaCha20Poly1305(key) + ciphertext = chacha.encrypt(iv, msg, aad) + return ciphertext[:-16], ciphertext[-16:] + + def decrypt(self, ciphertext, aad, iv, tag, key): + """Key Decryption with AES GCM + + :param ciphertext: ciphertext in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param tag: authentication tag in bytes + :param key: encrypted key in bytes + :return: message + """ + self.check_iv(iv) + chacha = ChaCha20Poly1305(key) + return chacha.decrypt(iv, ciphertext + tag, aad) + + +def register_jwe_draft(): + JsonWebEncryption.register_algorithm(C20PEncAlgorithm(256)) # C20P diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/errors.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/errors.py new file mode 100644 index 000000000..2174b42e9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/errors.py @@ -0,0 +1,88 @@ +from authlib.common.errors import AuthlibBaseError + + +class JoseError(AuthlibBaseError): + pass + + +class DecodeError(JoseError): + error = 'decode_error' + + +class MissingAlgorithmError(JoseError): + error = 'missing_algorithm' + + +class UnsupportedAlgorithmError(JoseError): + error = 'unsupported_algorithm' + + +class BadSignatureError(JoseError): + error = 'bad_signature' + + def __init__(self, result): + super(BadSignatureError, self).__init__() + self.result = result + + +class InvalidHeaderParameterName(JoseError): + error = 'invalid_header_parameter_name' + + def __init__(self, name): + description = 'Invalid Header Parameter Names: {}'.format(name) + super(InvalidHeaderParameterName, self).__init__( + description=description) + + +class MissingEncryptionAlgorithmError(JoseError): + error = 'missing_encryption_algorithm' + description = 'Missing "enc" in header' + + +class UnsupportedEncryptionAlgorithmError(JoseError): + error = 'unsupported_encryption_algorithm' + description = 'Unsupported "enc" value in header' + + +class UnsupportedCompressionAlgorithmError(JoseError): + error = 'unsupported_compression_algorithm' + description = 'Unsupported "zip" value in header' + + +class InvalidUseError(JoseError): + error = 'invalid_use' + description = 'Key "use" is not valid for your usage' + + +class InvalidClaimError(JoseError): + error = 'invalid_claim' + + def __init__(self, claim): + description = 'Invalid claim "{}"'.format(claim) + super(InvalidClaimError, self).__init__(description=description) + + +class MissingClaimError(JoseError): + error = 'missing_claim' + + def __init__(self, claim): + description = 'Missing "{}" claim'.format(claim) + super(MissingClaimError, self).__init__(description=description) + + +class InsecureClaimError(JoseError): + error = 'insecure_claim' + + def __init__(self, claim): + description = 'Insecure claim "{}"'.format(claim) + super(InsecureClaimError, self).__init__(description=description) + + +class ExpiredTokenError(JoseError): + error = 
'expired_token' + description = 'The token is expired' + + +class InvalidTokenError(JoseError): + error = 'invalid_token' + description = 'The token is not valid yet' diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/jwk.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/jwk.py new file mode 100644 index 000000000..02dbbabe2 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/jwk.py @@ -0,0 +1,19 @@ +from .rfc7517 import JsonWebKey + + +def loads(obj, kid=None): + # TODO: deprecate + key_set = JsonWebKey.import_key_set(obj) + if key_set: + return key_set.find_by_kid(kid) + return JsonWebKey.import_key(obj) + + +def dumps(key, kty=None, **params): + # TODO: deprecate + if kty: + params['kty'] = kty + + key = JsonWebKey.import_key(key, params) + data = key.as_dict() + return data diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7515/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7515/__init__.py new file mode 100644 index 000000000..5f8e0f5f5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7515/__init__.py @@ -0,0 +1,18 @@ +""" + authlib.jose.rfc7515 + ~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + JSON Web Signature (JWS). + + https://tools.ietf.org/html/rfc7515 +""" + +from .jws import JsonWebSignature +from .models import JWSAlgorithm, JWSHeader, JWSObject + + +__all__ = [ + 'JsonWebSignature', + 'JWSAlgorithm', 'JWSHeader', 'JWSObject' +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7515/jws.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7515/jws.py new file mode 100644 index 000000000..20920559b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7515/jws.py @@ -0,0 +1,318 @@ +from authlib.common.encoding import ( + to_bytes, + to_unicode, + urlsafe_b64encode, + json_b64encode, + json_loads, +) +from authlib.jose.util import ( + extract_header, + extract_segment, +) +from authlib.jose.errors import ( + DecodeError, + MissingAlgorithmError, + UnsupportedAlgorithmError, + BadSignatureError, + InvalidHeaderParameterName, +) +from .models import JWSHeader, JWSObject + + +class JsonWebSignature(object): + + #: Registered Header Parameter Names defined by Section 4.1 + REGISTERED_HEADER_PARAMETER_NAMES = frozenset([ + 'alg', 'jku', 'jwk', 'kid', + 'x5u', 'x5c', 'x5t', 'x5t#S256', + 'typ', 'cty', 'crit' + ]) + + #: Defined available JWS algorithms in the registry + ALGORITHMS_REGISTRY = {} + + def __init__(self, algorithms=None, private_headers=None): + self._private_headers = private_headers + self._algorithms = algorithms + + @classmethod + def register_algorithm(cls, algorithm): + if not algorithm or algorithm.algorithm_type != 'JWS': + raise ValueError( + 'Invalid algorithm for JWS, {!r}'.format(algorithm)) + cls.ALGORITHMS_REGISTRY[algorithm.name] = algorithm + + def serialize_compact(self, protected, payload, key): + """Generate a JWS Compact Serialization. The JWS Compact Serialization + represents digitally signed or MACed content as a compact, URL-safe + string, per `Section 7.1`_. + + .. code-block:: text + + BASE64URL(UTF8(JWS Protected Header)) || '.' || + BASE64URL(JWS Payload) || '.' 
|| + BASE64URL(JWS Signature) + + :param protected: A dict of protected header + :param payload: A bytes/string of payload + :param key: Private key used to generate signature + :return: byte + """ + jws_header = JWSHeader(protected, None) + self._validate_private_headers(protected) + algorithm, key = self._prepare_algorithm_key(protected, payload, key) + + protected_segment = json_b64encode(jws_header.protected) + payload_segment = urlsafe_b64encode(to_bytes(payload)) + + # calculate signature + signing_input = b'.'.join([protected_segment, payload_segment]) + signature = urlsafe_b64encode(algorithm.sign(signing_input, key)) + return b'.'.join([protected_segment, payload_segment, signature]) + + def deserialize_compact(self, s, key, decode=None): + """Exact JWS Compact Serialization, and validate with the given key. + If key is not provided, the returned dict will contain the signature, + and signing input values. Via `Section 7.1`_. + + :param s: text of JWS Compact Serialization + :param key: key used to verify the signature + :param decode: a function to decode payload data + :return: JWSObject + :raise: BadSignatureError + + .. _`Section 7.1`: https://tools.ietf.org/html/rfc7515#section-7.1 + """ + try: + s = to_bytes(s) + signing_input, signature_segment = s.rsplit(b'.', 1) + protected_segment, payload_segment = signing_input.split(b'.', 1) + except ValueError: + raise DecodeError('Not enough segments') + + protected = _extract_header(protected_segment) + jws_header = JWSHeader(protected, None) + + payload = _extract_payload(payload_segment) + if decode: + payload = decode(payload) + + signature = _extract_signature(signature_segment) + rv = JWSObject(jws_header, payload, 'compact') + algorithm, key = self._prepare_algorithm_key(jws_header, payload, key) + if algorithm.verify(signing_input, signature, key): + return rv + raise BadSignatureError(rv) + + def serialize_json(self, header_obj, payload, key): + """Generate a JWS JSON Serialization. The JWS JSON Serialization + represents digitally signed or MACed content as a JSON object, + per `Section 7.2`_. + + :param header_obj: A dict/list of header + :param payload: A string/dict of payload + :param key: Private key used to generate signature + :return: JWSObject + + Example ``header_obj`` of JWS JSON Serialization:: + + { + "protected: {"alg": "HS256"}, + "header": {"kid": "jose"} + } + + Pass a dict to generate flattened JSON Serialization, pass a list of + header dict to generate standard JSON Serialization. + """ + payload_segment = json_b64encode(payload) + + def _sign(jws_header): + self._validate_private_headers(jws_header) + _alg, _key = self._prepare_algorithm_key(jws_header, payload, key) + + protected_segment = json_b64encode(jws_header.protected) + signing_input = b'.'.join([protected_segment, payload_segment]) + signature = urlsafe_b64encode(_alg.sign(signing_input, _key)) + + rv = { + 'protected': to_unicode(protected_segment), + 'signature': to_unicode(signature) + } + if jws_header.header is not None: + rv['header'] = jws_header.header + return rv + + if isinstance(header_obj, dict): + data = _sign(JWSHeader.from_dict(header_obj)) + data['payload'] = to_unicode(payload_segment) + return data + + signatures = [_sign(JWSHeader.from_dict(h)) for h in header_obj] + return { + 'payload': to_unicode(payload_segment), + 'signatures': signatures + } + + def deserialize_json(self, obj, key, decode=None): + """Exact JWS JSON Serialization, and validate with the given key. 
+ If key is not provided, it will return a dict without signature + verification. Header will still be validated. Via `Section 7.2`_. + + :param obj: text of JWS JSON Serialization + :param key: key used to verify the signature + :param decode: a function to decode payload data + :return: JWSObject + :raise: BadSignatureError + + .. _`Section 7.2`: https://tools.ietf.org/html/rfc7515#section-7.2 + """ + obj = _ensure_dict(obj) + + payload_segment = obj.get('payload') + if not payload_segment: + raise DecodeError('Missing "payload" value') + + payload_segment = to_bytes(payload_segment) + payload = _extract_payload(payload_segment) + if decode: + payload = decode(payload) + + if 'signatures' not in obj: + # flattened JSON JWS + jws_header, valid = self._validate_json_jws( + payload_segment, payload, obj, key) + + rv = JWSObject(jws_header, payload, 'flat') + if valid: + return rv + raise BadSignatureError(rv) + + headers = [] + is_valid = True + for header_obj in obj['signatures']: + jws_header, valid = self._validate_json_jws( + payload_segment, payload, header_obj, key) + headers.append(jws_header) + if not valid: + is_valid = False + + rv = JWSObject(headers, payload, 'json') + if is_valid: + return rv + raise BadSignatureError(rv) + + def serialize(self, header, payload, key): + """Generate a JWS Serialization. It will automatically generate a + Compact or JSON Serialization depending on the given header. If a + header is in a JSON header format, it will call + :meth:`serialize_json`, otherwise it will call + :meth:`serialize_compact`. + + :param header: A dict/list of header + :param payload: A string/dict of payload + :param key: Private key used to generate signature + :return: byte/dict + """ + if isinstance(header, (list, tuple)): + return self.serialize_json(header, payload, key) + if 'protected' in header: + return self.serialize_json(header, payload, key) + return self.serialize_compact(header, payload, key) + + def deserialize(self, s, key, decode=None): + """Deserialize JWS Serialization, both compact and JSON format. + It will automatically deserialize depending on the given JWS. + + :param s: text of JWS Compact/JSON Serialization + :param key: key used to verify the signature + :param decode: a function to decode payload data + :return: dict + :raise: BadSignatureError + + If key is not provided, it will still deserialize the serialization + without verification. 
+ """ + if isinstance(s, dict): + return self.deserialize_json(s, key, decode) + + s = to_bytes(s) + if s.startswith(b'{') and s.endswith(b'}'): + return self.deserialize_json(s, key, decode) + return self.deserialize_compact(s, key, decode) + + def _prepare_algorithm_key(self, header, payload, key): + if 'alg' not in header: + raise MissingAlgorithmError() + + alg = header['alg'] + if self._algorithms and alg not in self._algorithms: + raise UnsupportedAlgorithmError() + if alg not in self.ALGORITHMS_REGISTRY: + raise UnsupportedAlgorithmError() + + algorithm = self.ALGORITHMS_REGISTRY[alg] + if callable(key): + key = key(header, payload) + elif 'jwk' in header: + key = header['jwk'] + key = algorithm.prepare_key(key) + return algorithm, key + + def _validate_private_headers(self, header): + # only validate private headers when developers set + # private headers explicitly + if self._private_headers is not None: + names = self.REGISTERED_HEADER_PARAMETER_NAMES.copy() + names = names.union(self._private_headers) + + for k in header: + if k not in names: + raise InvalidHeaderParameterName(k) + + def _validate_json_jws(self, payload_segment, payload, header_obj, key): + protected_segment = header_obj.get('protected') + if not protected_segment: + raise DecodeError('Missing "protected" value') + + signature_segment = header_obj.get('signature') + if not signature_segment: + raise DecodeError('Missing "signature" value') + + protected_segment = to_bytes(protected_segment) + protected = _extract_header(protected_segment) + header = header_obj.get('header') + if header and not isinstance(header, dict): + raise DecodeError('Invalid "header" value') + + jws_header = JWSHeader(protected, header) + algorithm, key = self._prepare_algorithm_key(jws_header, payload, key) + signing_input = b'.'.join([protected_segment, payload_segment]) + signature = _extract_signature(to_bytes(signature_segment)) + if algorithm.verify(signing_input, signature, key): + return jws_header, True + return jws_header, False + + +def _extract_header(header_segment): + return extract_header(header_segment, DecodeError) + + +def _extract_signature(signature_segment): + return extract_segment(signature_segment, DecodeError, 'signature') + + +def _extract_payload(payload_segment): + return extract_segment(payload_segment, DecodeError, 'payload') + + +def _ensure_dict(s): + if not isinstance(s, dict): + try: + s = json_loads(to_unicode(s)) + except (ValueError, TypeError): + raise DecodeError('Invalid JWS') + + if not isinstance(s, dict): + raise DecodeError('Invalid JWS') + + return s diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7515/models.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7515/models.py new file mode 100644 index 000000000..caccfb4e7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7515/models.py @@ -0,0 +1,81 @@ +class JWSAlgorithm(object): + """Interface for JWS algorithm. JWA specification (RFC7518) SHOULD + implement the algorithms for JWS with this base implementation. + """ + name = None + description = None + algorithm_type = 'JWS' + algorithm_location = 'alg' + + def prepare_key(self, raw_data): + """Prepare key for signing and verifying signature.""" + raise NotImplementedError() + + def sign(self, msg, key): + """Sign the text msg with a private/sign key. 
+ + :param msg: message bytes to be signed + :param key: private key to sign the message + :return: bytes + """ + raise NotImplementedError + + def verify(self, msg, sig, key): + """Verify the signature of text msg with a public/verify key. + + :param msg: message bytes to be signed + :param sig: result signature to be compared + :param key: public key to verify the signature + :return: boolean + """ + raise NotImplementedError + + +class JWSHeader(dict): + """Header object for JWS. It combine the protected header and unprotected + header together. JWSHeader itself is a dict of the combined dict. e.g. + + >>> protected = {'alg': 'HS256'} + >>> header = {'kid': 'a'} + >>> jws_header = JWSHeader(protected, header) + >>> print(jws_header) + {'alg': 'HS256', 'kid': 'a'} + >>> jws_header.protected == protected + >>> jws_header.header == header + + :param protected: dict of protected header + :param header: dict of unprotected header + """ + def __init__(self, protected, header): + obj = {} + if protected: + obj.update(protected) + if header: + obj.update(header) + super(JWSHeader, self).__init__(obj) + self.protected = protected + self.header = header + + @classmethod + def from_dict(cls, obj): + if isinstance(obj, cls): + return obj + return cls(obj.get('protected'), obj.get('header')) + + +class JWSObject(dict): + """A dict instance to represent a JWS object.""" + def __init__(self, header, payload, type='compact'): + super(JWSObject, self).__init__( + header=header, + payload=payload, + ) + self.header = header + self.payload = payload + self.type = type + + @property + def headers(self): + """Alias of ``header`` for JSON typed JWS.""" + if self.type == 'json': + return self['header'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7516/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7516/__init__.py new file mode 100644 index 000000000..f7f3c3157 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7516/__init__.py @@ -0,0 +1,18 @@ +""" + authlib.jose.rfc7516 + ~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + JSON Web Encryption (JWE). 
+ + https://tools.ietf.org/html/rfc7516 +""" + +from .jwe import JsonWebEncryption +from .models import JWEAlgorithm, JWEEncAlgorithm, JWEZipAlgorithm + + +__all__ = [ + 'JsonWebEncryption', + 'JWEAlgorithm', 'JWEEncAlgorithm', 'JWEZipAlgorithm' +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7516/jwe.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7516/jwe.py new file mode 100644 index 000000000..0e5d84de3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7516/jwe.py @@ -0,0 +1,207 @@ +from authlib.common.encoding import ( + to_bytes, urlsafe_b64encode, json_b64encode +) +from authlib.jose.util import ( + extract_header, + extract_segment, +) +from authlib.jose.errors import ( + DecodeError, + MissingAlgorithmError, + UnsupportedAlgorithmError, + MissingEncryptionAlgorithmError, + UnsupportedEncryptionAlgorithmError, + UnsupportedCompressionAlgorithmError, + InvalidHeaderParameterName, +) + + +class JsonWebEncryption(object): + #: Registered Header Parameter Names defined by Section 4.1 + REGISTERED_HEADER_PARAMETER_NAMES = frozenset([ + 'alg', 'enc', 'zip', + 'jku', 'jwk', 'kid', + 'x5u', 'x5c', 'x5t', 'x5t#S256', + 'typ', 'cty', 'crit' + ]) + + ALG_REGISTRY = {} + ENC_REGISTRY = {} + ZIP_REGISTRY = {} + + def __init__(self, algorithms=None, private_headers=None): + self._algorithms = algorithms + self._private_headers = private_headers + + @classmethod + def register_algorithm(cls, algorithm): + """Register an algorithm for ``alg`` or ``enc`` or ``zip`` of JWE.""" + if not algorithm or algorithm.algorithm_type != 'JWE': + raise ValueError( + 'Invalid algorithm for JWE, {!r}'.format(algorithm)) + + if algorithm.algorithm_location == 'alg': + cls.ALG_REGISTRY[algorithm.name] = algorithm + elif algorithm.algorithm_location == 'enc': + cls.ENC_REGISTRY[algorithm.name] = algorithm + elif algorithm.algorithm_location == 'zip': + cls.ZIP_REGISTRY[algorithm.name] = algorithm + + def serialize_compact(self, protected, payload, key): + """Generate a JWE Compact Serialization. The JWE Compact Serialization + represents encrypted content as a compact, URL-safe string. This + string is: + + BASE64URL(UTF8(JWE Protected Header)) || '.' || + BASE64URL(JWE Encrypted Key) || '.' || + BASE64URL(JWE Initialization Vector) || '.' || + BASE64URL(JWE Ciphertext) || '.' || + BASE64URL(JWE Authentication Tag) + + Only one recipient is supported by the JWE Compact Serialization and + it provides no syntax to represent JWE Shared Unprotected Header, JWE + Per-Recipient Unprotected Header, or JWE AAD values. 
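+
+        Example (illustrative sketch only; ``rsa_public_pem`` is assumed to be
+        a recipient RSA public key in PEM format for ``RSA-OAEP`` wrapping)::
+
+            from authlib.jose import JsonWebEncryption
+
+            jwe = JsonWebEncryption()
+            protected = {'alg': 'RSA-OAEP', 'enc': 'A256GCM'}
+            data = jwe.serialize_compact(protected, b'hello', rsa_public_pem)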
+ + :param protected: A dict of protected header + :param payload: A string/dict of payload + :param key: Private key used to generate signature + :return: byte + """ + + # step 1: Prepare algorithms & key + alg = self.get_header_alg(protected) + enc = self.get_header_enc(protected) + zip_alg = self.get_header_zip(protected) + self._validate_private_headers(protected, alg) + + key = prepare_key(alg, protected, key) + + # self._post_validate_header(protected, algorithm) + + # step 2: Generate a random Content Encryption Key (CEK) + # use enc_alg.generate_cek() in .wrap method + + # step 3: Encrypt the CEK with the recipient's public key + wrapped = alg.wrap(enc, protected, key) + cek = wrapped['cek'] + ek = wrapped['ek'] + if 'header' in wrapped: + protected.update(wrapped['header']) + + # step 4: Generate a random JWE Initialization Vector + iv = enc.generate_iv() + + # step 5: Let the Additional Authenticated Data encryption parameter + # be ASCII(BASE64URL(UTF8(JWE Protected Header))) + protected_segment = json_b64encode(protected) + aad = to_bytes(protected_segment, 'ascii') + + # step 6: compress message if required + if zip_alg: + msg = zip_alg.compress(to_bytes(payload)) + else: + msg = to_bytes(payload) + + # step 7: perform encryption + ciphertext, tag = enc.encrypt(msg, aad, iv, cek) + return b'.'.join([ + protected_segment, + urlsafe_b64encode(ek), + urlsafe_b64encode(iv), + urlsafe_b64encode(ciphertext), + urlsafe_b64encode(tag) + ]) + + def deserialize_compact(self, s, key, decode=None): + """Exact JWS Compact Serialization, and validate with the given key. + + :param s: text of JWS Compact Serialization + :param key: key used to verify the signature + :param decode: a function to decode plaintext data + :return: dict + """ + try: + s = to_bytes(s) + protected_s, ek_s, iv_s, ciphertext_s, tag_s = s.rsplit(b'.') + except ValueError: + raise DecodeError('Not enough segments') + + protected = extract_header(protected_s, DecodeError) + ek = extract_segment(ek_s, DecodeError, 'encryption key') + iv = extract_segment(iv_s, DecodeError, 'initialization vector') + ciphertext = extract_segment(ciphertext_s, DecodeError, 'ciphertext') + tag = extract_segment(tag_s, DecodeError, 'authentication tag') + + alg = self.get_header_alg(protected) + enc = self.get_header_enc(protected) + zip_alg = self.get_header_zip(protected) + self._validate_private_headers(protected, alg) + + key = prepare_key(alg, protected, key) + + cek = alg.unwrap(enc, ek, protected, key) + aad = to_bytes(protected_s, 'ascii') + msg = enc.decrypt(ciphertext, aad, iv, tag, cek) + + if zip_alg: + payload = zip_alg.decompress(to_bytes(msg)) + else: + payload = msg + + if decode: + payload = decode(payload) + return {'header': protected, 'payload': payload} + + def get_header_alg(self, header): + if 'alg' not in header: + raise MissingAlgorithmError() + + alg = header['alg'] + if self._algorithms and alg not in self._algorithms: + raise UnsupportedAlgorithmError() + if alg not in self.ALG_REGISTRY: + raise UnsupportedAlgorithmError() + return self.ALG_REGISTRY[alg] + + def get_header_enc(self, header): + if 'enc' not in header: + raise MissingEncryptionAlgorithmError() + enc = header['enc'] + if self._algorithms and enc not in self._algorithms: + raise UnsupportedEncryptionAlgorithmError() + if enc not in self.ENC_REGISTRY: + raise UnsupportedEncryptionAlgorithmError() + return self.ENC_REGISTRY[enc] + + def get_header_zip(self, header): + if 'zip' in header: + z = header['zip'] + if self._algorithms and z not in 
self._algorithms: + raise UnsupportedCompressionAlgorithmError() + if z not in self.ZIP_REGISTRY: + raise UnsupportedCompressionAlgorithmError() + return self.ZIP_REGISTRY[z] + + def _validate_private_headers(self, header, alg): + # only validate private headers when developers set + # private headers explicitly + if self._private_headers is None: + return + + names = self.REGISTERED_HEADER_PARAMETER_NAMES.copy() + names = names.union(self._private_headers) + + if alg.EXTRA_HEADERS: + names = names.union(alg.EXTRA_HEADERS) + + for k in header: + if k not in names: + raise InvalidHeaderParameterName(k) + + +def prepare_key(alg, header, key): + if callable(key): + key = key(header, None) + elif 'jwk' in header: + key = header['jwk'] + return alg.prepare_key(key) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7516/models.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7516/models.py new file mode 100644 index 000000000..5eab89c7a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7516/models.py @@ -0,0 +1,78 @@ +import os + + +class JWEAlgorithm(object): + """Interface for JWE algorithm. JWA specification (RFC7518) SHOULD + implement the algorithms for JWE with this base implementation. + """ + EXTRA_HEADERS = None + + name = None + description = None + algorithm_type = 'JWE' + algorithm_location = 'alg' + + def prepare_key(self, raw_data): + raise NotImplementedError + + def wrap(self, enc_alg, headers, key): + raise NotImplementedError + + def unwrap(self, enc_alg, ek, headers, key): + raise NotImplementedError + + +class JWEEncAlgorithm(object): + name = None + description = None + algorithm_type = 'JWE' + algorithm_location = 'enc' + + IV_SIZE = None + CEK_SIZE = None + + def generate_cek(self): + return os.urandom(self.CEK_SIZE // 8) + + def generate_iv(self): + return os.urandom(self.IV_SIZE // 8) + + def check_iv(self, iv): + if len(iv) * 8 != self.IV_SIZE: + raise ValueError('Invalid "iv" size') + + def encrypt(self, msg, aad, iv, key): + """Encrypt the given "msg" text. + + :param msg: text to be encrypt in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param key: encrypted key in bytes + :return: (ciphertext, iv, tag) + """ + raise NotImplementedError + + def decrypt(self, ciphertext, aad, iv, tag, key): + """Decrypt the given cipher text. + + :param ciphertext: ciphertext in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param tag: authentication tag in bytes + :param key: encrypted key in bytes + :return: message + """ + raise NotImplementedError + + +class JWEZipAlgorithm(object): + name = None + description = None + algorithm_type = 'JWE' + algorithm_location = 'zip' + + def compress(self, s): + raise NotImplementedError + + def decompress(self, s): + raise NotImplementedError diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7517/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7517/__init__.py new file mode 100644 index 000000000..e2f1595e5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7517/__init__.py @@ -0,0 +1,15 @@ +""" + authlib.jose.rfc7517 + ~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + JSON Web Key (JWK). 
+ + https://tools.ietf.org/html/rfc7517 +""" +from .models import Key, KeySet +from ._cryptography_key import load_pem_key +from .jwk import JsonWebKey + + +__all__ = ['Key', 'KeySet', 'JsonWebKey', 'load_pem_key'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7517/_cryptography_key.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7517/_cryptography_key.py new file mode 100644 index 000000000..f7194a37f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7517/_cryptography_key.py @@ -0,0 +1,34 @@ +from cryptography.x509 import load_pem_x509_certificate +from cryptography.hazmat.primitives.serialization import ( + load_pem_private_key, load_pem_public_key, load_ssh_public_key, +) +from cryptography.hazmat.backends import default_backend +from authlib.common.encoding import to_bytes + + +def load_pem_key(raw, ssh_type=None, key_type=None, password=None): + raw = to_bytes(raw) + + if ssh_type and raw.startswith(ssh_type): + return load_ssh_public_key(raw, backend=default_backend()) + + if key_type == 'public': + return load_pem_public_key(raw, backend=default_backend()) + + if key_type == 'private' or password is not None: + return load_pem_private_key(raw, password=password, backend=default_backend()) + + if b'PUBLIC' in raw: + return load_pem_public_key(raw, backend=default_backend()) + + if b'PRIVATE' in raw: + return load_pem_private_key(raw, password=password, backend=default_backend()) + + if b'CERTIFICATE' in raw: + cert = load_pem_x509_certificate(raw, default_backend()) + return cert.public_key() + + try: + return load_pem_private_key(raw, password=password, backend=default_backend()) + except ValueError: + return load_pem_public_key(raw, backend=default_backend()) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7517/jwk.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7517/jwk.py new file mode 100644 index 000000000..b52d31920 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7517/jwk.py @@ -0,0 +1,63 @@ +from authlib.common.encoding import text_types, json_loads +from ._cryptography_key import load_pem_key +from .models import KeySet + + +class JsonWebKey(object): + JWK_KEY_CLS = {} + + @classmethod + def generate_key(cls, kty, crv_or_size, options=None, is_private=False): + """Generate a Key with the given key type, curve name or bit size. + + :param kty: string of ``oct``, ``RSA``, ``EC``, ``OKP`` + :param crv_or_size: curve name or bit size + :param options: a dict of other options for Key + :param is_private: create a private key or public key + :return: Key instance + """ + key_cls = cls.JWK_KEY_CLS[kty] + return key_cls.generate_key(crv_or_size, options, is_private) + + @classmethod + def import_key(cls, raw, options=None): + """Import a Key from bytes, string, PEM or dict. + + :return: Key instance + """ + kty = None + if options is not None: + kty = options.get('kty') + + if kty is None and isinstance(raw, dict): + kty = raw.get('kty') + + if kty is None: + raw_key = load_pem_key(raw) + for _kty in cls.JWK_KEY_CLS: + key_cls = cls.JWK_KEY_CLS[_kty] + if isinstance(raw_key, key_cls.RAW_KEY_CLS): + return key_cls.import_key(raw_key, options) + + key_cls = cls.JWK_KEY_CLS[kty] + return key_cls.import_key(raw, options) + + @classmethod + def import_key_set(cls, raw): + """Import KeySet from string, dict or a list of keys. 
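+
+        Example (illustrative sketch only; ``c2VjcmV0`` is a base64url-encoded
+        demo secret, not a real key)::
+
+            from authlib.jose import JsonWebKey
+
+            key_set = JsonWebKey.import_key_set(
+                {'keys': [{'kty': 'oct', 'k': 'c2VjcmV0', 'kid': 'demo'}]}
+            )
+            key = key_set.find_by_kid('demo')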
+ + :return: KeySet instance + """ + raw = _transform_raw_key(raw) + if isinstance(raw, dict) and 'keys' in raw: + keys = raw.get('keys') + return KeySet([cls.import_key(k) for k in keys]) + + +def _transform_raw_key(raw): + if isinstance(raw, text_types) and \ + raw.startswith('{') and raw.endswith('}'): + return json_loads(raw) + elif isinstance(raw, (tuple, list)): + return {'keys': raw} + return raw diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7517/models.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7517/models.py new file mode 100644 index 000000000..b3b24f321 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7517/models.py @@ -0,0 +1,156 @@ +import hashlib +from collections import OrderedDict +from authlib.common.encoding import ( + json_dumps, + to_bytes, + to_unicode, + urlsafe_b64encode, +) +from ..errors import InvalidUseError + + +class Key(dict): + """This is the base class for a JSON Web Key.""" + kty = '_' + + ALLOWED_PARAMS = [ + 'use', 'key_ops', 'alg', 'kid', + 'x5u', 'x5c', 'x5t', 'x5t#S256' + ] + + PRIVATE_KEY_OPS = [ + 'sign', 'decrypt', 'unwrapKey', + ] + PUBLIC_KEY_OPS = [ + 'verify', 'encrypt', 'wrapKey', + ] + + REQUIRED_JSON_FIELDS = [] + RAW_KEY_CLS = bytes + + def __init__(self, payload): + super(Key, self).__init__(payload) + + self.key_type = 'secret' + self.raw_key = None + + def get_op_key(self, operation): + """Get the raw key for the given key_op. This method will also + check if the given key_op is supported by this key. + + :param operation: key operation value, such as "sign", "encrypt". + :return: raw key + """ + self.check_key_op(operation) + if operation in self.PUBLIC_KEY_OPS: + return self.get_public_key() + return self.get_private_key() + + def get_public_key(self): + if self.key_type == 'private': + return self.raw_key.public_key() + return self.raw_key + + def get_private_key(self): + if self.key_type == 'private': + return self.raw_key + + def check_key_op(self, operation): + """Check if the given key_op is supported by this key. + + :param operation: key operation value, such as "sign", "encrypt". 
+ :raise: ValueError + """ + key_ops = self.get('key_ops') + if key_ops is not None and operation not in key_ops: + raise ValueError('Unsupported key_op "{}"'.format(operation)) + + if operation in self.PRIVATE_KEY_OPS and self.key_type == 'public': + raise ValueError('Invalid key_op "{}" for public key'.format(operation)) + + use = self.get('use') + if use: + if operation in ['sign', 'verify']: + if use != 'sig': + raise InvalidUseError() + elif operation in ['decrypt', 'encrypt', 'wrapKey', 'unwrapKey']: + if use != 'enc': + raise InvalidUseError() + + def as_key(self): + """Represent this key as raw key.""" + return self.raw_key + + def as_dict(self, add_kid=False): + """Represent this key as a dict of the JSON Web Key.""" + obj = dict(self) + obj['kty'] = self.kty + if add_kid and 'kid' not in obj: + obj['kid'] = self.thumbprint() + return obj + + def as_json(self): + """Represent this key as a JSON string.""" + obj = self.as_dict() + return json_dumps(obj) + + def as_pem(self): + """Represent this key as string in PEM format.""" + raise RuntimeError('Not supported') + + def thumbprint(self): + """Implementation of RFC7638 JSON Web Key (JWK) Thumbprint.""" + fields = list(self.REQUIRED_JSON_FIELDS) + fields.append('kty') + fields.sort() + data = OrderedDict() + + obj = self.as_dict() + for k in fields: + data[k] = obj[k] + + json_data = json_dumps(data) + digest_data = hashlib.sha256(to_bytes(json_data)).digest() + return to_unicode(urlsafe_b64encode(digest_data)) + + @classmethod + def check_required_fields(cls, data): + for k in cls.REQUIRED_JSON_FIELDS: + if k not in data: + raise ValueError('Missing required field: "{}"'.format(k)) + + @classmethod + def generate_key(cls, crv_or_size, options=None, is_private=False): + raise NotImplementedError() + + @classmethod + def import_key(cls, raw, options=None): + raise NotImplementedError() + + +class KeySet(object): + """This class represents a JSON Web Key Set.""" + + def __init__(self, keys): + self.keys = keys + + def as_dict(self): + """Represent this key as a dict of the JSON Web Key Set.""" + return {'keys': [k.as_dict(True) for k in self.keys]} + + def as_json(self): + """Represent this key set as a JSON string.""" + obj = self.as_dict() + return json_dumps(obj) + + def find_by_kid(self, kid): + """Find the key matches the given kid value. 
+ + :param kid: A string of kid + :return: Key instance + :raise: ValueError + """ + for k in self.keys: + if k.get('kid') == kid: + return k + raise ValueError('Invalid JSON Web Key Set') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/__init__.py new file mode 100644 index 000000000..658760245 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/__init__.py @@ -0,0 +1,19 @@ +from .jws_algorithms import register_jws_rfc7518 +from .jwe_algorithms import register_jwe_rfc7518 +from .oct_key import OctKey +from ._cryptography_backends import ( + RSAKey, ECKey, ECDHAlgorithm, + import_key, load_pem_key, export_key, +) + +__all__ = [ + 'register_jws_rfc7518', + 'register_jwe_rfc7518', + 'ECDHAlgorithm', + 'OctKey', + 'RSAKey', + 'ECKey', + 'import_key', + 'load_pem_key', + 'export_key', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/_cryptography_backends/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/_cryptography_backends/__init__.py new file mode 100644 index 000000000..5f8ab16db --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/_cryptography_backends/__init__.py @@ -0,0 +1,7 @@ +from ._jws import JWS_ALGORITHMS +from ._jwe_alg import JWE_ALG_ALGORITHMS, ECDHAlgorithm +from ._jwe_enc import JWE_ENC_ALGORITHMS +from ._keys import ( + RSAKey, ECKey, + load_pem_key, import_key, export_key, +) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/_cryptography_backends/_jwe_alg.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/_cryptography_backends/_jwe_alg.py new file mode 100644 index 000000000..8d000d216 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/_cryptography_backends/_jwe_alg.py @@ -0,0 +1,268 @@ +import os +import struct +from cryptography.hazmat.primitives.asymmetric import padding +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.keywrap import ( + aes_key_wrap, + aes_key_unwrap +) +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import GCM +from cryptography.hazmat.primitives.kdf.concatkdf import ConcatKDFHash +from authlib.common.encoding import ( + to_bytes, to_native, + urlsafe_b64decode, + urlsafe_b64encode +) +from authlib.jose.rfc7516 import JWEAlgorithm +from ._keys import RSAKey, ECKey +from ..oct_key import OctKey + + +class RSAAlgorithm(JWEAlgorithm): + #: A key of size 2048 bits or larger MUST be used with these algorithms + #: RSA1_5, RSA-OAEP, RSA-OAEP-256 + key_size = 2048 + + def __init__(self, name, description, pad_fn): + self.name = name + self.description = description + self.padding = pad_fn + + def prepare_key(self, raw_data): + return RSAKey.import_key(raw_data) + + def wrap(self, enc_alg, headers, key): + cek = enc_alg.generate_cek() + op_key = key.get_op_key('wrapKey') + if op_key.key_size < self.key_size: + raise ValueError('A key of size 2048 bits or larger MUST be used') + ek = op_key.encrypt(cek, self.padding) + return {'ek': ek, 'cek': cek} + + def unwrap(self, enc_alg, ek, headers, key): + # it will raise ValueError if failed + op_key = 
key.get_op_key('unwrapKey') + cek = op_key.decrypt(ek, self.padding) + print(cek, enc_alg.key_size) + if len(cek) * 8 != enc_alg.CEK_SIZE: + raise ValueError('Invalid "cek" length') + return cek + + +class AESAlgorithm(JWEAlgorithm): + def __init__(self, key_size): + self.name = 'A{}KW'.format(key_size) + self.description = 'AES Key Wrap using {}-bit key'.format(key_size) + self.key_size = key_size + + def prepare_key(self, raw_data): + return OctKey.import_key(raw_data) + + def _check_key(self, key): + if len(key) * 8 != self.key_size: + raise ValueError( + 'A key of size {} bits is required.'.format(self.key_size)) + + def wrap(self, enc_alg, headers, key): + cek = enc_alg.generate_cek() + op_key = key.get_op_key('wrapKey') + self._check_key(op_key) + ek = aes_key_wrap(op_key, cek, default_backend()) + return {'ek': ek, 'cek': cek} + + def unwrap(self, enc_alg, ek, headers, key): + op_key = key.get_op_key('unwrapKey') + self._check_key(op_key) + cek = aes_key_unwrap(op_key, ek, default_backend()) + if len(cek) * 8 != enc_alg.CEK_SIZE: + raise ValueError('Invalid "cek" length') + return cek + + +class AESGCMAlgorithm(JWEAlgorithm): + EXTRA_HEADERS = frozenset(['iv', 'tag']) + + def __init__(self, key_size): + self.name = 'A{}GCMKW'.format(key_size) + self.description = 'Key wrapping with AES GCM using {}-bit key'.format(key_size) + self.key_size = key_size + + def prepare_key(self, raw_data): + return OctKey.import_key(raw_data) + + def _check_key(self, key): + if len(key) * 8 != self.key_size: + raise ValueError( + 'A key of size {} bits is required.'.format(self.key_size)) + + def wrap(self, enc_alg, headers, key): + cek = enc_alg.generate_cek() + op_key = key.get_op_key('wrapKey') + self._check_key(op_key) + + #: https://tools.ietf.org/html/rfc7518#section-4.7.1.1 + #: The "iv" (initialization vector) Header Parameter value is the + #: base64url-encoded representation of the 96-bit IV value + iv_size = 96 + iv = os.urandom(iv_size // 8) + + cipher = Cipher(AES(op_key), GCM(iv), backend=default_backend()) + enc = cipher.encryptor() + ek = enc.update(cek) + enc.finalize() + + h = { + 'iv': to_native(urlsafe_b64encode(iv)), + 'tag': to_native(urlsafe_b64encode(enc.tag)) + } + return {'ek': ek, 'cek': cek, 'header': h} + + def unwrap(self, enc_alg, ek, headers, key): + op_key = key.get_op_key('unwrapKey') + self._check_key(op_key) + + iv = headers.get('iv') + if not iv: + raise ValueError('Missing "iv" in headers') + + tag = headers.get('tag') + if not tag: + raise ValueError('Missing "tag" in headers') + + iv = urlsafe_b64decode(to_bytes(iv)) + tag = urlsafe_b64decode(to_bytes(tag)) + + cipher = Cipher(AES(op_key), GCM(iv, tag), backend=default_backend()) + d = cipher.decryptor() + cek = d.update(ek) + d.finalize() + if len(cek) * 8 != enc_alg.CEK_SIZE: + raise ValueError('Invalid "cek" length') + return cek + + +class ECDHAlgorithm(JWEAlgorithm): + EXTRA_HEADERS = ['epk', 'apu', 'apv'] + ALLOWED_KEY_CLS = ECKey + + # https://tools.ietf.org/html/rfc7518#section-4.6 + def __init__(self, key_size=None): + if key_size is None: + self.name = 'ECDH-ES' + self.description = 'ECDH-ES in the Direct Key Agreement mode' + else: + self.name = 'ECDH-ES+A{}KW'.format(key_size) + self.description = ( + 'ECDH-ES using Concat KDF and CEK wrapped ' + 'with A{}KW').format(key_size) + self.key_size = key_size + self.aeskw = AESAlgorithm(key_size) + + def prepare_key(self, raw_data): + if isinstance(raw_data, self.ALLOWED_KEY_CLS): + return raw_data + return ECKey.import_key(raw_data) + + def deliver(self, 
key, pubkey, headers, bit_size): + # AlgorithmID + if self.key_size is None: + alg_id = _u32be_len_input(headers['enc']) + else: + alg_id = _u32be_len_input(headers['alg']) + + # PartyUInfo + apu_info = _u32be_len_input(headers.get('apu'), True) + + # PartyVInfo + apv_info = _u32be_len_input(headers.get('apv'), True) + + # SuppPubInfo + pub_info = struct.pack('>I', bit_size) + + other_info = alg_id + apu_info + apv_info + pub_info + shared_key = key.exchange_shared_key(pubkey) + ckdf = ConcatKDFHash( + algorithm=hashes.SHA256(), + length=bit_size // 8, + otherinfo=other_info, + backend=default_backend() + ) + return ckdf.derive(shared_key) + + def wrap(self, enc_alg, headers, key): + if self.key_size is None: + bit_size = enc_alg.key_size + else: + bit_size = self.key_size + + epk = key.generate_key(key['crv'], is_private=True) + public_key = key.get_op_key('wrapKey') + dk = self.deliver(epk, public_key, headers, bit_size) + + # REQUIRED_JSON_FIELDS contains only public fields + pub_epk = {k: epk[k] for k in epk.REQUIRED_JSON_FIELDS} + pub_epk['kty'] = epk.kty + h = {'epk': pub_epk} + if self.key_size is None: + return {'ek': b'', 'cek': dk, 'header': h} + + kek = self.aeskw.prepare_key(dk) + rv = self.aeskw.wrap(enc_alg, headers, kek) + rv['header'] = h + return rv + + def unwrap(self, enc_alg, ek, headers, key): + if 'epk' not in headers: + raise ValueError('Missing "epk" in headers') + + if self.key_size is None: + bit_size = enc_alg.key_size + else: + bit_size = self.key_size + + epk = key.import_key(headers['epk']) + public_key = epk.get_op_key('wrapKey') + dk = self.deliver(key, public_key, headers, bit_size) + + if self.key_size is None: + return dk + + kek = self.aeskw.prepare_key(dk) + return self.aeskw.unwrap(enc_alg, ek, headers, kek) + + +def _u32be_len_input(s, base64=False): + if not s: + return b'\x00\x00\x00\x00' + if base64: + s = urlsafe_b64decode(to_bytes(s)) + else: + s = to_bytes(s) + return struct.pack('>I', len(s)) + s + + +JWE_ALG_ALGORITHMS = [ + RSAAlgorithm('RSA1_5', 'RSAES-PKCS1-v1_5', padding.PKCS1v15()), + RSAAlgorithm( + 'RSA-OAEP', 'RSAES OAEP using default parameters', + padding.OAEP(padding.MGF1(hashes.SHA1()), hashes.SHA1(), None)), + RSAAlgorithm( + 'RSA-OAEP-256', 'RSAES OAEP using SHA-256 and MGF1 with SHA-256', + padding.OAEP(padding.MGF1(hashes.SHA256()), hashes.SHA256(), None)), + + AESAlgorithm(128), # A128KW + AESAlgorithm(192), # A192KW + AESAlgorithm(256), # A256KW + AESGCMAlgorithm(128), # A128GCMKW + AESGCMAlgorithm(192), # A192GCMKW + AESGCMAlgorithm(256), # A256GCMKW + ECDHAlgorithm(None), # ECDH-ES + ECDHAlgorithm(128), # ECDH-ES+A128KW + ECDHAlgorithm(192), # ECDH-ES+A192KW + ECDHAlgorithm(256), # ECDH-ES+A256KW +] + +# 'PBES2-HS256+A128KW': '', +# 'PBES2-HS384+A192KW': '', +# 'PBES2-HS512+A256KW': '', diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/_cryptography_backends/_jwe_enc.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/_cryptography_backends/_jwe_enc.py new file mode 100644 index 000000000..f955a7c51 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/_cryptography_backends/_jwe_enc.py @@ -0,0 +1,144 @@ +""" + authlib.jose.rfc7518 + ~~~~~~~~~~~~~~~~~~~~ + + Cryptographic Algorithms for Cryptographic Algorithms for Content + Encryption per `Section 5`_. + + .. 
_`Section 5`: https://tools.ietf.org/html/rfc7518#section-5 +""" +import hmac +import hashlib +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import GCM, CBC +from cryptography.hazmat.primitives.padding import PKCS7 +from cryptography.exceptions import InvalidTag +from authlib.jose.rfc7516 import JWEEncAlgorithm +from ..util import encode_int + + +class CBCHS2EncAlgorithm(JWEEncAlgorithm): + # The IV used is a 128-bit value generated randomly or + # pseudo-randomly for use in the cipher. + IV_SIZE = 128 + + def __init__(self, key_size, hash_type): + self.name = 'A{}CBC-HS{}'.format(key_size, hash_type) + tpl = 'AES_{}_CBC_HMAC_SHA_{} authenticated encryption algorithm' + self.description = tpl.format(key_size, hash_type) + + # bit length + self.key_size = key_size + # byte length + self.key_len = key_size // 8 + + self.CEK_SIZE = key_size * 2 + self.hash_alg = getattr(hashlib, 'sha{}'.format(hash_type)) + + def _hmac(self, ciphertext, aad, iv, key): + al = encode_int(len(aad) * 8, 64) + msg = aad + iv + ciphertext + al + d = hmac.new(key, msg, self.hash_alg).digest() + return d[:self.key_len] + + def encrypt(self, msg, aad, iv, key): + """Key Encryption with AES_CBC_HMAC_SHA2. + + :param msg: text to be encrypt in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param key: encrypted key in bytes + :return: (ciphertext, iv, tag) + """ + self.check_iv(iv) + hkey = key[:self.key_len] + ekey = key[self.key_len:] + + pad = PKCS7(AES.block_size).padder() + padded_data = pad.update(msg) + pad.finalize() + + cipher = Cipher(AES(ekey), CBC(iv), backend=default_backend()) + enc = cipher.encryptor() + ciphertext = enc.update(padded_data) + enc.finalize() + tag = self._hmac(ciphertext, aad, iv, hkey) + return ciphertext, tag + + def decrypt(self, ciphertext, aad, iv, tag, key): + """Key Decryption with AES AES_CBC_HMAC_SHA2. + + :param ciphertext: ciphertext in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param tag: authentication tag in bytes + :param key: encrypted key in bytes + :return: message + """ + self.check_iv(iv) + hkey = key[:self.key_len] + dkey = key[self.key_len:] + + _tag = self._hmac(ciphertext, aad, iv, hkey) + if not hmac.compare_digest(_tag, tag): + raise InvalidTag() + + cipher = Cipher(AES(dkey), CBC(iv), backend=default_backend()) + d = cipher.decryptor() + data = d.update(ciphertext) + d.finalize() + unpad = PKCS7(AES.block_size).unpadder() + return unpad.update(data) + unpad.finalize() + + +class GCMEncAlgorithm(JWEEncAlgorithm): + # Use of an IV of size 96 bits is REQUIRED with this algorithm. 
+ # https://tools.ietf.org/html/rfc7518#section-5.3 + IV_SIZE = 96 + + def __init__(self, key_size): + self.name = 'A{}GCM'.format(key_size) + self.description = 'AES GCM using {}-bit key'.format(key_size) + self.key_size = key_size + self.CEK_SIZE = key_size + + def encrypt(self, msg, aad, iv, key): + """Key Encryption with AES GCM + + :param msg: text to be encrypt in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param key: encrypted key in bytes + :return: (ciphertext, iv, tag) + """ + self.check_iv(iv) + cipher = Cipher(AES(key), GCM(iv), backend=default_backend()) + enc = cipher.encryptor() + enc.authenticate_additional_data(aad) + ciphertext = enc.update(msg) + enc.finalize() + return ciphertext, enc.tag + + def decrypt(self, ciphertext, aad, iv, tag, key): + """Key Decryption with AES GCM + + :param ciphertext: ciphertext in bytes + :param aad: additional authenticated data in bytes + :param iv: initialization vector in bytes + :param tag: authentication tag in bytes + :param key: encrypted key in bytes + :return: message + """ + self.check_iv(iv) + cipher = Cipher(AES(key), GCM(iv, tag), backend=default_backend()) + d = cipher.decryptor() + d.authenticate_additional_data(aad) + return d.update(ciphertext) + d.finalize() + + +JWE_ENC_ALGORITHMS = [ + CBCHS2EncAlgorithm(128, 256), # A128CBC-HS256 + CBCHS2EncAlgorithm(192, 384), # A192CBC-HS384 + CBCHS2EncAlgorithm(256, 512), # A256CBC-HS512 + GCMEncAlgorithm(128), # A128GCM + GCMEncAlgorithm(192), # A192GCM + GCMEncAlgorithm(256), # A256GCM +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/_cryptography_backends/_jws.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/_cryptography_backends/_jws.py new file mode 100644 index 000000000..9caee9664 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/_cryptography_backends/_jws.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +""" + authlib.jose.rfc7518 + ~~~~~~~~~~~~~~~~~~~~ + + "alg" (Algorithm) Header Parameter Values for JWS per `Section 3`_. + + .. _`Section 3`: https://tools.ietf.org/html/rfc7518#section-3 +""" + +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric.utils import ( + decode_dss_signature, encode_dss_signature +) +from cryptography.hazmat.primitives.asymmetric.ec import ECDSA +from cryptography.hazmat.primitives.asymmetric import padding +from cryptography.exceptions import InvalidSignature +from authlib.jose.rfc7515 import JWSAlgorithm +from ._keys import RSAKey, ECKey +from ..util import encode_int, decode_int + + +class RSAAlgorithm(JWSAlgorithm): + """RSA using SHA algorithms for JWS. 
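+
+    A minimal signing sketch (illustrative only; the key path and payload are
+    placeholders, not part of this module)::
+
+        from authlib.jose import JsonWebSignature
+
+        jws = JsonWebSignature(algorithms=['RS256'])
+        protected = {'alg': 'RS256'}
+        with open('rsa_private.pem', 'rb') as f:
+            signed = jws.serialize_compact(protected, b'payload', f.read())
+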
Available algorithms: + + - RS256: RSASSA-PKCS1-v1_5 using SHA-256 + - RS384: RSASSA-PKCS1-v1_5 using SHA-384 + - RS512: RSASSA-PKCS1-v1_5 using SHA-512 + """ + SHA256 = hashes.SHA256 + SHA384 = hashes.SHA384 + SHA512 = hashes.SHA512 + + def __init__(self, sha_type): + self.name = 'RS{}'.format(sha_type) + self.description = 'RSASSA-PKCS1-v1_5 using SHA-{}'.format(sha_type) + self.hash_alg = getattr(self, 'SHA{}'.format(sha_type)) + self.padding = padding.PKCS1v15() + + def prepare_key(self, raw_data): + return RSAKey.import_key(raw_data) + + def sign(self, msg, key): + op_key = key.get_op_key('sign') + return op_key.sign(msg, self.padding, self.hash_alg()) + + def verify(self, msg, sig, key): + op_key = key.get_op_key('verify') + try: + op_key.verify(sig, msg, self.padding, self.hash_alg()) + return True + except InvalidSignature: + return False + + +class ECAlgorithm(JWSAlgorithm): + """ECDSA using SHA algorithms for JWS. Available algorithms: + + - ES256: ECDSA using P-256 and SHA-256 + - ES384: ECDSA using P-384 and SHA-384 + - ES512: ECDSA using P-521 and SHA-512 + """ + SHA256 = hashes.SHA256 + SHA384 = hashes.SHA384 + SHA512 = hashes.SHA512 + + def __init__(self, sha_type): + self.name = 'ES{}'.format(sha_type) + self.description = 'ECDSA using P-{} and SHA-{}'.format(sha_type, sha_type) + self.hash_alg = getattr(self, 'SHA{}'.format(sha_type)) + + def prepare_key(self, raw_data): + return ECKey.import_key(raw_data) + + def sign(self, msg, key): + op_key = key.get_op_key('sign') + der_sig = op_key.sign(msg, ECDSA(self.hash_alg())) + r, s = decode_dss_signature(der_sig) + size = key.curve_key_size + return encode_int(r, size) + encode_int(s, size) + + def verify(self, msg, sig, key): + key_size = key.curve_key_size + length = (key_size + 7) // 8 + + if len(sig) != 2 * length: + return False + + r = decode_int(sig[:length]) + s = decode_int(sig[length:]) + der_sig = encode_dss_signature(r, s) + + try: + op_key = key.get_op_key('verify') + op_key.verify(der_sig, msg, ECDSA(self.hash_alg())) + return True + except InvalidSignature: + return False + + +class RSAPSSAlgorithm(JWSAlgorithm): + """RSASSA-PSS using SHA algorithms for JWS. 
Available algorithms: + + - PS256: RSASSA-PSS using SHA-256 and MGF1 with SHA-256 + - PS384: RSASSA-PSS using SHA-384 and MGF1 with SHA-384 + - PS512: RSASSA-PSS using SHA-512 and MGF1 with SHA-512 + """ + SHA256 = hashes.SHA256 + SHA384 = hashes.SHA384 + SHA512 = hashes.SHA512 + + def __init__(self, sha_type): + self.name = 'PS{}'.format(sha_type) + tpl = 'RSASSA-PSS using SHA-{} and MGF1 with SHA-{}' + self.description = tpl.format(sha_type, sha_type) + self.hash_alg = getattr(self, 'SHA{}'.format(sha_type)) + + def prepare_key(self, raw_data): + return RSAKey.import_key(raw_data) + + def sign(self, msg, key): + op_key = key.get_op_key('sign') + return op_key.sign( + msg, + padding.PSS( + mgf=padding.MGF1(self.hash_alg()), + salt_length=self.hash_alg.digest_size + ), + self.hash_alg() + ) + + def verify(self, msg, sig, key): + op_key = key.get_op_key('verify') + try: + op_key.verify( + sig, + msg, + padding.PSS( + mgf=padding.MGF1(self.hash_alg()), + salt_length=self.hash_alg.digest_size + ), + self.hash_alg() + ) + return True + except InvalidSignature: + return False + + +JWS_ALGORITHMS = [ + RSAAlgorithm(256), # RS256 + RSAAlgorithm(384), # RS384 + RSAAlgorithm(512), # RS512 + ECAlgorithm(256), # ES256 + ECAlgorithm(384), # ES384 + ECAlgorithm(512), # ES512 + RSAPSSAlgorithm(256), # PS256 + RSAPSSAlgorithm(384), # PS384 + RSAPSSAlgorithm(512), # PS512 +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/_cryptography_backends/_keys.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/_cryptography_backends/_keys.py new file mode 100644 index 000000000..786a49d41 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/_cryptography_backends/_keys.py @@ -0,0 +1,306 @@ +from cryptography.hazmat.primitives.serialization import ( + Encoding, PrivateFormat, PublicFormat, + BestAvailableEncryption, NoEncryption, +) +from cryptography.hazmat.primitives.asymmetric import rsa +from cryptography.hazmat.primitives.asymmetric.rsa import ( + RSAPublicKey, RSAPrivateKeyWithSerialization, + RSAPrivateNumbers, RSAPublicNumbers, + rsa_recover_prime_factors, rsa_crt_dmp1, rsa_crt_dmq1, rsa_crt_iqmp +) +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.hazmat.primitives.asymmetric.ec import ( + EllipticCurvePublicKey, EllipticCurvePrivateKeyWithSerialization, + EllipticCurvePrivateNumbers, EllipticCurvePublicNumbers, + SECP256R1, SECP384R1, SECP521R1, +) +from cryptography.hazmat.backends import default_backend +from authlib.jose.rfc7517 import Key, load_pem_key +from authlib.common.encoding import to_bytes +from authlib.common.encoding import base64_to_int, int_to_base64 + + +class RSAKey(Key): + """Key class of the ``RSA`` key type.""" + + kty = 'RSA' + RAW_KEY_CLS = (RSAPublicKey, RSAPrivateKeyWithSerialization) + REQUIRED_JSON_FIELDS = ['e', 'n'] + + def as_pem(self, is_private=False, password=None): + """Export key into PEM format bytes. 
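+
+    For example (a sketch; the key size and password are sample values)::
+
+        key = RSAKey.generate_key(2048, is_private=True)
+        pem = key.as_pem(is_private=True, password='a-passphrase')
+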
+ + :param is_private: export private key or public key + :param password: encrypt private key with password + :return: bytes + """ + return export_key(self, is_private=is_private, password=password) + + @staticmethod + def dumps_private_key(raw_key): + numbers = raw_key.private_numbers() + return { + 'n': int_to_base64(numbers.public_numbers.n), + 'e': int_to_base64(numbers.public_numbers.e), + 'd': int_to_base64(numbers.d), + 'p': int_to_base64(numbers.p), + 'q': int_to_base64(numbers.q), + 'dp': int_to_base64(numbers.dmp1), + 'dq': int_to_base64(numbers.dmq1), + 'qi': int_to_base64(numbers.iqmp) + } + + @staticmethod + def dumps_public_key(raw_key): + numbers = raw_key.public_numbers() + return { + 'n': int_to_base64(numbers.n), + 'e': int_to_base64(numbers.e) + } + + @staticmethod + def loads_private_key(obj): + if 'oth' in obj: # pragma: no cover + # https://tools.ietf.org/html/rfc7518#section-6.3.2.7 + raise ValueError('"oth" is not supported yet') + + props = ['p', 'q', 'dp', 'dq', 'qi'] + props_found = [prop in obj for prop in props] + any_props_found = any(props_found) + + if any_props_found and not all(props_found): + raise ValueError( + 'RSA key must include all parameters ' + 'if any are present besides d') + + public_numbers = RSAPublicNumbers( + base64_to_int(obj['e']), base64_to_int(obj['n'])) + + if any_props_found: + numbers = RSAPrivateNumbers( + d=base64_to_int(obj['d']), + p=base64_to_int(obj['p']), + q=base64_to_int(obj['q']), + dmp1=base64_to_int(obj['dp']), + dmq1=base64_to_int(obj['dq']), + iqmp=base64_to_int(obj['qi']), + public_numbers=public_numbers) + else: + d = base64_to_int(obj['d']) + p, q = rsa_recover_prime_factors( + public_numbers.n, d, public_numbers.e) + numbers = RSAPrivateNumbers( + d=d, + p=p, + q=q, + dmp1=rsa_crt_dmp1(d, p), + dmq1=rsa_crt_dmq1(d, q), + iqmp=rsa_crt_iqmp(p, q), + public_numbers=public_numbers) + + return numbers.private_key(default_backend()) + + @staticmethod + def loads_public_key(obj): + numbers = RSAPublicNumbers( + base64_to_int(obj['e']), + base64_to_int(obj['n']) + ) + return numbers.public_key(default_backend()) + + @classmethod + def import_key(cls, raw, options=None): + """Import a key from PEM or dict data.""" + return import_key( + cls, raw, + RSAPublicKey, RSAPrivateKeyWithSerialization, + b'ssh-rsa', options + ) + + @classmethod + def generate_key(cls, key_size=2048, options=None, is_private=False): + if key_size < 512: + raise ValueError('key_size must not be less than 512') + if key_size % 8 != 0: + raise ValueError('Invalid key_size for RSAKey') + raw_key = rsa.generate_private_key( + public_exponent=65537, + key_size=key_size, + backend=default_backend(), + ) + if not is_private: + raw_key = raw_key.public_key() + return cls.import_key(raw_key, options=options) + + +class ECKey(Key): + """Key class of the ``EC`` key type.""" + + kty = 'EC' + DSS_CURVES = { + 'P-256': SECP256R1, + 'P-384': SECP384R1, + 'P-521': SECP521R1, + } + CURVES_DSS = { + SECP256R1.name: 'P-256', + SECP384R1.name: 'P-384', + SECP521R1.name: 'P-521', + } + REQUIRED_JSON_FIELDS = ['crv', 'x', 'y'] + RAW_KEY_CLS = (EllipticCurvePublicKey, EllipticCurvePrivateKeyWithSerialization) + + def as_pem(self, is_private=False, password=None): + """Export key into PEM format bytes. 
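+
+    For example (a sketch; ``P-256`` is just a sample curve, and the peer
+    public key in the comment would come from the other party)::
+
+        key = ECKey.generate_key('P-256', is_private=True)
+        public_pem = key.as_pem(is_private=False)
+        # the same private key can also derive an ECDH shared secret:
+        # shared = key.exchange_shared_key(peer_public_key)
+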
+ + :param is_private: export private key or public key + :param password: encrypt private key with password + :return: bytes + """ + return export_key(self, is_private=is_private, password=password) + + def exchange_shared_key(self, pubkey): + # # used in ECDHAlgorithm + if isinstance(self.raw_key, EllipticCurvePrivateKeyWithSerialization): + return self.raw_key.exchange(ec.ECDH(), pubkey) + raise ValueError('Invalid key for exchanging shared key') + + @property + def curve_key_size(self): + return self.raw_key.curve.key_size + + @classmethod + def loads_private_key(cls, obj): + curve = cls.DSS_CURVES[obj['crv']]() + public_numbers = EllipticCurvePublicNumbers( + base64_to_int(obj['x']), + base64_to_int(obj['y']), + curve, + ) + private_numbers = EllipticCurvePrivateNumbers( + base64_to_int(obj['d']), + public_numbers + ) + return private_numbers.private_key(default_backend()) + + @classmethod + def loads_public_key(cls, obj): + curve = cls.DSS_CURVES[obj['crv']]() + public_numbers = EllipticCurvePublicNumbers( + base64_to_int(obj['x']), + base64_to_int(obj['y']), + curve, + ) + return public_numbers.public_key(default_backend()) + + @classmethod + def dumps_private_key(cls, raw_key): + numbers = raw_key.private_numbers() + return { + 'crv': cls.CURVES_DSS[raw_key.curve.name], + 'x': int_to_base64(numbers.public_numbers.x), + 'y': int_to_base64(numbers.public_numbers.y), + 'd': int_to_base64(numbers.private_value), + } + + @classmethod + def dumps_public_key(cls, raw_key): + numbers = raw_key.public_numbers() + return { + 'crv': cls.CURVES_DSS[numbers.curve.name], + 'x': int_to_base64(numbers.x), + 'y': int_to_base64(numbers.y) + } + + @classmethod + def import_key(cls, raw, options=None): + """Import a key from PEM or dict data.""" + return import_key( + cls, raw, + EllipticCurvePublicKey, EllipticCurvePrivateKeyWithSerialization, + b'ecdsa-sha2-', options + ) + + @classmethod + def generate_key(cls, crv='P-256', options=None, is_private=False): + if crv not in cls.DSS_CURVES: + raise ValueError('Invalid crv value: "{}"'.format(crv)) + raw_key = ec.generate_private_key( + curve=cls.DSS_CURVES[crv](), + backend=default_backend(), + ) + if not is_private: + raw_key = raw_key.public_key() + return cls.import_key(raw_key, options=options) + + +def import_key(cls, raw, public_key_cls, private_key_cls, ssh_type=None, options=None): + if isinstance(raw, cls): + if options is not None: + raw.update(options) + return raw + + payload = None + if isinstance(raw, (public_key_cls, private_key_cls)): + raw_key = raw + elif isinstance(raw, dict): + cls.check_required_fields(raw) + payload = raw + if 'd' in payload: + raw_key = cls.loads_private_key(payload) + else: + raw_key = cls.loads_public_key(payload) + else: + if options is not None: + password = options.get('password') + else: + password = None + raw_key = load_pem_key(raw, ssh_type, password=password) + + if isinstance(raw_key, private_key_cls): + if payload is None: + payload = cls.dumps_private_key(raw_key) + key_type = 'private' + elif isinstance(raw_key, public_key_cls): + if payload is None: + payload = cls.dumps_public_key(raw_key) + key_type = 'public' + else: + raise ValueError('Invalid data for importing key') + + obj = cls(payload) + obj.raw_key = raw_key + obj.key_type = key_type + return obj + + +def export_key(key, encoding=None, is_private=False, password=None): + if encoding is None or encoding == 'PEM': + encoding = Encoding.PEM + elif encoding == 'DER': + encoding = Encoding.DER + else: + raise ValueError('Invalid encoding: 
{!r}'.format(encoding)) + + if is_private: + if key.key_type == 'private': + if password is None: + encryption_algorithm = NoEncryption() + else: + encryption_algorithm = BestAvailableEncryption(to_bytes(password)) + return key.raw_key.private_bytes( + encoding=encoding, + format=PrivateFormat.PKCS8, + encryption_algorithm=encryption_algorithm, + ) + raise ValueError('This is a public key') + + if key.key_type == 'private': + raw_key = key.raw_key.public_key() + else: + raw_key = key.raw_key + + return raw_key.public_bytes( + encoding=encoding, + format=PublicFormat.SubjectPublicKeyInfo, + ) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/jwe_algorithms.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/jwe_algorithms.py new file mode 100644 index 000000000..1e5dc961f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/jwe_algorithms.py @@ -0,0 +1,50 @@ +import zlib +from .oct_key import OctKey +from ._cryptography_backends import JWE_ALG_ALGORITHMS, JWE_ENC_ALGORITHMS +from ..rfc7516 import JWEAlgorithm, JWEZipAlgorithm, JsonWebEncryption + + +class DirectAlgorithm(JWEAlgorithm): + name = 'dir' + description = 'Direct use of a shared symmetric key' + + def prepare_key(self, raw_data): + return OctKey.import_key(raw_data) + + def wrap(self, enc_alg, headers, key): + cek = key.get_op_key('encrypt') + if len(cek) * 8 != enc_alg.CEK_SIZE: + raise ValueError('Invalid "cek" length') + return {'ek': b'', 'cek': cek} + + def unwrap(self, enc_alg, ek, headers, key): + cek = key.get_op_key('decrypt') + if len(cek) * 8 != enc_alg.CEK_SIZE: + raise ValueError('Invalid "cek" length') + return cek + + +class DeflateZipAlgorithm(JWEZipAlgorithm): + name = 'DEF' + description = 'DEFLATE' + + def compress(self, s): + """Compress bytes data with DEFLATE algorithm.""" + data = zlib.compress(s) + # drop gzip headers and tail + return data[2:-4] + + def decompress(self, s): + """Decompress DEFLATE bytes data.""" + return zlib.decompress(s, -zlib.MAX_WBITS) + + +def register_jwe_rfc7518(): + JsonWebEncryption.register_algorithm(DirectAlgorithm()) + JsonWebEncryption.register_algorithm(DeflateZipAlgorithm()) + + for algorithm in JWE_ALG_ALGORITHMS: + JsonWebEncryption.register_algorithm(algorithm) + + for algorithm in JWE_ENC_ALGORITHMS: + JsonWebEncryption.register_algorithm(algorithm) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/jws_algorithms.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/jws_algorithms.py new file mode 100644 index 000000000..637298554 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/jws_algorithms.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +""" + authlib.jose.rfc7518.jws_algorithms + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + "alg" (Algorithm) Header Parameter Values for JWS per `Section 3`_. + + .. _`Section 3`: https://tools.ietf.org/html/rfc7518#section-3 +""" + +import hmac +import hashlib +from .oct_key import OctKey +from ._cryptography_backends import JWS_ALGORITHMS +from ..rfc7515 import JWSAlgorithm, JsonWebSignature + + +class NoneAlgorithm(JWSAlgorithm): + name = 'none' + description = 'No digital signature or MAC performed' + + def prepare_key(self, raw_data): + return None + + def sign(self, msg, key): + return b'' + + def verify(self, msg, sig, key): + return False + + +class HMACAlgorithm(JWSAlgorithm): + """HMAC using SHA algorithms for JWS. 
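+
+    A minimal sketch with a shared secret (the secret value is a placeholder)::
+
+        from authlib.jose import JsonWebSignature
+
+        jws = JsonWebSignature(algorithms=['HS256'])
+        signed = jws.serialize_compact({'alg': 'HS256'}, b'payload', 'a-shared-secret')
+        data = jws.deserialize_compact(signed, 'a-shared-secret')
+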
Available algorithms: + + - HS256: HMAC using SHA-256 + - HS384: HMAC using SHA-384 + - HS512: HMAC using SHA-512 + """ + SHA256 = hashlib.sha256 + SHA384 = hashlib.sha384 + SHA512 = hashlib.sha512 + + def __init__(self, sha_type): + self.name = 'HS{}'.format(sha_type) + self.description = 'HMAC using SHA-{}'.format(sha_type) + self.hash_alg = getattr(self, 'SHA{}'.format(sha_type)) + + def prepare_key(self, raw_data): + return OctKey.import_key(raw_data) + + def sign(self, msg, key): + # it is faster than the one in cryptography + op_key = key.get_op_key('sign') + return hmac.new(op_key, msg, self.hash_alg).digest() + + def verify(self, msg, sig, key): + op_key = key.get_op_key('verify') + v_sig = hmac.new(op_key, msg, self.hash_alg).digest() + return hmac.compare_digest(sig, v_sig) + + +def register_jws_rfc7518(): + JsonWebSignature.register_algorithm(NoneAlgorithm()) + JsonWebSignature.register_algorithm(HMACAlgorithm(256)) + JsonWebSignature.register_algorithm(HMACAlgorithm(384)) + JsonWebSignature.register_algorithm(HMACAlgorithm(512)) + for algorithm in JWS_ALGORITHMS: + JsonWebSignature.register_algorithm(algorithm) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/oct_key.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/oct_key.py new file mode 100644 index 000000000..a095ada43 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/oct_key.py @@ -0,0 +1,48 @@ +from authlib.common.encoding import ( + to_bytes, to_unicode, + urlsafe_b64encode, urlsafe_b64decode, +) +from authlib.common.security import generate_token +from authlib.jose.rfc7517 import Key + + +class OctKey(Key): + """Key class of the ``oct`` key type.""" + + kty = 'oct' + REQUIRED_JSON_FIELDS = ['k'] + + def get_op_key(self, key_op): + self.check_key_op(key_op) + return self.raw_key + + @classmethod + def import_key(cls, raw, options=None): + """Import a key from bytes, string, or dict data.""" + if isinstance(raw, dict): + cls.check_required_fields(raw) + payload = raw + raw_key = urlsafe_b64decode(to_bytes(payload['k'])) + else: + raw_key = to_bytes(raw) + k = to_unicode(urlsafe_b64encode(raw_key)) + payload = {'k': k} + + if options is not None: + payload.update(options) + + obj = cls(payload) + obj.raw_key = raw_key + obj.key_type = 'secret' + return obj + + @classmethod + def generate_key(cls, key_size=256, options=None, is_private=False): + """Generate a ``OctKey`` with the given bit size.""" + if not is_private: + raise ValueError('oct key can not be generated as public') + + if key_size % 8 != 0: + raise ValueError('Invalid bit size for oct key') + + return cls.import_key(generate_token(key_size // 8), options) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/util.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/util.py new file mode 100644 index 000000000..d2d13ec1f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7518/util.py @@ -0,0 +1,12 @@ +import binascii + + +def encode_int(num, bits): + length = ((bits + 7) // 8) * 2 + padded_hex = '%0*x' % (length, num) + big_endian = binascii.a2b_hex(padded_hex.encode('ascii')) + return big_endian + + +def decode_int(b): + return int(binascii.b2a_hex(b), 16) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7519/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7519/__init__.py new file mode 100644 index 
000000000..b98efc941 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7519/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +""" + authlib.jose.rfc7519 + ~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + JSON Web Token (JWT). + + https://tools.ietf.org/html/rfc7519 +""" + +from .jwt import JsonWebToken +from .claims import BaseClaims, JWTClaims + + +__all__ = ['JsonWebToken', 'BaseClaims', 'JWTClaims'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7519/claims.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7519/claims.py new file mode 100644 index 000000000..71908d5b7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7519/claims.py @@ -0,0 +1,214 @@ +import time +from authlib.jose.errors import ( + MissingClaimError, + InvalidClaimError, + ExpiredTokenError, + InvalidTokenError, +) + + +class BaseClaims(dict): + """Payload claims for JWT, which contains a validate interface. + + :param payload: the payload dict of JWT + :param header: the header dict of JWT + :param options: validate options + :param params: other params + + An example on ``options`` parameter, the format is inspired by + `OpenID Connect Claims`_:: + + { + "iss": { + "essential": True, + "values": ["https://example.com", "https://example.org"] + }, + "sub": { + "essential": True + "value": "248289761001" + }, + "jti": { + "validate": validate_jti + } + } + + .. _`OpenID Connect Claims`: + http://openid.net/specs/openid-connect-core-1_0.html#IndividualClaimsRequests + """ + REGISTERED_CLAIMS = [] + + def __init__(self, payload, header, options=None, params=None): + super(BaseClaims, self).__init__(payload) + self.header = header + self.options = options or {} + self.params = params or {} + + def __getattr__(self, key): + try: + return object.__getattribute__(self, key) + except AttributeError as error: + if key in self.REGISTERED_CLAIMS: + return self.get(key) + raise error + + def _validate_essential_claims(self): + for k in self.options: + if self.options[k].get('essential') and k not in self: + raise MissingClaimError(k) + + def _validate_claim_value(self, claim_name): + option = self.options.get(claim_name) + if not option: + return + + value = self.get(claim_name) + option_value = option.get('value') + if option_value and value != option_value: + raise InvalidClaimError(claim_name) + + option_values = option.get('values') + if option_values and value not in option_values: + raise InvalidClaimError(claim_name) + + validate = option.get('validate') + if validate and not validate(self, value): + raise InvalidClaimError(claim_name) + + def get_registered_claims(self): + rv = {} + for k in self.REGISTERED_CLAIMS: + if k in self: + rv[k] = self[k] + return rv + + +class JWTClaims(BaseClaims): + REGISTERED_CLAIMS = ['iss', 'sub', 'aud', 'exp', 'nbf', 'iat', 'jti'] + + def validate(self, now=None, leeway=0): + """Validate everything in claims payload.""" + self._validate_essential_claims() + + if now is None: + now = int(time.time()) + + self.validate_iss() + self.validate_sub() + self.validate_aud() + self.validate_exp(now, leeway) + self.validate_nbf(now, leeway) + self.validate_iat(now, leeway) + self.validate_jti() + + def validate_iss(self): + """The "iss" (issuer) claim identifies the principal that issued the + JWT. The processing of this claim is generally application specific. + The "iss" value is a case-sensitive string containing a StringOrURI + value. 
Use of this claim is OPTIONAL. + """ + self._validate_claim_value('iss') + + def validate_sub(self): + """The "sub" (subject) claim identifies the principal that is the + subject of the JWT. The claims in a JWT are normally statements + about the subject. The subject value MUST either be scoped to be + locally unique in the context of the issuer or be globally unique. + The processing of this claim is generally application specific. The + "sub" value is a case-sensitive string containing a StringOrURI + value. Use of this claim is OPTIONAL. + """ + self._validate_claim_value('sub') + + def validate_aud(self): + """The "aud" (audience) claim identifies the recipients that the JWT is + intended for. Each principal intended to process the JWT MUST + identify itself with a value in the audience claim. If the principal + processing the claim does not identify itself with a value in the + "aud" claim when this claim is present, then the JWT MUST be + rejected. In the general case, the "aud" value is an array of case- + sensitive strings, each containing a StringOrURI value. In the + special case when the JWT has one audience, the "aud" value MAY be a + single case-sensitive string containing a StringOrURI value. The + interpretation of audience values is generally application specific. + Use of this claim is OPTIONAL. + """ + aud_option = self.options.get('aud') + aud = self.get('aud') + if not aud_option or not aud: + return + + aud_values = aud_option.get('values') + if not aud_values: + aud_value = aud_option.get('value') + if aud_value: + aud_values = [aud_value] + + if not aud_values: + return + + if isinstance(self['aud'], list): + aud_list = self['aud'] + else: + aud_list = [self['aud']] + + if not any([v in aud_list for v in aud_values]): + raise InvalidClaimError('aud') + + def validate_exp(self, now, leeway): + """The "exp" (expiration time) claim identifies the expiration time on + or after which the JWT MUST NOT be accepted for processing. The + processing of the "exp" claim requires that the current date/time + MUST be before the expiration date/time listed in the "exp" claim. + Implementers MAY provide for some small leeway, usually no more than + a few minutes, to account for clock skew. Its value MUST be a number + containing a NumericDate value. Use of this claim is OPTIONAL. + """ + if 'exp' in self: + exp = self['exp'] + if not _validate_numeric_time(exp): + raise InvalidClaimError('exp') + if exp < (now - leeway): + raise ExpiredTokenError() + + def validate_nbf(self, now, leeway): + """The "nbf" (not before) claim identifies the time before which the JWT + MUST NOT be accepted for processing. The processing of the "nbf" + claim requires that the current date/time MUST be after or equal to + the not-before date/time listed in the "nbf" claim. Implementers MAY + provide for some small leeway, usually no more than a few minutes, to + account for clock skew. Its value MUST be a number containing a + NumericDate value. Use of this claim is OPTIONAL. + """ + if 'nbf' in self: + nbf = self['nbf'] + if not _validate_numeric_time(nbf): + raise InvalidClaimError('nbf') + if nbf > (now + leeway): + raise InvalidTokenError() + + def validate_iat(self, now, leeway): + """The "iat" (issued at) claim identifies the time at which the JWT was + issued. This claim can be used to determine the age of the JWT. Its + value MUST be a number containing a NumericDate value. Use of this + claim is OPTIONAL. 
+ """ + if 'iat' in self: + iat = self['iat'] + if not _validate_numeric_time(iat): + raise InvalidClaimError('iat') + + def validate_jti(self): + """The "jti" (JWT ID) claim provides a unique identifier for the JWT. + The identifier value MUST be assigned in a manner that ensures that + there is a negligible probability that the same value will be + accidentally assigned to a different data object; if the application + uses multiple issuers, collisions MUST be prevented among values + produced by different issuers as well. The "jti" claim can be used + to prevent the JWT from being replayed. The "jti" value is a case- + sensitive string. Use of this claim is OPTIONAL. + """ + self._validate_claim_value('jti') + + +def _validate_numeric_time(s): + return isinstance(s, (int, float)) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7519/jwt.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7519/jwt.py new file mode 100644 index 000000000..18dc07b1e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc7519/jwt.py @@ -0,0 +1,138 @@ +import re +import datetime +import calendar +from authlib.common.encoding import ( + text_types, to_bytes, to_unicode, + json_loads, json_dumps, +) +from .claims import JWTClaims +from ..errors import DecodeError, InsecureClaimError +from ..rfc7515 import JsonWebSignature +from ..rfc7516 import JsonWebEncryption +from ..rfc7517 import KeySet + + +class JsonWebToken(object): + SENSITIVE_NAMES = ('password', 'token', 'secret', 'secret_key') + # Thanks to sentry SensitiveDataFilter + SENSITIVE_VALUES = re.compile(r'|'.join([ + # http://www.richardsramblings.com/regex/credit-card-numbers/ + r'\b(?:3[47]\d|(?:4\d|5[1-5]|65)\d{2}|6011)\d{12}\b', + # various private keys + r'-----BEGIN[A-Z ]+PRIVATE KEY-----.+-----END[A-Z ]+PRIVATE KEY-----', + # social security numbers (US) + r'^\b(?!(000|666|9))\d{3}-(?!00)\d{2}-(?!0000)\d{4}\b', + ]), re.DOTALL) + + def __init__(self, algorithms=None, private_headers=None): + self._jws = JsonWebSignature(algorithms, private_headers=private_headers) + self._jwe = JsonWebEncryption(algorithms, private_headers=private_headers) + + def check_sensitive_data(self, payload): + """Check if payload contains sensitive information.""" + for k in payload: + # check claims key name + if k in self.SENSITIVE_NAMES: + raise InsecureClaimError(k) + + # check claims values + v = payload[k] + if isinstance(v, text_types) and self.SENSITIVE_VALUES.search(v): + raise InsecureClaimError(k) + + def encode(self, header, payload, key, check=True): + """Encode a JWT with the given header, payload and key. + + :param header: A dict of JWS header + :param payload: A dict to be encoded + :param key: key used to sign the signature + :param check: check if sensitive data in payload + :return: bytes + """ + header['typ'] = 'JWT' + + for k in ['exp', 'iat', 'nbf']: + # convert datetime into timestamp + claim = payload.get(k) + if isinstance(claim, datetime.datetime): + payload[k] = calendar.timegm(claim.utctimetuple()) + + if check: + self.check_sensitive_data(payload) + + key = prepare_raw_key(key, header) + if callable(key): + key = key(header, payload) + + text = to_bytes(json_dumps(payload)) + if 'enc' in header: + return self._jwe.serialize_compact(header, text, key) + else: + return self._jws.serialize_compact(header, text, key) + + def decode(self, s, key, claims_cls=None, + claims_options=None, claims_params=None): + """Decode the JWS with the given key. 
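+
+    For example, with a shared ``oct`` key (the secret and claims below are
+    placeholders)::
+
+        jwt = JsonWebToken(['HS256'])
+        s = jwt.encode({'alg': 'HS256'}, {'iss': 'https://example.com'}, 'a-secret')
+        claims = jwt.decode(s, 'a-secret')
+        claims.validate()
+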
This is similar with + :meth:`verify`, except that it will raise BadSignatureError when + signature doesn't match. + + :param s: text of JWT + :param key: key used to verify the signature + :param claims_cls: class to be used for JWT claims + :param claims_options: `options` parameters for claims_cls + :param claims_params: `params` parameters for claims_cls + :return: claims_cls instance + :raise: BadSignatureError + """ + if claims_cls is None: + claims_cls = JWTClaims + + def load_key(header, payload): + key_func = prepare_raw_key(key, header) + if callable(key_func): + return key_func(header, payload) + return key_func + + s = to_bytes(s) + dot_count = s.count(b'.') + if dot_count == 2: + data = self._jws.deserialize_compact(s, load_key, decode_payload) + elif dot_count == 4: + data = self._jwe.deserialize_compact(s, load_key, decode_payload) + else: + raise DecodeError('Invalid input segments length') + return claims_cls( + data['payload'], data['header'], + options=claims_options, + params=claims_params, + ) + + +def decode_payload(bytes_payload): + try: + payload = json_loads(to_unicode(bytes_payload)) + except ValueError: + raise DecodeError('Invalid payload value') + if not isinstance(payload, dict): + raise DecodeError('Invalid payload type') + return payload + + +def prepare_raw_key(raw, headers): + if isinstance(raw, KeySet): + return raw.find_by_kid(headers.get('kid')) + + if isinstance(raw, text_types) and \ + raw.startswith('{') and raw.endswith('}'): + raw = json_loads(raw) + elif isinstance(raw, (tuple, list)): + raw = {'keys': raw} + + if isinstance(raw, dict) and 'keys' in raw: + keys = raw['keys'] + kid = headers.get('kid') + for k in keys: + if k.get('kid') == kid: + return k + raise ValueError('Invalid JSON Web Key Set') + return raw diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc8037/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc8037/__init__.py new file mode 100644 index 000000000..46a6831ed --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc8037/__init__.py @@ -0,0 +1,5 @@ +from .okp_key import OKPKey +from ._jws_cryptography import register_jws_rfc8037 + + +__all__ = ['register_jws_rfc8037', 'OKPKey'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc8037/_jws_cryptography.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc8037/_jws_cryptography.py new file mode 100644 index 000000000..86989fd5c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc8037/_jws_cryptography.py @@ -0,0 +1,32 @@ +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.primitives.asymmetric.ed25519 import ( + Ed25519PublicKey, Ed25519PrivateKey +) +from authlib.jose.rfc7515 import JWSAlgorithm, JsonWebSignature +from .okp_key import OKPKey + + +class EdDSAAlgorithm(JWSAlgorithm): + name = 'EdDSA' + description = 'Edwards-curve Digital Signature Algorithm for JWS' + private_key_cls = Ed25519PrivateKey + public_key_cls = Ed25519PublicKey + + def prepare_key(self, raw_data): + return OKPKey.import_key(raw_data) + + def sign(self, msg, key): + op_key = key.get_op_key('sign') + return op_key.sign(msg) + + def verify(self, msg, sig, key): + op_key = key.get_op_key('verify') + try: + op_key.verify(sig, msg) + return True + except InvalidSignature: + return False + + +def register_jws_rfc8037(): + JsonWebSignature.register_algorithm(EdDSAAlgorithm()) diff --git 
a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc8037/okp_key.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc8037/okp_key.py new file mode 100644 index 000000000..d8438b3bf --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/rfc8037/okp_key.py @@ -0,0 +1,130 @@ +from cryptography.hazmat.primitives.asymmetric.ed25519 import ( + Ed25519PublicKey, Ed25519PrivateKey +) +from cryptography.hazmat.primitives.asymmetric.ed448 import ( + Ed448PublicKey, Ed448PrivateKey +) +from cryptography.hazmat.primitives.asymmetric.x25519 import ( + X25519PublicKey, X25519PrivateKey +) +from cryptography.hazmat.primitives.asymmetric.x448 import ( + X448PublicKey, X448PrivateKey +) +from cryptography.hazmat.primitives.serialization import ( + Encoding, PublicFormat, PrivateFormat, NoEncryption +) +from authlib.common.encoding import ( + to_unicode, to_bytes, + urlsafe_b64decode, urlsafe_b64encode, +) +from authlib.jose.rfc7517 import Key +from ..rfc7518 import import_key, export_key + + +PUBLIC_KEYS_MAP = { + 'Ed25519': Ed25519PublicKey, + 'Ed448': Ed448PublicKey, + 'X25519': X25519PublicKey, + 'X448': X448PublicKey, +} +PRIVATE_KEYS_MAP = { + 'Ed25519': Ed25519PrivateKey, + 'Ed448': Ed448PrivateKey, + 'X25519': X25519PrivateKey, + 'X448': X448PrivateKey, +} +PUBLIC_KEY_TUPLE = tuple(PUBLIC_KEYS_MAP.values()) +PRIVATE_KEY_TUPLE = tuple(PRIVATE_KEYS_MAP.values()) + + +class OKPKey(Key): + """Key class of the ``OKP`` key type.""" + + kty = 'OKP' + REQUIRED_JSON_FIELDS = ['crv', 'x'] + RAW_KEY_CLS = ( + Ed25519PublicKey, Ed25519PrivateKey, + Ed448PublicKey, Ed448PrivateKey, + X25519PublicKey, X25519PrivateKey, + X448PublicKey, X448PrivateKey, + ) + + def as_pem(self, is_private=False, password=None): + """Export key into PEM format bytes. 
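+
+    For example (a sketch; ``Ed25519`` is one of the supported curves, and such
+    a key pairs with the ``EdDSA`` JWS algorithm registered above)::
+
+        key = OKPKey.generate_key('Ed25519', is_private=True)
+        pem = key.as_pem(is_private=True)
+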
+ + :param is_private: export private key or public key + :param password: encrypt private key with password + :return: bytes + """ + return export_key(self, is_private=is_private, password=password) + + def exchange_shared_key(self, pubkey): + # used in ECDHAlgorithm + if isinstance(self.raw_key, (X25519PrivateKey, X448PrivateKey)): + return self.raw_key.exchange(pubkey) + raise ValueError('Invalid key for exchanging shared key') + + @property + def curve_key_size(self): + raise NotImplementedError() + + @staticmethod + def get_key_curve(key): + if isinstance(key, (Ed25519PublicKey, Ed25519PrivateKey)): + return 'Ed25519' + elif isinstance(key, (Ed448PublicKey, Ed448PrivateKey)): + return 'Ed448' + elif isinstance(key, (X25519PublicKey, X25519PrivateKey)): + return 'X25519' + elif isinstance(key, (X448PublicKey, X448PrivateKey)): + return 'X448' + + @staticmethod + def loads_private_key(obj): + crv_key = PRIVATE_KEYS_MAP[obj['crv']] + d_bytes = urlsafe_b64decode(to_bytes(obj['d'])) + return crv_key.from_private_bytes(d_bytes) + + @staticmethod + def loads_public_key(obj): + crv_key = PUBLIC_KEYS_MAP[obj['crv']] + x_bytes = urlsafe_b64decode(to_bytes(obj['x'])) + return crv_key.from_public_bytes(x_bytes) + + @staticmethod + def dumps_private_key(raw_key): + obj = OKPKey.dumps_public_key(raw_key.public_key()) + d_bytes = raw_key.private_bytes( + Encoding.Raw, + PrivateFormat.Raw, + NoEncryption() + ) + obj['d'] = to_unicode(urlsafe_b64encode(d_bytes)) + return obj + + @staticmethod + def dumps_public_key(raw_key): + x_bytes = raw_key.public_bytes(Encoding.Raw, PublicFormat.Raw) + return { + 'crv': OKPKey.get_key_curve(raw_key), + 'x': to_unicode(urlsafe_b64encode(x_bytes)), + } + + @classmethod + def import_key(cls, raw, options=None): + """Import a key from PEM or dict data.""" + return import_key( + cls, raw, + PUBLIC_KEY_TUPLE, PRIVATE_KEY_TUPLE, + b'ssh-ed25519', options + ) + + @classmethod + def generate_key(cls, crv='Ed25519', options=None, is_private=False): + if crv not in PRIVATE_KEYS_MAP: + raise ValueError('Invalid crv value: "{}"'.format(crv)) + private_key_cls = PRIVATE_KEYS_MAP[crv] + raw_key = private_key_cls.generate() + if not is_private: + raw_key = raw_key.public_key() + return cls.import_key(raw_key, options=options) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/util.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/util.py new file mode 100644 index 000000000..08414cb96 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/jose/util.py @@ -0,0 +1,23 @@ +import binascii +from authlib.common.encoding import urlsafe_b64decode, json_loads + + +def extract_header(header_segment, error_cls): + header_data = extract_segment(header_segment, error_cls, 'header') + + try: + header = json_loads(header_data.decode('utf-8')) + except ValueError as e: + raise error_cls('Invalid header string: {}'.format(e)) + + if not isinstance(header, dict): + raise error_cls('Header must be a json object') + return header + + +def extract_segment(segment, error_cls, name='payload'): + try: + return urlsafe_b64decode(segment) + except (TypeError, binascii.Error): + msg = 'Invalid {} padding'.format(name) + raise error_cls(msg) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/__init__.py new file mode 100644 index 000000000..af1ba0792 --- /dev/null +++ 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/__init__.py @@ -0,0 +1,36 @@ +# coding: utf-8 + +from .rfc5849 import ( + OAuth1Request, + ClientAuth, + SIGNATURE_HMAC_SHA1, + SIGNATURE_RSA_SHA1, + SIGNATURE_PLAINTEXT, + SIGNATURE_TYPE_HEADER, + SIGNATURE_TYPE_QUERY, + SIGNATURE_TYPE_BODY, + ClientMixin, + TemporaryCredentialMixin, + TokenCredentialMixin, + TemporaryCredential, + AuthorizationServer, + ResourceProtector, +) + +__all__ = [ + 'OAuth1Request', + 'ClientAuth', + 'SIGNATURE_HMAC_SHA1', + 'SIGNATURE_RSA_SHA1', + 'SIGNATURE_PLAINTEXT', + 'SIGNATURE_TYPE_HEADER', + 'SIGNATURE_TYPE_QUERY', + 'SIGNATURE_TYPE_BODY', + + 'ClientMixin', + 'TemporaryCredentialMixin', + 'TokenCredentialMixin', + 'TemporaryCredential', + 'AuthorizationServer', + 'ResourceProtector', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/client.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/client.py new file mode 100644 index 000000000..7715711b5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/client.py @@ -0,0 +1,189 @@ +# -*- coding: utf-8 -*- +from authlib.common.urls import ( + url_decode, + add_params_to_uri, + urlparse, +) +from authlib.common.encoding import json_loads +from .rfc5849 import ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_TYPE_HEADER, + ClientAuth, +) + + +class OAuth1Client(object): + auth_class = ClientAuth + + def __init__(self, session, client_id, client_secret=None, + token=None, token_secret=None, + redirect_uri=None, rsa_key=None, verifier=None, + signature_method=SIGNATURE_HMAC_SHA1, + signature_type=SIGNATURE_TYPE_HEADER, + force_include_body=False, **kwargs): + if not client_id: + raise ValueError('Missing "client_id"') + + self.session = session + self.auth = self.auth_class( + client_id, client_secret=client_secret, + token=token, token_secret=token_secret, + redirect_uri=redirect_uri, + signature_method=signature_method, + signature_type=signature_type, + rsa_key=rsa_key, + verifier=verifier, + force_include_body=force_include_body + ) + self._kwargs = kwargs + + @property + def redirect_uri(self): + return self.auth.redirect_uri + + @redirect_uri.setter + def redirect_uri(self, uri): + self.auth.redirect_uri = uri + + @property + def token(self): + return dict( + oauth_token=self.auth.token, + oauth_token_secret=self.auth.token_secret, + oauth_verifier=self.auth.verifier + ) + + @token.setter + def token(self, token): + """This token setter is designed for an easy integration for + OAuthClient. Make sure both OAuth1Session and OAuth2Session + have token setters. + """ + if token is None: + self.auth.token = None + self.auth.token_secret = None + self.auth.verifier = None + elif 'oauth_token' in token: + self.auth.token = token['oauth_token'] + if 'oauth_token_secret' in token: + self.auth.token_secret = token['oauth_token_secret'] + if 'oauth_verifier' in token: + self.auth.verifier = token['oauth_verifier'] + else: + message = 'oauth_token is missing: {!r}'.format(token) + self.handle_error('missing_token', message) + + def create_authorization_url(self, url, request_token=None, **kwargs): + """Create an authorization URL by appending request_token and optional + kwargs to url. + + This is the second step in the OAuth 1 workflow. The user should be + redirected to this authorization URL, grant access to you, and then + be redirected back to you. The redirection back can either be specified + during client registration or by supplying a callback URI per request. 
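+
+    A typical sequence looks roughly like this (sketch only; the endpoint URLs
+    are placeholders and ``OAuth1Session`` is the requests-based subclass of
+    this client)::
+
+        session = OAuth1Session(client_id, client_secret,
+                                redirect_uri='https://printer.example.com/ready')
+        session.fetch_request_token('https://photos.example.net/initiate')
+        url = session.create_authorization_url('https://photos.example.net/authorize')
+        # redirect the resource owner to ``url``; once redirected back:
+        # session.parse_authorization_response(callback_response_url)
+        # session.fetch_access_token('https://photos.example.net/token')
+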
+ + :param url: The authorization endpoint URL. + :param request_token: The previously obtained request token. + :param kwargs: Optional parameters to append to the URL. + :returns: The authorization URL with new parameters embedded. + """ + kwargs['oauth_token'] = request_token or self.auth.token + if self.auth.redirect_uri: + kwargs['oauth_callback'] = self.auth.redirect_uri + + self.auth.redirect_uri = None + self.auth.realm = None + return add_params_to_uri(url, kwargs.items()) + + def fetch_request_token(self, url, realm=None, **kwargs): + """Method for fetching an access token from the token endpoint. + + This is the first step in the OAuth 1 workflow. A request token is + obtained by making a signed post request to url. The token is then + parsed from the application/x-www-form-urlencoded response and ready + to be used to construct an authorization url. + + :param url: Request Token endpoint. + :param realm: A string/list/tuple of realm for Authorization header. + :param kwargs: Extra parameters to include for fetching token. + :return: A Request Token dict. + + Note, ``realm`` can also be configured when session created:: + + session = OAuth1Session(client_id, client_secret, ..., realm='') + """ + if realm is None: + realm = self._kwargs.get('realm', None) + if realm: + if isinstance(realm, (tuple, list)): + realm = ' '.join(realm) + self.auth.realm = realm + else: + self.auth.realm = None + + return self._fetch_token(url, **kwargs) + + def fetch_access_token(self, url, verifier=None, **kwargs): + """Method for fetching an access token from the token endpoint. + + This is the final step in the OAuth 1 workflow. An access token is + obtained using all previously obtained credentials, including the + verifier from the authorization step. + + :param url: Access Token endpoint. + :param verifier: A verifier string to prove authorization was granted. + :param kwargs: Extra parameters to include for fetching access token. + :return: A token dict. + """ + if verifier: + self.auth.verifier = verifier + if not self.auth.verifier: + self.handle_error('missing_verifier', 'Missing "verifier" value') + return self._fetch_token(url, **kwargs) + + def parse_authorization_response(self, url): + """Extract parameters from the post authorization redirect + response URL. + + :param url: The full URL that resulted from the user being redirected + back from the OAuth provider to you, the client. + :returns: A dict of parameters extracted from the URL. + """ + token = dict(url_decode(urlparse.urlparse(url).query)) + self.token = token + return token + + def _fetch_token(self, url, **kwargs): + resp = self.session.post(url, auth=self.auth, **kwargs) + token = self.parse_response_token(resp.status_code, resp.text) + self.token = token + self.auth.verifier = None + return token + + def parse_response_token(self, status_code, text): + if status_code >= 400: + message = ( + "Token request failed with code {}, " + "response was '{}'." + ).format(status_code, text) + self.handle_error('fetch_token_denied', message) + + try: + text = text.strip() + if text.startswith('{'): + token = json_loads(text) + else: + token = dict(url_decode(text)) + except (TypeError, ValueError) as e: + error = ( + "Unable to decode token from token response. " + "This is commonly caused by an unsuccessful request where" + " a non urlencoded error message is returned. 
" + "The decoding error was {}" + ).format(e) + raise ValueError(error) + return token + + @staticmethod + def handle_error(error_type, error_description): + raise ValueError('{}: {}'.format(error_type, error_description)) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/errors.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/errors.py new file mode 100644 index 000000000..e7770da53 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/errors.py @@ -0,0 +1,3 @@ +# flake8: noqa + +from .rfc5849.errors import * diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/__init__.py new file mode 100644 index 000000000..1f029fbbe --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/__init__.py @@ -0,0 +1,45 @@ +""" + authlib.oauth1.rfc5849 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of The OAuth 1.0 Protocol. + + https://tools.ietf.org/html/rfc5849 +""" + +from .wrapper import OAuth1Request +from .client_auth import ClientAuth +from .signature import ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_RSA_SHA1, + SIGNATURE_PLAINTEXT, + SIGNATURE_TYPE_HEADER, + SIGNATURE_TYPE_QUERY, + SIGNATURE_TYPE_BODY, +) +from .models import ( + ClientMixin, + TemporaryCredentialMixin, + TokenCredentialMixin, + TemporaryCredential, +) +from .authorization_server import AuthorizationServer +from .resource_protector import ResourceProtector + +__all__ = [ + 'OAuth1Request', + 'ClientAuth', + 'SIGNATURE_HMAC_SHA1', + 'SIGNATURE_RSA_SHA1', + 'SIGNATURE_PLAINTEXT', + 'SIGNATURE_TYPE_HEADER', + 'SIGNATURE_TYPE_QUERY', + 'SIGNATURE_TYPE_BODY', + + 'ClientMixin', + 'TemporaryCredentialMixin', + 'TokenCredentialMixin', + 'TemporaryCredential', + 'AuthorizationServer', + 'ResourceProtector', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/authorization_server.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/authorization_server.py new file mode 100644 index 000000000..be9b985be --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/authorization_server.py @@ -0,0 +1,357 @@ +from authlib.common.urls import is_valid_url, add_params_to_uri +from .base_server import BaseServer +from .errors import ( + OAuth1Error, + InvalidRequestError, + MissingRequiredParameterError, + InvalidClientError, + InvalidTokenError, + AccessDeniedError, + MethodNotAllowedError, +) + + +class AuthorizationServer(BaseServer): + TOKEN_RESPONSE_HEADER = [ + ('Content-Type', 'application/x-www-form-urlencoded'), + ('Cache-Control', 'no-store'), + ('Pragma', 'no-cache'), + ] + + TEMPORARY_CREDENTIALS_METHOD = 'POST' + + def _get_client(self, request): + client = self.get_client_by_id(request.client_id) + request.client = client + return client + + def create_oauth1_request(self, request): + raise NotImplementedError() + + def handle_response(self, status_code, payload, headers): + raise NotImplementedError() + + def handle_error_response(self, error): + return self.handle_response( + error.status_code, + error.get_body(), + error.get_headers() + ) + + def validate_temporary_credentials_request(self, request): + """Validate HTTP request for temporary credentials.""" + + # The client obtains a set of temporary credentials from the server by + # making an authenticated (Section 3) HTTP "POST" 
request to the + # Temporary Credential Request endpoint (unless the server advertises + # another HTTP request method for the client to use). + if request.method.upper() != self.TEMPORARY_CREDENTIALS_METHOD: + raise MethodNotAllowedError() + + # REQUIRED parameter + if not request.client_id: + raise MissingRequiredParameterError('oauth_consumer_key') + + # REQUIRED parameter + oauth_callback = request.redirect_uri + if not request.redirect_uri: + raise MissingRequiredParameterError('oauth_callback') + + # An absolute URI or + # other means (the parameter value MUST be set to "oob" + if oauth_callback != 'oob' and not is_valid_url(oauth_callback): + raise InvalidRequestError('Invalid "oauth_callback" value') + + client = self._get_client(request) + if not client: + raise InvalidClientError() + + self.validate_timestamp_and_nonce(request) + self.validate_oauth_signature(request) + return request + + def create_temporary_credentials_response(self, request=None): + """Validate temporary credentials token request and create response + for temporary credentials token. Assume the endpoint of temporary + credentials request is ``https://photos.example.net/initiate``: + + .. code-block:: http + + POST /initiate HTTP/1.1 + Host: photos.example.net + Authorization: OAuth realm="Photos", + oauth_consumer_key="dpf43f3p2l4k3l03", + oauth_signature_method="HMAC-SHA1", + oauth_timestamp="137131200", + oauth_nonce="wIjqoS", + oauth_callback="http%3A%2F%2Fprinter.example.com%2Fready", + oauth_signature="74KNZJeDHnMBp0EMJ9ZHt%2FXKycU%3D" + + The server validates the request and replies with a set of temporary + credentials in the body of the HTTP response: + + .. code-block:: http + + HTTP/1.1 200 OK + Content-Type: application/x-www-form-urlencoded + + oauth_token=hh5s93j4hdidpola&oauth_token_secret=hdhd0244k9j7ao03& + oauth_callback_confirmed=true + + :param request: OAuth1Request instance. + :returns: (status_code, body, headers) + """ + try: + request = self.create_oauth1_request(request) + self.validate_temporary_credentials_request(request) + except OAuth1Error as error: + return self.handle_error_response(error) + + credential = self.create_temporary_credential(request) + payload = [ + ('oauth_token', credential.get_oauth_token()), + ('oauth_token_secret', credential.get_oauth_token_secret()), + ('oauth_callback_confirmed', True) + ] + return self.handle_response(200, payload, self.TOKEN_RESPONSE_HEADER) + + def validate_authorization_request(self, request): + """Validate the request for resource owner authorization.""" + if not request.token: + raise MissingRequiredParameterError('oauth_token') + + credential = self.get_temporary_credential(request) + if not credential: + raise InvalidTokenError() + + # assign credential for later use + request.credential = credential + return request + + def create_authorization_response(self, request, grant_user=None): + """Validate authorization request and create authorization response. + Assume the endpoint for authorization request is + ``https://photos.example.net/authorize``, the client redirects Jane's + user-agent to the server's Resource Owner Authorization endpoint to + obtain Jane's approval for accessing her private photos:: + + https://photos.example.net/authorize?oauth_token=hh5s93j4hdidpola + + The server requests Jane to sign in using her username and password + and if successful, asks her to approve granting 'printer.example.com' + access to her private photos. 
Jane approves the request and her + user-agent is redirected to the callback URI provided by the client + in the previous request (line breaks are for display purposes only):: + + http://printer.example.com/ready? + oauth_token=hh5s93j4hdidpola&oauth_verifier=hfdp7dh39dks9884 + + :param request: OAuth1Request instance. + :param grant_user: if granted, pass the grant user, otherwise None. + :returns: (status_code, body, headers) + """ + request = self.create_oauth1_request(request) + # authorize endpoint should try catch this error + self.validate_authorization_request(request) + + temporary_credentials = request.credential + redirect_uri = temporary_credentials.get_redirect_uri() + if not redirect_uri or redirect_uri == 'oob': + client_id = temporary_credentials.get_client_id() + client = self.get_client_by_id(client_id) + redirect_uri = client.get_default_redirect_uri() + + if grant_user is None: + error = AccessDeniedError() + location = add_params_to_uri(redirect_uri, error.get_body()) + return self.handle_response(302, '', [('Location', location)]) + + request.user = grant_user + verifier = self.create_authorization_verifier(request) + + params = [ + ('oauth_token', request.token), + ('oauth_verifier', verifier) + ] + location = add_params_to_uri(redirect_uri, params) + return self.handle_response(302, '', [('Location', location)]) + + def validate_token_request(self, request): + """Validate request for issuing token.""" + + if not request.client_id: + raise MissingRequiredParameterError('oauth_consumer_key') + + client = self._get_client(request) + if not client: + raise InvalidClientError() + + if not request.token: + raise MissingRequiredParameterError('oauth_token') + + token = self.get_temporary_credential(request) + if not token: + raise InvalidTokenError() + + verifier = request.oauth_params.get('oauth_verifier') + if not verifier: + raise MissingRequiredParameterError('oauth_verifier') + + if not token.check_verifier(verifier): + raise InvalidRequestError('Invalid "oauth_verifier"') + + request.credential = token + self.validate_timestamp_and_nonce(request) + self.validate_oauth_signature(request) + return request + + def create_token_response(self, request): + """Validate token request and create token response. Assuming the + endpoint of token request is ``https://photos.example.net/token``, + the callback request informs the client that Jane completed the + authorization process. The client then requests a set of token + credentials using its temporary credentials (over a secure Transport + Layer Security (TLS) channel): + + .. code-block:: http + + POST /token HTTP/1.1 + Host: photos.example.net + Authorization: OAuth realm="Photos", + oauth_consumer_key="dpf43f3p2l4k3l03", + oauth_token="hh5s93j4hdidpola", + oauth_signature_method="HMAC-SHA1", + oauth_timestamp="137131201", + oauth_nonce="walatlh", + oauth_verifier="hfdp7dh39dks9884", + oauth_signature="gKgrFCywp7rO0OXSjdot%2FIHF7IU%3D" + + The server validates the request and replies with a set of token + credentials in the body of the HTTP response: + + .. code-block:: http + + HTTP/1.1 200 OK + Content-Type: application/x-www-form-urlencoded + + oauth_token=nnch734d00sl2jdk&oauth_token_secret=pfkkdhi9sl3r4s00 + + :param request: OAuth1Request instance. 
+ :returns: (status_code, body, headers) + """ + try: + request = self.create_oauth1_request(request) + except OAuth1Error as error: + return self.handle_error_response(error) + + try: + self.validate_token_request(request) + except OAuth1Error as error: + self.delete_temporary_credential(request) + return self.handle_error_response(error) + + credential = self.create_token_credential(request) + payload = [ + ('oauth_token', credential.get_oauth_token()), + ('oauth_token_secret', credential.get_oauth_token_secret()), + ] + self.delete_temporary_credential(request) + return self.handle_response(200, payload, self.TOKEN_RESPONSE_HEADER) + + def create_temporary_credential(self, request): + """Generate and save a temporary credential into database or cache. + A temporary credential is used for exchanging token credential. This + method should be re-implemented:: + + def create_temporary_credential(self, request): + oauth_token = generate_token(36) + oauth_token_secret = generate_token(48) + temporary_credential = TemporaryCredential( + oauth_token=oauth_token, + oauth_token_secret=oauth_token_secret, + client_id=request.client_id, + redirect_uri=request.redirect_uri, + ) + # if the credential has a save method + temporary_credential.save() + return temporary_credential + + :param request: OAuth1Request instance + :return: TemporaryCredential instance + """ + raise NotImplementedError() + + def get_temporary_credential(self, request): + """Get the temporary credential from database or cache. A temporary + credential should share the same methods as described in models of + ``TemporaryCredentialMixin``:: + + def get_temporary_credential(self, request): + key = 'a-key-prefix:{}'.format(request.token) + data = cache.get(key) + # TemporaryCredential shares methods from TemporaryCredentialMixin + return TemporaryCredential(data) + + :param request: OAuth1Request instance + :return: TemporaryCredential instance + """ + raise NotImplementedError() + + def delete_temporary_credential(self, request): + """Delete temporary credential from database or cache. For instance, + if temporary credential is saved in cache:: + + def delete_temporary_credential(self, request): + key = 'a-key-prefix:{}'.format(request.token) + cache.delete(key) + + :param request: OAuth1Request instance + """ + raise NotImplementedError() + + def create_authorization_verifier(self, request): + """Create and bind ``oauth_verifier`` to temporary credential. It + could be re-implemented in this way:: + + def create_authorization_verifier(self, request): + verifier = generate_token(36) + + temporary_credential = request.credential + user_id = request.user.get_user_id() + + temporary_credential.user_id = user_id + temporary_credential.oauth_verifier = verifier + # if the credential has a save method + temporary_credential.save() + + # remember to return the verifier + return verifier + + :param request: OAuth1Request instance + :return: A string of ``oauth_verifier`` + """ + raise NotImplementedError() + + def create_token_credential(self, request): + """Create and save token credential into database. 
This method would + be re-implemented like this:: + + def create_token_credential(self, request): + oauth_token = generate_token(36) + oauth_token_secret = generate_token(48) + temporary_credential = request.credential + + token_credential = TokenCredential( + oauth_token=oauth_token, + oauth_token_secret=oauth_token_secret, + client_id=temporary_credential.get_client_id(), + user_id=temporary_credential.get_user_id() + ) + # if the credential has a save method + token_credential.save() + return token_credential + + :param request: OAuth1Request instance + :return: TokenCredential instance + """ + raise NotImplementedError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/base_server.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/base_server.py new file mode 100644 index 000000000..46898bb20 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/base_server.py @@ -0,0 +1,119 @@ +import time +from .signature import ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_PLAINTEXT, + SIGNATURE_RSA_SHA1, +) +from .signature import ( + verify_hmac_sha1, + verify_plaintext, + verify_rsa_sha1, +) +from .errors import ( + InvalidRequestError, + MissingRequiredParameterError, + UnsupportedSignatureMethodError, + InvalidNonceError, + InvalidSignatureError, +) + + +class BaseServer(object): + SIGNATURE_METHODS = { + SIGNATURE_HMAC_SHA1: verify_hmac_sha1, + SIGNATURE_RSA_SHA1: verify_rsa_sha1, + SIGNATURE_PLAINTEXT: verify_plaintext, + } + SUPPORTED_SIGNATURE_METHODS = [SIGNATURE_HMAC_SHA1] + EXPIRY_TIME = 300 + + @classmethod + def register_signature_method(cls, name, verify): + """Extend signature method verification. + + :param name: A string to represent signature method. + :param verify: A function to verify signature. + + The ``verify`` method accept ``OAuth1Request`` as parameter:: + + def verify_custom_method(request): + # verify this request, return True or False + return True + + Server.register_signature_method('custom-name', verify_custom_method) + """ + cls.SIGNATURE_METHODS[name] = verify + + def validate_timestamp_and_nonce(self, request): + """Validate ``oauth_timestamp`` and ``oauth_nonce`` in HTTP request. + + :param request: OAuth1Request instance + """ + timestamp = request.oauth_params.get('oauth_timestamp') + nonce = request.oauth_params.get('oauth_nonce') + + if request.signature_method == SIGNATURE_PLAINTEXT: + # The parameters MAY be omitted when using the "PLAINTEXT" + # signature method + if not timestamp and not nonce: + return + + if not timestamp: + raise MissingRequiredParameterError('oauth_timestamp') + + try: + # The timestamp value MUST be a positive integer + timestamp = int(timestamp) + if timestamp < 0: + raise InvalidRequestError('Invalid "oauth_timestamp" value') + + if self.EXPIRY_TIME and time.time() - timestamp > self.EXPIRY_TIME: + raise InvalidRequestError('Invalid "oauth_timestamp" value') + except (ValueError, TypeError): + raise InvalidRequestError('Invalid "oauth_timestamp" value') + + if not nonce: + raise MissingRequiredParameterError('oauth_nonce') + + if self.exists_nonce(nonce, request): + raise InvalidNonceError() + + def validate_oauth_signature(self, request): + """Validate ``oauth_signature`` from HTTP request. 
+ + :param request: OAuth1Request instance + """ + method = request.signature_method + if not method: + raise MissingRequiredParameterError('oauth_signature_method') + + if method not in self.SUPPORTED_SIGNATURE_METHODS: + raise UnsupportedSignatureMethodError() + + if not request.signature: + raise MissingRequiredParameterError('oauth_signature') + + verify = self.SIGNATURE_METHODS.get(method) + if not verify: + raise UnsupportedSignatureMethodError() + + if not verify(request): + raise InvalidSignatureError() + + def get_client_by_id(self, client_id): + """Get client instance with the given ``client_id``. + + :param client_id: A string of client_id + :return: Client instance + """ + raise NotImplementedError() + + def exists_nonce(self, nonce, request): + """The nonce value MUST be unique across all requests with the same + timestamp, client credentials, and token combinations. + + :param nonce: A string value of ``oauth_nonce`` + :param request: OAuth1Request instance + :return: Boolean + """ + raise NotImplementedError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/client_auth.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/client_auth.py new file mode 100644 index 000000000..504a3523e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/client_auth.py @@ -0,0 +1,184 @@ +import time +from authlib.common.security import generate_token +from authlib.common.urls import extract_params +from authlib.common.encoding import to_native +from .wrapper import OAuth1Request +from .signature import ( + SIGNATURE_HMAC_SHA1, + SIGNATURE_PLAINTEXT, + SIGNATURE_RSA_SHA1, + SIGNATURE_TYPE_HEADER, + SIGNATURE_TYPE_BODY, + SIGNATURE_TYPE_QUERY, +) +from .signature import ( + sign_hmac_sha1, + sign_rsa_sha1, + sign_plaintext +) +from .parameters import ( + prepare_form_encoded_body, + prepare_headers, + prepare_request_uri_query, +) + + +CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' +CONTENT_TYPE_MULTI_PART = 'multipart/form-data' + + +class ClientAuth(object): + SIGNATURE_METHODS = { + SIGNATURE_HMAC_SHA1: sign_hmac_sha1, + SIGNATURE_RSA_SHA1: sign_rsa_sha1, + SIGNATURE_PLAINTEXT: sign_plaintext, + } + + @classmethod + def register_signature_method(cls, name, sign): + """Extend client signature methods. + + :param name: A string to represent signature method. + :param sign: A function to generate signature. + + The ``sign`` method accept 2 parameters:: + + def custom_sign_method(client, request): + # client is the instance of Client. 
+ return 'your-signed-string' + + Client.register_signature_method('custom-name', custom_sign_method) + """ + cls.SIGNATURE_METHODS[name] = sign + + def __init__(self, client_id, client_secret=None, + token=None, token_secret=None, + redirect_uri=None, rsa_key=None, verifier=None, + signature_method=SIGNATURE_HMAC_SHA1, + signature_type=SIGNATURE_TYPE_HEADER, + realm=None, force_include_body=False): + self.client_id = client_id + self.client_secret = client_secret + self.token = token + self.token_secret = token_secret + self.redirect_uri = redirect_uri + self.signature_method = signature_method + self.signature_type = signature_type + self.rsa_key = rsa_key + self.verifier = verifier + self.realm = realm + self.force_include_body = force_include_body + + def get_oauth_signature(self, method, uri, headers, body): + """Get an OAuth signature to be used in signing a request + + To satisfy `section 3.4.1.2`_ item 2, if the request argument's + headers dict attribute contains a Host item, its value will + replace any netloc part of the request argument's uri attribute + value. + + .. _`section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2 + """ + sign = self.SIGNATURE_METHODS.get(self.signature_method) + if not sign: + raise ValueError('Invalid signature method.') + + request = OAuth1Request(method, uri, body=body, headers=headers) + return sign(self, request) + + def get_oauth_params(self, nonce, timestamp): + oauth_params = [ + ('oauth_nonce', nonce), + ('oauth_timestamp', timestamp), + ('oauth_version', '1.0'), + ('oauth_signature_method', self.signature_method), + ('oauth_consumer_key', self.client_id), + ] + if self.token: + oauth_params.append(('oauth_token', self.token)) + if self.redirect_uri: + oauth_params.append(('oauth_callback', self.redirect_uri)) + if self.verifier: + oauth_params.append(('oauth_verifier', self.verifier)) + return oauth_params + + def _render(self, uri, headers, body, oauth_params): + if self.signature_type == SIGNATURE_TYPE_HEADER: + headers = prepare_headers(oauth_params, headers, realm=self.realm) + elif self.signature_type == SIGNATURE_TYPE_BODY: + if CONTENT_TYPE_FORM_URLENCODED in headers.get('Content-Type', ''): + decoded_body = extract_params(body) or [] + body = prepare_form_encoded_body(oauth_params, decoded_body) + headers['Content-Type'] = CONTENT_TYPE_FORM_URLENCODED + elif self.signature_type == SIGNATURE_TYPE_QUERY: + uri = prepare_request_uri_query(oauth_params, uri) + else: + raise ValueError('Unknown signature type specified.') + return uri, headers, body + + def sign(self, method, uri, headers, body, nonce=None, timestamp=None): + """Sign the HTTP request, add OAuth parameters and signature. + + :param method: HTTP method of the request. + :param uri: URI of the HTTP request. + :param body: Body payload of the HTTP request. + :param headers: Headers of the HTTP request. + :param nonce: A string to represent nonce value. If not configured, + this method will generate one for you. + :param timestamp: Current timestamp. If not configured, this method + will generate one for you. 
+ :return: uri, headers, body + """ + if nonce is None: + nonce = generate_nonce() + if timestamp is None: + timestamp = generate_timestamp() + if body is None: + body = '' + + # transform int to str + timestamp = str(timestamp) + + if headers is None: + headers = {} + + oauth_params = self.get_oauth_params(nonce, timestamp) + uri, headers, body = self._render(uri, headers, body, oauth_params) + + sig = self.get_oauth_signature(method, uri, headers, body) + oauth_params.append(('oauth_signature', sig)) + + uri, headers, body = self._render(uri, headers, body, oauth_params) + return uri, headers, body + + def prepare(self, method, uri, headers, body): + """Add OAuth parameters to the request. + + Parameters may be included from the body if the content-type is + urlencoded, if no content type is set, a guess is made. + """ + content_type = to_native(headers.get('Content-Type', '')) + if self.signature_type == SIGNATURE_TYPE_BODY: + content_type = CONTENT_TYPE_FORM_URLENCODED + elif not content_type and extract_params(body): + content_type = CONTENT_TYPE_FORM_URLENCODED + + if CONTENT_TYPE_FORM_URLENCODED in content_type: + headers['Content-Type'] = CONTENT_TYPE_FORM_URLENCODED + uri, headers, body = self.sign(method, uri, headers, body) + elif self.force_include_body: + # To allow custom clients to work on non form encoded bodies. + uri, headers, body = self.sign(method, uri, headers, body) + else: + # Omit body data in the signing of non form-encoded requests + uri, headers, _ = self.sign(method, uri, headers, '') + body = '' + return uri, headers, body + + +def generate_nonce(): + return generate_token() + + +def generate_timestamp(): + return str(int(time.time())) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/errors.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/errors.py new file mode 100644 index 000000000..14918331f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/errors.py @@ -0,0 +1,100 @@ +""" + authlib.oauth1.rfc5849.errors + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + RFC5849 has no definition on errors. This module is designed by + Authlib based on OAuth 1.0a `Section 10`_ with some changes. + + .. 
_`Section 10`: https://oauth.net/core/1.0a/#rfc.section.10 +""" +from authlib.common.errors import AuthlibHTTPError +from authlib.common.security import is_secure_transport + + +class OAuth1Error(AuthlibHTTPError): + def __init__(self, description=None, uri=None, status_code=None): + super(OAuth1Error, self).__init__(None, description, uri, status_code) + + def get_headers(self): + """Get a list of headers.""" + return [ + ('Content-Type', 'application/x-www-form-urlencoded'), + ('Cache-Control', 'no-store'), + ('Pragma', 'no-cache') + ] + + +class InsecureTransportError(OAuth1Error): + error = 'insecure_transport' + + def get_error_description(self): + return self.gettext('OAuth 2 MUST utilize https.') + + @classmethod + def check(cls, uri): + if not is_secure_transport(uri): + raise cls() + + +class InvalidRequestError(OAuth1Error): + error = 'invalid_request' + + +class UnsupportedParameterError(OAuth1Error): + error = 'unsupported_parameter' + + +class UnsupportedSignatureMethodError(OAuth1Error): + error = 'unsupported_signature_method' + + +class MissingRequiredParameterError(OAuth1Error): + error = 'missing_required_parameter' + + def __init__(self, key): + super(MissingRequiredParameterError, self).__init__() + self._key = key + + def get_error_description(self): + return self.gettext( + 'missing "%(key)s" in parameters') % dict(key=self._key) + + +class DuplicatedOAuthProtocolParameterError(OAuth1Error): + error = 'duplicated_oauth_protocol_parameter' + + +class InvalidClientError(OAuth1Error): + error = 'invalid_client' + status_code = 401 + + +class InvalidTokenError(OAuth1Error): + error = 'invalid_token' + status_code = 401 + + def get_error_description(self): + return self.gettext('Invalid or expired "oauth_token" in parameters') + + +class InvalidSignatureError(OAuth1Error): + error = 'invalid_signature' + status_code = 401 + + +class InvalidNonceError(OAuth1Error): + error = 'invalid_nonce' + status_code = 401 + + +class AccessDeniedError(OAuth1Error): + error = 'access_denied' + + def get_error_description(self): + return self.gettext( + 'The resource owner or authorization server denied the request') + + +class MethodNotAllowedError(OAuth1Error): + error = 'method_not_allowed' + status_code = 405 diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/models.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/models.py new file mode 100644 index 000000000..76befe9d3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/models.py @@ -0,0 +1,109 @@ + +class ClientMixin(object): + def get_default_redirect_uri(self): + """A method to get client default redirect_uri. For instance, the + database table for client has a column called ``default_redirect_uri``:: + + def get_default_redirect_uri(self): + return self.default_redirect_uri + + :return: A URL string + """ + raise NotImplementedError() + + def get_client_secret(self): + """A method to return the client_secret of this client. For instance, + the database table has a column called ``client_secret``:: + + def get_client_secret(self): + return self.client_secret + """ + raise NotImplementedError() + + def get_rsa_public_key(self): + """A method to get the RSA public key for RSA-SHA1 signature method. 
+ For instance, the value is saved on column ``rsa_public_key``:: + + def get_rsa_public_key(self): + return self.rsa_public_key + """ + raise NotImplementedError() + + +class TokenCredentialMixin(object): + def get_oauth_token(self): + """A method to get the value of ``oauth_token``. For instance, the + database table has a column called ``oauth_token``:: + + def get_oauth_token(self): + return self.oauth_token + + :return: A string + """ + raise NotImplementedError() + + def get_oauth_token_secret(self): + """A method to get the value of ``oauth_token_secret``. For instance, + the database table has a column called ``oauth_token_secret``:: + + def get_oauth_token_secret(self): + return self.oauth_token_secret + + :return: A string + """ + raise NotImplementedError() + + +class TemporaryCredentialMixin(TokenCredentialMixin): + def get_client_id(self): + """A method to get the client_id associated with this credential. + For instance, the table in the database has a column ``client_id``:: + + def get_client_id(self): + return self.client_id + """ + raise NotImplementedError() + + def get_redirect_uri(self): + """A method to get temporary credential's ``oauth_callback``. + For instance, the database table for temporary credential has a + column called ``oauth_callback``:: + + def get_redirect_uri(self): + return self.oauth_callback + + :return: A URL string + """ + raise NotImplementedError() + + def check_verifier(self, verifier): + """A method to check if the given verifier matches this temporary + credential. For instance that this temporary credential has recorded + the value in database as column ``oauth_verifier``:: + + def check_verifier(self, verifier): + return self.oauth_verifier == verifier + + :return: Boolean + """ + raise NotImplementedError() + + +class TemporaryCredential(dict, TemporaryCredentialMixin): + def get_client_id(self): + return self.get('client_id') + + def get_user_id(self): + return self.get('user_id') + + def get_redirect_uri(self): + return self.get('oauth_callback') + + def check_verifier(self, verifier): + return self.get('oauth_verifier') == verifier + + def get_oauth_token(self): + return self.get('oauth_token') + + def get_oauth_token_secret(self): + return self.get('oauth_token_secret') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/parameters.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/parameters.py new file mode 100644 index 000000000..4746aeaad --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/parameters.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- + +""" + authlib.spec.rfc5849.parameters + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + This module contains methods related to `section 3.5`_ of the OAuth 1.0a spec. + + .. _`section 3.5`: https://tools.ietf.org/html/rfc5849#section-3.5 +""" +from authlib.common.urls import urlparse, url_encode, extract_params +from .util import escape + + +def prepare_headers(oauth_params, headers=None, realm=None): + """**Prepare the Authorization header.** + Per `section 3.5.1`_ of the spec. + + Protocol parameters can be transmitted using the HTTP "Authorization" + header field as defined by `RFC2617`_ with the auth-scheme name set to + "OAuth" (case insensitive). 
+ + For example:: + + Authorization: OAuth realm="Photos", + oauth_consumer_key="dpf43f3p2l4k3l03", + oauth_signature_method="HMAC-SHA1", + oauth_timestamp="137131200", + oauth_nonce="wIjqoS", + oauth_callback="http%3A%2F%2Fprinter.example.com%2Fready", + oauth_signature="74KNZJeDHnMBp0EMJ9ZHt%2FXKycU%3D", + oauth_version="1.0" + + .. _`section 3.5.1`: https://tools.ietf.org/html/rfc5849#section-3.5.1 + .. _`RFC2617`: https://tools.ietf.org/html/rfc2617 + """ + headers = headers or {} + + # step 1, 2, 3 in Section 3.5.1 + header_parameters = ', '.join([ + '{0}="{1}"'.format(escape(k), escape(v)) for k, v in oauth_params + if k.startswith('oauth_') + ]) + + # 4. The OPTIONAL "realm" parameter MAY be added and interpreted per + # `RFC2617 section 1.2`_. + # + # .. _`RFC2617 section 1.2`: https://tools.ietf.org/html/rfc2617#section-1.2 + if realm: + # NOTE: realm should *not* be escaped + header_parameters = 'realm="{}", '.format(realm) + header_parameters + + # the auth-scheme name set to "OAuth" (case insensitive). + headers['Authorization'] = 'OAuth {}'.format(header_parameters) + return headers + + +def _append_params(oauth_params, params): + """Append OAuth params to an existing set of parameters. + + Both params and oauth_params is must be lists of 2-tuples. + + Per `section 3.5.2`_ and `3.5.3`_ of the spec. + + .. _`section 3.5.2`: https://tools.ietf.org/html/rfc5849#section-3.5.2 + .. _`3.5.3`: https://tools.ietf.org/html/rfc5849#section-3.5.3 + + """ + merged = list(params) + merged.extend(oauth_params) + # The request URI / entity-body MAY include other request-specific + # parameters, in which case, the protocol parameters SHOULD be appended + # following the request-specific parameters, properly separated by an "&" + # character (ASCII code 38) + merged.sort(key=lambda i: i[0].startswith('oauth_')) + return merged + + +def prepare_form_encoded_body(oauth_params, body): + """Prepare the Form-Encoded Body. + + Per `section 3.5.2`_ of the spec. + + .. _`section 3.5.2`: https://tools.ietf.org/html/rfc5849#section-3.5.2 + + """ + # append OAuth params to the existing body + return url_encode(_append_params(oauth_params, body)) + + +def prepare_request_uri_query(oauth_params, uri): + """Prepare the Request URI Query. + + Per `section 3.5.3`_ of the spec. + + .. 
_`section 3.5.3`: https://tools.ietf.org/html/rfc5849#section-3.5.3 + + """ + # append OAuth params to the existing set of query components + sch, net, path, par, query, fra = urlparse.urlparse(uri) + query = url_encode( + _append_params(oauth_params, extract_params(query) or [])) + return urlparse.urlunparse((sch, net, path, par, query, fra)) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/resource_protector.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/resource_protector.py new file mode 100644 index 000000000..2b5d7819c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/resource_protector.py @@ -0,0 +1,41 @@ +from .base_server import BaseServer +from .wrapper import OAuth1Request +from .errors import ( + MissingRequiredParameterError, + InvalidClientError, + InvalidTokenError, +) + + +class ResourceProtector(BaseServer): + def validate_request(self, method, uri, body, headers): + request = OAuth1Request(method, uri, body, headers) + + if not request.client_id: + raise MissingRequiredParameterError('oauth_consumer_key') + + client = self.get_client_by_id(request.client_id) + if not client: + raise InvalidClientError() + request.client = client + + if not request.token: + raise MissingRequiredParameterError('oauth_token') + + token = self.get_token_credential(request) + if not token: + raise InvalidTokenError() + + request.credential = token + self.validate_timestamp_and_nonce(request) + self.validate_oauth_signature(request) + return request + + def get_token_credential(self, request): + """Fetch the token credential from data store like a database, + framework should implement this function. + + :param request: OAuth1Request instance + :return: Token model instance + """ + raise NotImplementedError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/rsa.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/rsa.py new file mode 100644 index 000000000..3785b0f79 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/rsa.py @@ -0,0 +1,29 @@ +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.serialization import ( + load_pem_private_key, load_pem_public_key +) +from cryptography.hazmat.primitives.asymmetric import padding +from cryptography.exceptions import InvalidSignature +from authlib.common.encoding import to_bytes + + +def sign_sha1(msg, rsa_private_key): + key = load_pem_private_key( + to_bytes(rsa_private_key), + password=None, + backend=default_backend() + ) + return key.sign(msg, padding.PKCS1v15(), hashes.SHA1()) + + +def verify_sha1(sig, msg, rsa_public_key): + key = load_pem_public_key( + to_bytes(rsa_public_key), + backend=default_backend() + ) + try: + key.verify(sig, msg, padding.PKCS1v15(), hashes.SHA1()) + return True + except InvalidSignature: + return False diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/signature.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/signature.py new file mode 100644 index 000000000..6ba67e2d5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/signature.py @@ -0,0 +1,387 @@ +# -*- coding: utf-8 -*- +""" + authlib.oauth1.rfc5849.signature + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of `section 
3.4`_ of the spec. + + .. _`section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4 +""" +import binascii +import hashlib +import hmac +from authlib.common.urls import urlparse +from authlib.common.encoding import to_unicode, to_bytes +from .util import escape, unescape + +SIGNATURE_HMAC_SHA1 = "HMAC-SHA1" +SIGNATURE_RSA_SHA1 = "RSA-SHA1" +SIGNATURE_PLAINTEXT = "PLAINTEXT" + +SIGNATURE_TYPE_HEADER = 'HEADER' +SIGNATURE_TYPE_QUERY = 'QUERY' +SIGNATURE_TYPE_BODY = 'BODY' + + +def construct_base_string(method, uri, params, host=None): + """Generate signature base string from request, per `Section 3.4.1`_. + + For example, the HTTP request:: + + POST /request?b5=%3D%253D&a3=a&c%40=&a2=r%20b HTTP/1.1 + Host: example.com + Content-Type: application/x-www-form-urlencoded + Authorization: OAuth realm="Example", + oauth_consumer_key="9djdj82h48djs9d2", + oauth_token="kkk9d7dh3k39sjv7", + oauth_signature_method="HMAC-SHA1", + oauth_timestamp="137131201", + oauth_nonce="7d8f3e4a", + oauth_signature="bYT5CMsGcbgUdFHObYMEfcx6bsw%3D" + + c2&a3=2+q + + is represented by the following signature base string (line breaks + are for display purposes only):: + + POST&http%3A%2F%2Fexample.com%2Frequest&a2%3Dr%2520b%26a3%3D2%2520q + %26a3%3Da%26b5%3D%253D%25253D%26c%2540%3D%26c2%3D%26oauth_consumer_ + key%3D9djdj82h48djs9d2%26oauth_nonce%3D7d8f3e4a%26oauth_signature_m + ethod%3DHMAC-SHA1%26oauth_timestamp%3D137131201%26oauth_token%3Dkkk + 9d7dh3k39sjv7 + + .. _`Section 3.4.1`: https://tools.ietf.org/html/rfc5849#section-3.4.1 + """ + + # Create base string URI per Section 3.4.1.2 + base_string_uri = normalize_base_string_uri(uri, host) + + # Cleanup parameter sources per Section 3.4.1.3.1 + unescaped_params = [] + for k, v in params: + # The "oauth_signature" parameter MUST be excluded from the signature + if k in ('oauth_signature', 'realm'): + continue + + # ensure oauth params are unescaped + if k.startswith('oauth_'): + v = unescape(v) + unescaped_params.append((k, v)) + + # Normalize parameters per Section 3.4.1.3.2 + normalized_params = normalize_parameters(unescaped_params) + + # construct base string + return '&'.join([ + escape(method.upper()), + escape(base_string_uri), + escape(normalized_params), + ]) + + +def normalize_base_string_uri(uri, host=None): + """Normalize Base String URI per `Section 3.4.1.2`_. + + For example, the HTTP request:: + + GET /r%20v/X?id=123 HTTP/1.1 + Host: EXAMPLE.COM:80 + + is represented by the base string URI: "http://example.com/r%20v/X". + + In another example, the HTTPS request:: + + GET /?q=1 HTTP/1.1 + Host: www.example.net:8080 + + is represented by the base string URI: "https://www.example.net:8080/". + + .. _`Section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2 + + The host argument overrides the netloc part of the uri argument. + """ + uri = to_unicode(uri) + scheme, netloc, path, params, query, fragment = urlparse.urlparse(uri) + + # The scheme, authority, and path of the request resource URI `RFC3986` + # are included by constructing an "http" or "https" URI representing + # the request resource (without the query or fragment) as follows: + # + # .. _`RFC3986`: https://tools.ietf.org/html/rfc3986 + + if not scheme or not netloc: + raise ValueError('uri must include a scheme and netloc') + + # Per `RFC 2616 section 5.1.2`_: + # + # Note that the absolute path cannot be empty; if none is present in + # the original URI, it MUST be given as "/" (the server root). + # + # .. 
_`RFC 2616 section 5.1.2`: https://tools.ietf.org/html/rfc2616#section-5.1.2 + if not path: + path = '/' + + # 1. The scheme and host MUST be in lowercase. + scheme = scheme.lower() + netloc = netloc.lower() + + # 2. The host and port values MUST match the content of the HTTP + # request "Host" header field. + if host is not None: + netloc = host.lower() + + # 3. The port MUST be included if it is not the default port for the + # scheme, and MUST be excluded if it is the default. Specifically, + # the port MUST be excluded when making an HTTP request `RFC2616`_ + # to port 80 or when making an HTTPS request `RFC2818`_ to port 443. + # All other non-default port numbers MUST be included. + # + # .. _`RFC2616`: https://tools.ietf.org/html/rfc2616 + # .. _`RFC2818`: https://tools.ietf.org/html/rfc2818 + default_ports = ( + ('http', '80'), + ('https', '443'), + ) + if ':' in netloc: + host, port = netloc.split(':', 1) + if (scheme, port) in default_ports: + netloc = host + + return urlparse.urlunparse((scheme, netloc, path, params, '', '')) + + +def normalize_parameters(params): + """Normalize parameters per `Section 3.4.1.3.2`_. + + For example, the list of parameters from the previous section would + be normalized as follows: + + Encoded:: + + +------------------------+------------------+ + | Name | Value | + +------------------------+------------------+ + | b5 | %3D%253D | + | a3 | a | + | c%40 | | + | a2 | r%20b | + | oauth_consumer_key | 9djdj82h48djs9d2 | + | oauth_token | kkk9d7dh3k39sjv7 | + | oauth_signature_method | HMAC-SHA1 | + | oauth_timestamp | 137131201 | + | oauth_nonce | 7d8f3e4a | + | c2 | | + | a3 | 2%20q | + +------------------------+------------------+ + + Sorted:: + + +------------------------+------------------+ + | Name | Value | + +------------------------+------------------+ + | a2 | r%20b | + | a3 | 2%20q | + | a3 | a | + | b5 | %3D%253D | + | c%40 | | + | c2 | | + | oauth_consumer_key | 9djdj82h48djs9d2 | + | oauth_nonce | 7d8f3e4a | + | oauth_signature_method | HMAC-SHA1 | + | oauth_timestamp | 137131201 | + | oauth_token | kkk9d7dh3k39sjv7 | + +------------------------+------------------+ + + Concatenated Pairs:: + + +-------------------------------------+ + | Name=Value | + +-------------------------------------+ + | a2=r%20b | + | a3=2%20q | + | a3=a | + | b5=%3D%253D | + | c%40= | + | c2= | + | oauth_consumer_key=9djdj82h48djs9d2 | + | oauth_nonce=7d8f3e4a | + | oauth_signature_method=HMAC-SHA1 | + | oauth_timestamp=137131201 | + | oauth_token=kkk9d7dh3k39sjv7 | + +-------------------------------------+ + + and concatenated together into a single string (line breaks are for + display purposes only):: + + a2=r%20b&a3=2%20q&a3=a&b5=%3D%253D&c%40=&c2=&oauth_consumer_key=9dj + dj82h48djs9d2&oauth_nonce=7d8f3e4a&oauth_signature_method=HMAC-SHA1 + &oauth_timestamp=137131201&oauth_token=kkk9d7dh3k39sjv7 + + .. _`Section 3.4.1.3.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3.2 + """ + + # 1. First, the name and value of each parameter are encoded + # (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + key_values = [(escape(k), escape(v)) for k, v in params] + + # 2. The parameters are sorted by name, using ascending byte value + # ordering. If two or more parameters share the same name, they + # are sorted by their value. + key_values.sort() + + # 3. The name of each parameter is concatenated to its corresponding + # value using an "=" character (ASCII code 61) as a separator, even + # if the value is empty. 
+ parameter_parts = ['{0}={1}'.format(k, v) for k, v in key_values] + + # 4. The sorted name/value pairs are concatenated together into a + # single string by using an "&" character (ASCII code 38) as + # separator. + return '&'.join(parameter_parts) + + +def generate_signature_base_string(request): + """Generate signature base string from request.""" + host = request.headers.get('Host', None) + return construct_base_string( + request.method, request.uri, request.params, host) + + +def hmac_sha1_signature(base_string, client_secret, token_secret): + """Generate signature via HMAC-SHA1 method, per `Section 3.4.2`_. + + The "HMAC-SHA1" signature method uses the HMAC-SHA1 signature + algorithm as defined in `RFC2104`_:: + + digest = HMAC-SHA1 (key, text) + + .. _`RFC2104`: https://tools.ietf.org/html/rfc2104 + .. _`Section 3.4.2`: https://tools.ietf.org/html/rfc5849#section-3.4.2 + """ + + # The HMAC-SHA1 function variables are used in following way: + + # text is set to the value of the signature base string from + # `Section 3.4.1.1`_. + # + # .. _`Section 3.4.1.1`: https://tools.ietf.org/html/rfc5849#section-3.4.1.1 + text = base_string + + # key is set to the concatenated values of: + # 1. The client shared-secret, after being encoded (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + key = escape(client_secret or '') + + # 2. An "&" character (ASCII code 38), which MUST be included + # even when either secret is empty. + key += '&' + + # 3. The token shared-secret, after being encoded (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + key += escape(token_secret or '') + + signature = hmac.new(to_bytes(key), to_bytes(text), hashlib.sha1) + + # digest is used to set the value of the "oauth_signature" protocol + # parameter, after the result octet string is base64-encoded + # per `RFC2045, Section 6.8`. + # + # .. _`RFC2045, Section 6.8`: https://tools.ietf.org/html/rfc2045#section-6.8 + sig = binascii.b2a_base64(signature.digest())[:-1] + return to_unicode(sig) + + +def rsa_sha1_signature(base_string, rsa_private_key): + """Generate signature via RSA-SHA1 method, per `Section 3.4.3`_. + + The "RSA-SHA1" signature method uses the RSASSA-PKCS1-v1_5 signature + algorithm as defined in `RFC3447, Section 8.2`_ (also known as + PKCS#1), using SHA-1 as the hash function for EMSA-PKCS1-v1_5. To + use this method, the client MUST have established client credentials + with the server that included its RSA public key (in a manner that is + beyond the scope of this specification). + + .. _`Section 3.4.3`: https://tools.ietf.org/html/rfc5849#section-3.4.3 + .. _`RFC3447, Section 8.2`: https://tools.ietf.org/html/rfc3447#section-8.2 + """ + from .rsa import sign_sha1 + base_string = to_bytes(base_string) + s = sign_sha1(to_bytes(base_string), rsa_private_key) + sig = binascii.b2a_base64(s)[:-1] + return to_unicode(sig) + + +def plaintext_signature(client_secret, token_secret): + """Generate signature via PLAINTEXT method, per `Section 3.4.4`_. + + The "PLAINTEXT" method does not employ a signature algorithm. It + MUST be used with a transport-layer mechanism such as TLS or SSL (or + sent over a secure channel with equivalent protections). It does not + utilize the signature base string or the "oauth_timestamp" and + "oauth_nonce" parameters. + + .. _`Section 3.4.4`: https://tools.ietf.org/html/rfc5849#section-3.4.4 + """ + + # The "oauth_signature" protocol parameter is set to the concatenated + # value of: + + # 1. 
The client shared-secret, after being encoded (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + signature = escape(client_secret or '') + + # 2. An "&" character (ASCII code 38), which MUST be included even + # when either secret is empty. + signature += '&' + + # 3. The token shared-secret, after being encoded (`Section 3.6`_). + # + # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6 + signature += escape(token_secret or '') + + return signature + + +def sign_hmac_sha1(client, request): + """Sign a HMAC-SHA1 signature.""" + base_string = generate_signature_base_string(request) + return hmac_sha1_signature( + base_string, client.client_secret, client.token_secret) + + +def sign_rsa_sha1(client, request): + """Sign a RSASSA-PKCS #1 v1.5 base64 encoded signature.""" + base_string = generate_signature_base_string(request) + return rsa_sha1_signature(base_string, client.rsa_key) + + +def sign_plaintext(client, request): + """Sign a PLAINTEXT signature.""" + return plaintext_signature(client.client_secret, client.token_secret) + + +def verify_hmac_sha1(request): + """Verify a HMAC-SHA1 signature.""" + base_string = generate_signature_base_string(request) + sig = hmac_sha1_signature( + base_string, request.client_secret, request.token_secret) + return hmac.compare_digest(sig, request.signature) + + +def verify_rsa_sha1(request): + """Verify a RSASSA-PKCS #1 v1.5 base64 encoded signature.""" + from .rsa import verify_sha1 + base_string = generate_signature_base_string(request) + sig = binascii.a2b_base64(to_bytes(request.signature)) + return verify_sha1(sig, to_bytes(base_string), request.rsa_public_key) + + +def verify_plaintext(request): + """Verify a PLAINTEXT signature.""" + sig = plaintext_signature(request.client_secret, request.token_secret) + return hmac.compare_digest(sig, request.signature) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/util.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/util.py new file mode 100644 index 000000000..9383e22ed --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/util.py @@ -0,0 +1,9 @@ +from authlib.common.urls import quote, unquote + + +def escape(s): + return quote(s, safe=b'~') + + +def unescape(s): + return unquote(s) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/wrapper.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/wrapper.py new file mode 100644 index 000000000..9f889a30c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth1/rfc5849/wrapper.py @@ -0,0 +1,129 @@ +from authlib.common.urls import ( + urlparse, extract_params, url_decode, + parse_http_list, parse_keqv_list, +) +from .signature import ( + SIGNATURE_TYPE_QUERY, + SIGNATURE_TYPE_BODY, + SIGNATURE_TYPE_HEADER +) +from .errors import ( + InsecureTransportError, + DuplicatedOAuthProtocolParameterError +) +from .util import unescape + + +class OAuth1Request(object): + def __init__(self, method, uri, body=None, headers=None): + InsecureTransportError.check(uri) + self.method = method + self.uri = uri + self.body = body + self.headers = headers or {} + + # states namespaces + self.client = None + self.credential = None + self.user = None + + self.query = urlparse.urlparse(uri).query + self.query_params = url_decode(self.query) + self.body_params = extract_params(body) or [] + + self.auth_params, self.realm = 
_parse_authorization_header(headers) + self.signature_type, self.oauth_params = _parse_oauth_params( + self.query_params, self.body_params, self.auth_params) + + params = [] + params.extend(self.query_params) + params.extend(self.body_params) + params.extend(self.auth_params) + self.params = params + + @property + def client_id(self): + return self.oauth_params.get('oauth_consumer_key') + + @property + def client_secret(self): + if self.client: + return self.client.get_client_secret() + + @property + def rsa_public_key(self): + if self.client: + return self.client.get_rsa_public_key() + + @property + def timestamp(self): + return self.oauth_params.get('oauth_timestamp') + + @property + def redirect_uri(self): + return self.oauth_params.get('oauth_callback') + + @property + def signature(self): + return self.oauth_params.get('oauth_signature') + + @property + def signature_method(self): + return self.oauth_params.get('oauth_signature_method') + + @property + def token(self): + return self.oauth_params.get('oauth_token') + + @property + def token_secret(self): + if self.credential: + return self.credential.get_oauth_token_secret() + + +def _filter_oauth(params): + for k, v in params: + if k.startswith('oauth_'): + yield (k, v) + + +def _parse_authorization_header(headers): + """Parse an OAuth authorization header into a list of 2-tuples""" + authorization_header = headers.get('Authorization') + if not authorization_header: + return [], None + + auth_scheme = 'oauth ' + if authorization_header.lower().startswith(auth_scheme): + items = parse_http_list(authorization_header[len(auth_scheme):]) + try: + items = parse_keqv_list(items).items() + auth_params = [(unescape(k), unescape(v)) for k, v in items] + realm = dict(auth_params).get('realm') + return auth_params, realm + except (IndexError, ValueError): + pass + raise ValueError('Malformed authorization header') + + +def _parse_oauth_params(query_params, body_params, auth_params): + oauth_params_set = [ + (SIGNATURE_TYPE_QUERY, list(_filter_oauth(query_params))), + (SIGNATURE_TYPE_BODY, list(_filter_oauth(body_params))), + (SIGNATURE_TYPE_HEADER, list(_filter_oauth(auth_params))) + ] + oauth_params_set = [params for params in oauth_params_set if params[1]] + if len(oauth_params_set) > 1: + found_types = [p[0] for p in oauth_params_set] + raise DuplicatedOAuthProtocolParameterError( + '"oauth_" params must come from only 1 signature type ' + 'but were found in {}'.format(','.join(found_types)) + ) + + if oauth_params_set: + signature_type = oauth_params_set[0][0] + oauth_params = dict(oauth_params_set[0][1]) + else: + signature_type = None + oauth_params = {} + return signature_type, oauth_params diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/__init__.py new file mode 100644 index 000000000..23dea91b9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/__init__.py @@ -0,0 +1,16 @@ +from .base import OAuth2Error +from .auth import ClientAuth, TokenAuth +from .client import OAuth2Client +from .rfc6749 import ( + OAuth2Request, + HttpRequest, + AuthorizationServer, + ClientAuthentication, + ResourceProtector, +) + +__all__ = [ + 'OAuth2Error', 'ClientAuth', 'TokenAuth', 'OAuth2Client', + 'OAuth2Request', 'HttpRequest', 'AuthorizationServer', + 'ClientAuthentication', 'ResourceProtector', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/auth.py 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/auth.py new file mode 100644 index 000000000..1d7a655a8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/auth.py @@ -0,0 +1,105 @@ +import base64 +from authlib.common.urls import add_params_to_qs, add_params_to_uri +from authlib.common.encoding import to_bytes, to_native +from .rfc6749 import OAuth2Token +from .rfc6750 import add_bearer_token + + +def encode_client_secret_basic(client, method, uri, headers, body): + text = '{}:{}'.format(client.client_id, client.client_secret) + auth = to_native(base64.urlsafe_b64encode(to_bytes(text, 'latin1'))) + headers['Authorization'] = 'Basic {}'.format(auth) + return uri, headers, body + + +def encode_client_secret_post(client, method, uri, headers, body): + body = add_params_to_qs(body or '', [ + ('client_id', client.client_id), + ('client_secret', client.client_secret or '') + ]) + if 'Content-Length' in headers: + headers['Content-Length'] = str(len(body)) + return uri, headers, body + + +def encode_none(client, method, uri, headers, body): + if method == 'GET': + uri = add_params_to_uri(uri, [('client_id', client.client_id)]) + return uri, headers, body + body = add_params_to_qs(body, [('client_id', client.client_id)]) + if 'Content-Length' in headers: + headers['Content-Length'] = str(len(body)) + return uri, headers, body + + +class ClientAuth(object): + """Attaches OAuth Client Information to HTTP requests. + + :param client_id: Client ID, which you get from client registration. + :param client_secret: Client Secret, which you get from registration. + :param auth_method: Client auth method for token endpoint. The supported + methods for now: + + * client_secret_basic (default) + * client_secret_post + * none + """ + DEFAULT_AUTH_METHODS = { + 'client_secret_basic': encode_client_secret_basic, + 'client_secret_post': encode_client_secret_post, + 'none': encode_none, + } + + def __init__(self, client_id, client_secret, auth_method=None): + if auth_method is None: + auth_method = 'client_secret_basic' + + self.client_id = client_id + self.client_secret = client_secret + + if auth_method in self.DEFAULT_AUTH_METHODS: + auth_method = self.DEFAULT_AUTH_METHODS[auth_method] + + self.auth_method = auth_method + + def prepare(self, method, uri, headers, body): + return self.auth_method(self, method, uri, headers, body) + + +class TokenAuth(object): + """Attach token information to HTTP requests. 
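+
+    A minimal usage sketch, assuming a plain bearer token dict (the URL is
+    only an example value)::
+
+        auth = TokenAuth({'access_token': 'a1b2c3', 'token_type': 'bearer'})
+        uri, headers, body = auth.prepare(
+            'https://api.example.com/user', {}, '')
+        # headers should now carry "Authorization: Bearer a1b2c3"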
+ + :param token: A dict or OAuth2Token instance of an OAuth 2.0 token + :param token_placement: The placement of the token, default is ``header``, + available choices: + + * header (default) + * body + * uri + """ + DEFAULT_TOKEN_TYPE = 'bearer' + SIGN_METHODS = { + 'bearer': add_bearer_token + } + + def __init__(self, token, token_placement='header', client=None): + self.token = OAuth2Token.from_dict(token) + self.token_placement = token_placement + self.client = client + self.hooks = set() + + def set_token(self, token): + self.token = OAuth2Token.from_dict(token) + + def prepare(self, uri, headers, body): + token_type = self.token.get('token_type', self.DEFAULT_TOKEN_TYPE) + sign = self.SIGN_METHODS[token_type.lower()] + uri, headers, body = sign( + self.token['access_token'], + uri, headers, body, + self.token_placement) + + for hook in self.hooks: + uri, headers, body = hook(uri, headers, body) + + return uri, headers, body diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/base.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/base.py new file mode 100644 index 000000000..5fea8e084 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/base.py @@ -0,0 +1,27 @@ +from authlib.common.errors import AuthlibHTTPError +from authlib.common.urls import add_params_to_uri + + +class OAuth2Error(AuthlibHTTPError): + def __init__(self, description=None, uri=None, + status_code=None, state=None, + redirect_uri=None, redirect_fragment=False, error=None): + super(OAuth2Error, self).__init__(error, description, uri, status_code) + self.state = state + self.redirect_uri = redirect_uri + self.redirect_fragment = redirect_fragment + + def get_body(self): + """Get a list of body.""" + error = super(OAuth2Error, self).get_body() + if self.state: + error.append(('state', self.state)) + return error + + def __call__(self, translations=None, error_uris=None): + if self.redirect_uri: + params = self.get_body() + loc = add_params_to_uri( + self.redirect_uri, params, self.redirect_fragment) + return 302, '', [('Location', loc)] + return super(OAuth2Error, self).__call__(translations, error_uris) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/client.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/client.py new file mode 100644 index 000000000..2720d4643 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/client.py @@ -0,0 +1,415 @@ +from authlib.common.security import generate_token +from authlib.common.urls import url_decode +from authlib.common.encoding import text_types +from .rfc6749.parameters import ( + prepare_grant_uri, + prepare_token_request, + parse_authorization_code_response, + parse_implicit_response, +) +from .rfc7009 import prepare_revoke_token_request +from .rfc7636 import create_s256_code_challenge +from .auth import TokenAuth, ClientAuth + +DEFAULT_HEADERS = { + 'Accept': 'application/json', + 'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8' +} + + +class OAuth2Client(object): + """Construct a new OAuth 2 protocol client. + + :param session: Requests session object to communicate with + authorization server. + :param client_id: Client ID, which you get from client registration. + :param client_secret: Client Secret, which you get from registration. + :param token_endpoint_auth_method: client authentication method for + token endpoint. 
+ :param revocation_endpoint_auth_method: client authentication method for + revocation endpoint. + :param scope: Scope that you needed to access user resources. + :param redirect_uri: Redirect URI you registered as callback. + :param code_challenge_method: PKCE method name, only S256 is supported. + :param token: A dict of token attributes such as ``access_token``, + ``token_type`` and ``expires_at``. + :param token_placement: The place to put token in HTTP request. Available + values: "header", "body", "uri". + :param update_token: A function for you to update token. It accept a + :class:`OAuth2Token` as parameter. + """ + client_auth_class = ClientAuth + token_auth_class = TokenAuth + + EXTRA_AUTHORIZE_PARAMS = ( + 'response_mode', 'nonce', 'prompt', 'login_hint' + ) + SESSION_REQUEST_PARAMS = [] + + def __init__(self, session, client_id=None, client_secret=None, + token_endpoint_auth_method=None, + revocation_endpoint_auth_method=None, + scope=None, redirect_uri=None, code_challenge_method=None, + token=None, token_placement='header', update_token=None, **metadata): + + self.session = session + self.client_id = client_id + self.client_secret = client_secret + + if token_endpoint_auth_method is None: + if client_secret: + token_endpoint_auth_method = 'client_secret_basic' + else: + token_endpoint_auth_method = 'none' + + self.token_endpoint_auth_method = token_endpoint_auth_method + + if revocation_endpoint_auth_method is None: + if client_secret: + revocation_endpoint_auth_method = 'client_secret_basic' + else: + revocation_endpoint_auth_method = 'none' + + self.revocation_endpoint_auth_method = revocation_endpoint_auth_method + + self.scope = scope + self.redirect_uri = redirect_uri + self.code_challenge_method = code_challenge_method + + self.token_auth = self.token_auth_class(token, token_placement, self) + self.update_token = update_token + + token_updater = metadata.pop('token_updater', None) + if token_updater: + raise ValueError('update token has been redesigned, checkout the documentation') + + self.metadata = metadata + + self.compliance_hook = { + 'access_token_response': set(), + 'refresh_token_request': set(), + 'refresh_token_response': set(), + 'revoke_token_request': set(), + 'introspect_token_request': set(), + } + self._auth_methods = {} + + def register_client_auth_method(self, auth): + """Extend client authenticate for token endpoint. + + :param auth: an instance to sign the request + """ + if isinstance(auth, tuple): + self._auth_methods[auth[0]] = auth[1] + else: + self._auth_methods[auth.name] = auth + + def client_auth(self, auth_method): + if isinstance(auth_method, text_types) and auth_method in self._auth_methods: + auth_method = self._auth_methods[auth_method] + return self.client_auth_class( + client_id=self.client_id, + client_secret=self.client_secret, + auth_method=auth_method, + ) + + @property + def token(self): + return self.token_auth.token + + @token.setter + def token(self, token): + self.token_auth.set_token(token) + + def create_authorization_url(self, url, state=None, code_verifier=None, **kwargs): + """Generate an authorization URL and state. + + :param url: Authorization endpoint url, must be HTTPS. + :param state: An optional state string for CSRF protection. If not + given it will be generated for you. + :param code_verifier: An optional code_verifier for code challenge. + :param kwargs: Extra parameters to include. 
+ :return: authorization_url, state + """ + if state is None: + state = generate_token() + + response_type = self.metadata.get('response_type', 'code') + response_type = kwargs.pop('response_type', response_type) + if 'redirect_uri' not in kwargs: + kwargs['redirect_uri'] = self.redirect_uri + if 'scope' not in kwargs: + kwargs['scope'] = self.scope + + if code_verifier and response_type == 'code' and self.code_challenge_method == 'S256': + kwargs['code_challenge'] = create_s256_code_challenge(code_verifier) + kwargs['code_challenge_method'] = self.code_challenge_method + + for k in self.EXTRA_AUTHORIZE_PARAMS: + if k not in kwargs and k in self.metadata: + kwargs[k] = self.metadata[k] + + uri = prepare_grant_uri( + url, client_id=self.client_id, response_type=response_type, + state=state, **kwargs) + return uri, state + + def fetch_token(self, url=None, body='', method='POST', headers=None, + auth=None, grant_type=None, **kwargs): + """Generic method for fetching an access token from the token endpoint. + + :param url: Access Token endpoint URL, if not configured, + ``authorization_response`` is used to extract token from + its fragment (implicit way). + :param body: Optional application/x-www-form-urlencoded body to add the + include in the token request. Prefer kwargs over body. + :param method: The HTTP method used to make the request. Defaults + to POST, but may also be GET. Other methods should + be added as needed. + :param headers: Dict to default request headers with. + :param auth: An auth tuple or method as accepted by requests. + :param grant_type: Use specified grant_type to fetch token + :return: A :class:`OAuth2Token` object (a dict too). + """ + # implicit grant_type + authorization_response = kwargs.pop('authorization_response', None) + if authorization_response and '#' in authorization_response: + return self.token_from_fragment(authorization_response, kwargs.get('state')) + + session_kwargs = self._extract_session_request_params(kwargs) + + if authorization_response and 'code=' in authorization_response: + grant_type = 'authorization_code' + params = parse_authorization_code_response( + authorization_response, + state=kwargs.get('state'), + ) + kwargs['code'] = params['code'] + + if grant_type is None: + grant_type = self.metadata.get('grant_type') + + body = self._prepare_token_endpoint_body(body, grant_type, **kwargs) + + if auth is None: + auth = self.client_auth(self.token_endpoint_auth_method) + + if headers is None: + headers = DEFAULT_HEADERS + + if url is None: + url = self.metadata.get('token_endpoint') + + return self._fetch_token( + url, body=body, auth=auth, method=method, + headers=headers, **session_kwargs + ) + + def _fetch_token(self, url, body='', headers=None, auth=None, + method='POST', **kwargs): + if method == 'GET': + if '?' in url: + url = '&'.join([url, body]) + else: + url = '?'.join([url, body]) + body = '' + + if headers is None: + headers = DEFAULT_HEADERS + + resp = self.session.request( + method, url, data=body, headers=headers, auth=auth, **kwargs) + + for hook in self.compliance_hook['access_token_response']: + resp = hook(resp) + + return self.parse_response_token(resp.json()) + + def token_from_fragment(self, authorization_response, state=None): + token = parse_implicit_response(authorization_response, state) + return self.parse_response_token(token) + + def refresh_token(self, url, refresh_token=None, body='', + auth=None, headers=None, **kwargs): + """Fetch a new access token using a refresh token. 
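+
+        A hedged usage sketch, assuming ``client`` is an already configured
+        ``OAuth2Client`` whose current token carries a ``refresh_token``
+        (the endpoint URL is only an example value)::
+
+            token = client.refresh_token(
+                'https://provider.example.com/oauth/token')
+            # the client's stored token is updated and returned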
+ + :param url: Refresh Token endpoint, must be HTTPS. + :param refresh_token: The refresh_token to use. + :param body: Optional application/x-www-form-urlencoded body to + include in the token request. Prefer kwargs over body. + :param auth: An auth tuple or method as accepted by requests. + :param headers: Dict to default request headers with. + :return: A :class:`OAuth2Token` object (a dict too). + """ + session_kwargs = self._extract_session_request_params(kwargs) + refresh_token = refresh_token or self.token.get('refresh_token') + if 'scope' not in kwargs and self.scope: + kwargs['scope'] = self.scope + body = prepare_token_request( + 'refresh_token', body, + refresh_token=refresh_token, **kwargs + ) + + if headers is None: + headers = DEFAULT_HEADERS + + for hook in self.compliance_hook['refresh_token_request']: + url, headers, body = hook(url, headers, body) + + if auth is None: + auth = self.client_auth(self.token_endpoint_auth_method) + + return self._refresh_token( + url, refresh_token=refresh_token, body=body, headers=headers, + auth=auth, **session_kwargs) + + def _refresh_token(self, url, refresh_token=None, body='', headers=None, + auth=None, **kwargs): + resp = self.session.post( + url, data=dict(url_decode(body)), headers=headers, + auth=auth, **kwargs) + + for hook in self.compliance_hook['refresh_token_response']: + resp = hook(resp) + + token = self.parse_response_token(resp.json()) + if 'refresh_token' not in token: + self.token['refresh_token'] = refresh_token + + if callable(self.update_token): + self.update_token(self.token, refresh_token=refresh_token) + + return self.token + + def revoke_token(self, url, token=None, token_type_hint=None, + body=None, auth=None, headers=None, **kwargs): + """Revoke token method defined via `RFC7009`_. + + :param url: Revoke Token endpoint, must be HTTPS. + :param token: The token to be revoked. + :param token_type_hint: The type of the token to be revoked. + It can be "access_token" or "refresh_token". + :param body: Optional application/x-www-form-urlencoded body to + include in the token request. Prefer kwargs over body. + :param auth: An auth tuple or method as accepted by requests. + :param headers: Dict to default request headers with. + :return: Revocation Response + + .. _`RFC7009`: https://tools.ietf.org/html/rfc7009 + """ + return self._handle_token_hint( + 'revoke_token_request', url, + token=token, token_type_hint=token_type_hint, + body=body, auth=auth, headers=headers, **kwargs) + + def introspect_token(self, url, token=None, token_type_hint=None, + body=None, auth=None, headers=None, **kwargs): + """Implementation of OAuth 2.0 Token Introspection defined via `RFC7662`_. + + :param url: Introspection Endpoint, must be HTTPS. + :param token: The token to be introspected. + :param token_type_hint: The type of the token to be introspected. + It can be "access_token" or "refresh_token". + :param body: Optional application/x-www-form-urlencoded body to + include in the token request. Prefer kwargs over body. + :param auth: An auth tuple or method as accepted by requests. + :param headers: Dict to default request headers with. + :return: Introspection Response + + ..
_`RFC7662`: https://tools.ietf.org/html/rfc7662 + """ + return self._handle_token_hint( + 'introspect_token_request', url, + token=token, token_type_hint=token_type_hint, + body=body, auth=auth, headers=headers, **kwargs) + + def _handle_token_hint(self, hook, url, token=None, token_type_hint=None, + body=None, auth=None, headers=None, **kwargs): + if token is None and self.token: + token = self.token.get('refresh_token') or self.token.get('access_token') + + if body is None: + body = '' + + body, headers = prepare_revoke_token_request( + token, token_type_hint, body, headers) + + for hook in self.compliance_hook[hook]: + url, headers, body = hook(url, headers, body) + + if auth is None: + auth = self.client_auth(self.revocation_endpoint_auth_method) + + session_kwargs = self._extract_session_request_params(kwargs) + return self._http_post( + url, body, auth=auth, headers=headers, **session_kwargs) + + def _http_post(self, url, body=None, auth=None, headers=None, **kwargs): + return self.session.post( + url, data=dict(url_decode(body)), + headers=headers, auth=auth, **kwargs) + + def register_compliance_hook(self, hook_type, hook): + """Register a hook for request/response tweaking. + + Available hooks are: + + * access_token_response: invoked before token parsing. + * refresh_token_request: invoked before refreshing token. + * refresh_token_response: invoked before refresh token parsing. + * protected_request: invoked before making a request. + * revoke_token_request: invoked before revoking a token. + * introspect_token_request: invoked before introspecting a token. + """ + if hook_type == 'protected_request': + self.token_auth.hooks.add(hook) + return + + if hook_type not in self.compliance_hook: + raise ValueError('Hook type %s is not in %s.', + hook_type, self.compliance_hook) + self.compliance_hook[hook_type].add(hook) + + def parse_response_token(self, token): + if 'error' not in token: + self.token = token + return self.token + + error = token['error'] + description = token.get('error_description', error) + self.handle_error(error, description) + + def _prepare_token_endpoint_body(self, body, grant_type, **kwargs): + if grant_type is None: + grant_type = _guess_grant_type(kwargs) + + if grant_type == 'authorization_code': + if 'redirect_uri' not in kwargs: + kwargs['redirect_uri'] = self.redirect_uri + return prepare_token_request(grant_type, body, **kwargs) + + if 'scope' not in kwargs and self.scope: + kwargs['scope'] = self.scope + return prepare_token_request(grant_type, body, **kwargs) + + def _extract_session_request_params(self, kwargs): + """Extract parameters for session object from the passing ``**kwargs``.""" + rv = {} + for k in self.SESSION_REQUEST_PARAMS: + if k in kwargs: + rv[k] = kwargs.pop(k) + return rv + + @staticmethod + def handle_error(error_type, error_description): + raise ValueError('{}: {}'.format(error_type, error_description)) + + +def _guess_grant_type(kwargs): + if 'code' in kwargs: + grant_type = 'authorization_code' + elif 'username' in kwargs and 'password' in kwargs: + grant_type = 'password' + else: + grant_type = 'client_credentials' + return grant_type diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/__init__.py new file mode 100644 index 000000000..2994f4f4e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/__init__.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +""" + 
authlib.oauth2.rfc6749 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + The OAuth 2.0 Authorization Framework. + + https://tools.ietf.org/html/rfc6749 +""" + +from .wrappers import OAuth2Request, OAuth2Token, HttpRequest +from .errors import ( + OAuth2Error, + AccessDeniedError, + MissingAuthorizationError, + InvalidGrantError, + InvalidClientError, + InvalidRequestError, + InvalidScopeError, + InsecureTransportError, + UnauthorizedClientError, + UnsupportedGrantTypeError, + UnsupportedTokenTypeError, + # exceptions for clients + MissingCodeException, + MissingTokenException, + MissingTokenTypeException, + MismatchingStateException, +) +from .models import ClientMixin, AuthorizationCodeMixin, TokenMixin +from .authenticate_client import ClientAuthentication +from .authorization_server import AuthorizationServer +from .resource_protector import ResourceProtector +from .token_endpoint import TokenEndpoint +from .grants import ( + BaseGrant, + AuthorizationEndpointMixin, + TokenEndpointMixin, + AuthorizationCodeGrant, + ImplicitGrant, + ResourceOwnerPasswordCredentialsGrant, + ClientCredentialsGrant, + RefreshTokenGrant, +) + +__all__ = [ + 'OAuth2Request', 'OAuth2Token', 'HttpRequest', + 'OAuth2Error', + 'AccessDeniedError', + 'MissingAuthorizationError', + 'InvalidGrantError', + 'InvalidClientError', + 'InvalidRequestError', + 'InvalidScopeError', + 'InsecureTransportError', + 'UnauthorizedClientError', + 'UnsupportedGrantTypeError', + 'UnsupportedTokenTypeError', + 'MissingCodeException', + 'MissingTokenException', + 'MissingTokenTypeException', + 'MismatchingStateException', + 'ClientMixin', 'AuthorizationCodeMixin', 'TokenMixin', + 'ClientAuthentication', + 'AuthorizationServer', + 'ResourceProtector', + 'TokenEndpoint', + 'BaseGrant', + 'AuthorizationEndpointMixin', + 'TokenEndpointMixin', + 'AuthorizationCodeGrant', + 'ImplicitGrant', + 'ResourceOwnerPasswordCredentialsGrant', + 'ClientCredentialsGrant', + 'RefreshTokenGrant', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/authenticate_client.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/authenticate_client.py new file mode 100644 index 000000000..d21289a1f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/authenticate_client.py @@ -0,0 +1,124 @@ +""" + authlib.oauth2.rfc6749.authenticate_client + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Registry of client authentication methods, with 3 built-in methods: + + 1. client_secret_basic + 2. client_secret_post + 3. none + + The "client_secret_basic" method is used a lot in examples of `RFC6749`_, + but the concept of naming are introduced in `RFC7591`_. + + .. _`RFC6749`: https://tools.ietf.org/html/rfc6749 + .. 
_`RFC7591`: https://tools.ietf.org/html/rfc7591 +""" + +import logging +from .errors import InvalidClientError +from .util import extract_basic_authorization + +log = logging.getLogger(__name__) + +__all__ = ['ClientAuthentication'] + + +class ClientAuthentication(object): + def __init__(self, query_client): + self.query_client = query_client + self._methods = { + 'none': authenticate_none, + 'client_secret_basic': authenticate_client_secret_basic, + 'client_secret_post': authenticate_client_secret_post, + } + + def register(self, method, func): + self._methods[method] = func + + def authenticate(self, request, methods): + for method in methods: + func = self._methods[method] + client = func(self.query_client, request) + if client: + request.auth_method = method + return client + + if 'client_secret_basic' in methods: + raise InvalidClientError(state=request.state, status_code=401) + raise InvalidClientError(state=request.state) + + def __call__(self, request, methods): + return self.authenticate(request, methods) + + +def authenticate_client_secret_basic(query_client, request): + """Authenticate client by ``client_secret_basic`` method. The client + uses HTTP Basic for authentication. + """ + client_id, client_secret = extract_basic_authorization(request.headers) + if client_id and client_secret: + client = _validate_client(query_client, client_id, request.state, 401) + if client.check_token_endpoint_auth_method('client_secret_basic') \ + and client.check_client_secret(client_secret): + log.debug( + 'Authenticate %s via "client_secret_basic" ' + 'success', client_id + ) + return client + log.debug( + 'Authenticate %s via "client_secret_basic" ' + 'failed', client_id + ) + + +def authenticate_client_secret_post(query_client, request): + """Authenticate client by ``client_secret_post`` method. The client + uses POST parameters for authentication. + """ + data = request.form + client_id = data.get('client_id') + client_secret = data.get('client_secret') + if client_id and client_secret: + client = _validate_client(query_client, client_id, request.state) + if client.check_token_endpoint_auth_method('client_secret_post') \ + and client.check_client_secret(client_secret): + log.debug( + 'Authenticate %s via "client_secret_post" ' + 'success', client_id + ) + return client + log.debug( + 'Authenticate %s via "client_secret_post" ' + 'failed', client_id + ) + + +def authenticate_none(query_client, request): + """Authenticate public client by ``none`` method. The client + does not have a client secret. 
+ """ + client_id = request.client_id + if client_id and 'client_secret' not in request.data: + client = _validate_client(query_client, client_id, request.state) + if client.check_token_endpoint_auth_method('none'): + log.debug( + 'Authenticate %s via "none" ' + 'success', client_id + ) + return client + log.debug( + 'Authenticate {} via "none" ' + 'failed'.format(client_id) + ) + + +def _validate_client(query_client, client_id, state=None, status_code=400): + if client_id is None: + raise InvalidClientError(state=state, status_code=status_code) + + client = query_client(client_id) + if not client: + raise InvalidClientError(state=state, status_code=status_code) + + return client diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/authorization_server.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/authorization_server.py new file mode 100644 index 000000000..c1f3ddca1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/authorization_server.py @@ -0,0 +1,240 @@ +from .authenticate_client import ClientAuthentication +from .errors import ( + OAuth2Error, + InvalidGrantError, + InvalidScopeError, + UnsupportedGrantTypeError, +) +from .util import scope_to_list + + +class AuthorizationServer(object): + """Authorization server that handles Authorization Endpoint and Token + Endpoint. + + :param query_client: A function to get client by client_id. The client + model class MUST implement the methods described by + :class:`~authlib.oauth2.rfc6749.ClientMixin`. + :param save_token: A method to save tokens. + :param generate_token: A method to generate tokens. + :param metadata: A dict of Authorization Server Metadata + """ + def __init__(self, query_client, save_token, generate_token=None, metadata=None): + self.query_client = query_client + self.save_token = save_token + self.generate_token = generate_token + + self.metadata = metadata + self._client_auth = None + self._authorization_grants = [] + self._token_grants = [] + self._endpoints = {} + + def authenticate_client(self, request, methods): + """Authenticate client via HTTP request information with the given + methods, such as ``client_secret_basic``, ``client_secret_post``. + """ + if self._client_auth is None and self.query_client: + self._client_auth = ClientAuthentication(self.query_client) + return self._client_auth(request, methods) + + def register_client_auth_method(self, method, func): + """Add more client auth method. The default methods are: + + * none: The client is a public client and does not have a client secret + * client_secret_post: The client uses the HTTP POST parameters + * client_secret_basic: The client uses HTTP Basic + + :param method: Name of the Auth method + :param func: Function to authenticate the client + + The auth method accept two parameters: ``query_client`` and ``request``, + an example for this method:: + + def authenticate_client_via_custom(query_client, request): + client_id = request.headers['X-Client-Id'] + client = query_client(client_id) + do_some_validation(client) + return client + + authorization_server.register_client_auth_method( + 'custom', authenticate_client_via_custom) + """ + if self._client_auth is None and self.query_client: + self._client_auth = ClientAuthentication(self.query_client) + + self._client_auth.register(method, func) + + def get_translations(self, request): + """Return a translations instance used for i18n error messages. + Framework SHOULD implement this function. 
+ """ + return None + + def get_error_uris(self, request): + """Return a dict of error uris mapping. Framework SHOULD implement + this function. + """ + return None + + def send_signal(self, name, *args, **kwargs): + """Framework integration can re-implement this method to support + signal system. + """ + pass + + def create_oauth2_request(self, request): + """This method MUST be implemented in framework integrations. It is + used to create an OAuth2Request instance. + + :param request: the "request" instance in framework + :return: OAuth2Request instance + """ + raise NotImplementedError() + + def create_json_request(self, request): + """This method MUST be implemented in framework integrations. It is + used to create an HttpRequest instance. + + :param request: the "request" instance in framework + :return: HttpRequest instance + """ + raise NotImplementedError() + + def handle_response(self, status, body, headers): + """Return HTTP response. Framework MUST implement this function.""" + raise NotImplementedError() + + def validate_requested_scope(self, scope, state=None): + """Validate if requested scope is supported by Authorization Server. + Developers CAN re-write this method to meet your needs. + """ + if scope and self.metadata: + scopes_supported = self.metadata.get('scopes_supported') + scopes = set(scope_to_list(scope)) + if scopes_supported and not set(scopes_supported).issuperset(scopes): + raise InvalidScopeError(state=state) + + def register_grant(self, grant_cls, extensions=None): + """Register a grant class into the endpoint registry. Developers + can implement the grants in ``authlib.oauth2.rfc6749.grants`` and + register with this method:: + + class AuthorizationCodeGrant(grants.AuthorizationCodeGrant): + def authenticate_user(self, credential): + # ... + + authorization_server.register_grant(AuthorizationCodeGrant) + + :param grant_cls: a grant class. + :param extensions: extensions for the grant class. + """ + if hasattr(grant_cls, 'check_authorization_endpoint'): + self._authorization_grants.append((grant_cls, extensions)) + if hasattr(grant_cls, 'check_token_endpoint'): + self._token_grants.append((grant_cls, extensions)) + + def register_endpoint(self, endpoint_cls): + """Add extra endpoint to authorization server. e.g. + RevocationEndpoint:: + + authorization_server.register_endpoint(RevocationEndpoint) + + :param endpoint_cls: A endpoint class + """ + self._endpoints[endpoint_cls.ENDPOINT_NAME] = endpoint_cls(self) + + def get_authorization_grant(self, request): + """Find the authorization grant for current request. + + :param request: OAuth2Request instance. + :return: grant instance + """ + for (grant_cls, extensions) in self._authorization_grants: + if grant_cls.check_authorization_endpoint(request): + return _create_grant(grant_cls, extensions, request, self) + raise InvalidGrantError( + 'Response type {!r} is not supported'.format(request.response_type)) + + def get_token_grant(self, request): + """Find the token grant for current request. + + :param request: OAuth2Request instance. + :return: grant instance + """ + for (grant_cls, extensions) in self._token_grants: + if grant_cls.check_token_endpoint(request) and \ + request.method in grant_cls.TOKEN_ENDPOINT_HTTP_METHODS: + return _create_grant(grant_cls, extensions, request, self) + raise UnsupportedGrantTypeError( + 'Grant type {!r} is not supported'.format(request.grant_type)) + + def create_endpoint_response(self, name, request=None): + """Validate endpoint request and create endpoint response. 
+ + :param name: Endpoint name + :param request: HTTP request instance. + :return: Response + """ + if name not in self._endpoints: + raise RuntimeError('There is no "{}" endpoint.'.format(name)) + + endpoint = self._endpoints[name] + request = endpoint.create_endpoint_request(request) + try: + return self.handle_response(*endpoint(request)) + except OAuth2Error as error: + return self.handle_error_response(request, error) + + def create_authorization_response(self, request=None, grant_user=None): + """Validate authorization request and create authorization response. + + :param request: HTTP request instance. + :param grant_user: if granted, it is resource owner. If denied, + it is None. + :returns: Response + """ + request = self.create_oauth2_request(request) + try: + grant = self.get_authorization_grant(request) + except InvalidGrantError as error: + return self.handle_error_response(request, error) + + try: + redirect_uri = grant.validate_authorization_request() + args = grant.create_authorization_response(redirect_uri, grant_user) + return self.handle_response(*args) + except OAuth2Error as error: + return self.handle_error_response(request, error) + + def create_token_response(self, request=None): + """Validate token request and create token response. + + :param request: HTTP request instance + """ + request = self.create_oauth2_request(request) + try: + grant = self.get_token_grant(request) + except UnsupportedGrantTypeError as error: + return self.handle_error_response(request, error) + + try: + grant.validate_token_request() + args = grant.create_token_response() + return self.handle_response(*args) + except OAuth2Error as error: + return self.handle_error_response(request, error) + + def handle_error_response(self, request, error): + return self.handle_response(*error( + translations=self.get_translations(request), + error_uris=self.get_error_uris(request) + )) + + +def _create_grant(grant_cls, extensions, request, server): + grant = grant_cls(request, server) + if extensions: + for ext in extensions: + ext(grant) + return grant diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/errors.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/errors.py new file mode 100644 index 000000000..c2612aa61 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/errors.py @@ -0,0 +1,200 @@ +""" + authlib.oauth2.rfc6749.errors + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Implementation for OAuth 2 Error Response. A basic error has + parameters: + + error + REQUIRED. A single ASCII [USASCII] error code. + + error_description + OPTIONAL. Human-readable ASCII [USASCII] text providing + additional information, used to assist the client developer in + understanding the error that occurred. + + error_uri + OPTIONAL. A URI identifying a human-readable web page with + information about the error, used to provide the client + developer with additional information about the error. + Values for the "error_uri" parameter MUST conform to the + URI-reference syntax and thus MUST NOT include characters + outside the set %x21 / %x23-5B / %x5D-7E. + + state + REQUIRED if a "state" parameter was present in the client + authorization request. The exact value received from the + client. + + https://tools.ietf.org/html/rfc6749#section-5.2 + + :copyright: (c) 2017 by Hsiaoming Yang. 
+""" +from authlib.oauth2.base import OAuth2Error +from authlib.common.security import is_secure_transport + +__all__ = [ + 'OAuth2Error', + 'InsecureTransportError', 'InvalidRequestError', + 'InvalidClientError', 'InvalidGrantError', + 'UnauthorizedClientError', 'UnsupportedGrantTypeError', + 'InvalidScopeError', 'AccessDeniedError', + 'MissingAuthorizationError', 'UnsupportedTokenTypeError', + 'MissingCodeException', 'MissingTokenException', + 'MissingTokenTypeException', 'MismatchingStateException', +] + + +class InsecureTransportError(OAuth2Error): + error = 'insecure_transport' + + def get_error_description(self): + return self.gettext('OAuth 2 MUST utilize https.') + + @classmethod + def check(cls, uri): + """Check and raise InsecureTransportError with the given URI.""" + if not is_secure_transport(uri): + raise cls() + + +class InvalidRequestError(OAuth2Error): + """The request is missing a required parameter, includes an + unsupported parameter value (other than grant type), + repeats a parameter, includes multiple credentials, + utilizes more than one mechanism for authenticating the + client, or is otherwise malformed. + + https://tools.ietf.org/html/rfc6749#section-5.2 + """ + error = 'invalid_request' + + +class InvalidClientError(OAuth2Error): + """Client authentication failed (e.g., unknown client, no + client authentication included, or unsupported + authentication method). The authorization server MAY + return an HTTP 401 (Unauthorized) status code to indicate + which HTTP authentication schemes are supported. If the + client attempted to authenticate via the "Authorization" + request header field, the authorization server MUST + respond with an HTTP 401 (Unauthorized) status code and + include the "WWW-Authenticate" response header field + matching the authentication scheme used by the client. + + https://tools.ietf.org/html/rfc6749#section-5.2 + """ + error = 'invalid_client' + status_code = 400 + + def get_headers(self): + headers = super(InvalidClientError, self).get_headers() + if self.status_code == 401: + error_description = self.get_error_description() + # safe escape + error_description = error_description.replace('"', '|') + extras = [ + 'error="{}"'.format(self.error), + 'error_description="{}"'.format(error_description) + ] + headers.append( + ('WWW-Authenticate', 'Basic ' + ', '.join(extras)) + ) + return headers + + +class InvalidGrantError(OAuth2Error): + """The provided authorization grant (e.g., authorization + code, resource owner credentials) or refresh token is + invalid, expired, revoked, does not match the redirection + URI used in the authorization request, or was issued to + another client. + + https://tools.ietf.org/html/rfc6749#section-5.2 + """ + error = 'invalid_grant' + + +class UnauthorizedClientError(OAuth2Error): + """ The authenticated client is not authorized to use this + authorization grant type. + + https://tools.ietf.org/html/rfc6749#section-5.2 + """ + error = 'unauthorized_client' + + +class UnsupportedGrantTypeError(OAuth2Error): + """The authorization grant type is not supported by the + authorization server. + + https://tools.ietf.org/html/rfc6749#section-5.2 + """ + error = 'unsupported_grant_type' + + +class InvalidScopeError(OAuth2Error): + """The requested scope is invalid, unknown, malformed, or + exceeds the scope granted by the resource owner. 
+ + https://tools.ietf.org/html/rfc6749#section-5.2 + """ + error = 'invalid_scope' + + def get_error_description(self): + return self.gettext( + 'The requested scope is invalid, unknown, or malformed.') + + +class AccessDeniedError(OAuth2Error): + """The resource owner or authorization server denied the request. + + Used in authorization endpoint for "code" and "implicit". Defined in + `Section 4.1.2.1`_. + + .. _`Section 4.1.2.1`: https://tools.ietf.org/html/rfc6749#section-4.1.2.1 + """ + error = 'access_denied' + + def get_error_description(self): + return self.gettext( + 'The resource owner or authorization server denied the request') + + +# -- below are extended errors -- # + + +class MissingAuthorizationError(OAuth2Error): + error = 'missing_authorization' + status_code = 401 + + def get_error_description(self): + return self.gettext('Missing "Authorization" in headers.') + + +class UnsupportedTokenTypeError(OAuth2Error): + error = 'unsupported_token_type' + status_code = 401 + + +# -- exceptions for clients -- # + + +class MissingCodeException(OAuth2Error): + error = 'missing_code' + description = 'Missing "code" in response.' + + +class MissingTokenException(OAuth2Error): + error = 'missing_token' + description = 'Missing "access_token" in response.' + + +class MissingTokenTypeException(OAuth2Error): + error = 'missing_token_type' + description = 'Missing "token_type" in response.' + + +class MismatchingStateException(OAuth2Error): + error = 'mismatching_state' + description = 'CSRF Warning! State not equal in request and response.' diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/__init__.py new file mode 100644 index 000000000..b1797565f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/__init__.py @@ -0,0 +1,37 @@ +""" + authlib.oauth2.rfc6749.grants + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Implementation for `Section 4`_ of "Obtaining Authorization". + + To request an access token, the client obtains authorization from the + resource owner. The authorization is expressed in the form of an + authorization grant, which the client uses to request the access + token. OAuth defines four grant types: + + 1. authorization code + 2. implicit + 3. resource owner password credentials + 4. client credentials. + + It also provides an extension mechanism for defining additional grant + types. Authlib defines refresh_token as a grant type too. + + .. 
_`Section 4`: https://tools.ietf.org/html/rfc6749#section-4 +""" + +# flake8: noqa + +from .base import BaseGrant, AuthorizationEndpointMixin, TokenEndpointMixin +from .authorization_code import AuthorizationCodeGrant +from .implicit import ImplicitGrant +from .resource_owner_password_credentials import ResourceOwnerPasswordCredentialsGrant +from .client_credentials import ClientCredentialsGrant +from .refresh_token import RefreshTokenGrant + +__all__ = [ + 'BaseGrant', 'AuthorizationEndpointMixin', 'TokenEndpointMixin', + 'AuthorizationCodeGrant', 'ImplicitGrant', + 'ResourceOwnerPasswordCredentialsGrant', + 'ClientCredentialsGrant', 'RefreshTokenGrant', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/authorization_code.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/authorization_code.py new file mode 100644 index 000000000..10599cb44 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/authorization_code.py @@ -0,0 +1,384 @@ +import logging +from authlib.deprecate import deprecate +from authlib.common.urls import add_params_to_uri +from authlib.common.security import generate_token +from .base import BaseGrant, AuthorizationEndpointMixin, TokenEndpointMixin +from ..errors import ( + OAuth2Error, + UnauthorizedClientError, + InvalidClientError, + InvalidRequestError, + AccessDeniedError, +) + +log = logging.getLogger(__name__) + + +class AuthorizationCodeGrant(BaseGrant, AuthorizationEndpointMixin, TokenEndpointMixin): + """The authorization code grant type is used to obtain both access + tokens and refresh tokens and is optimized for confidential clients. + Since this is a redirection-based flow, the client must be capable of + interacting with the resource owner's user-agent (typically a web + browser) and capable of receiving incoming requests (via redirection) + from the authorization server:: + + +----------+ + | Resource | + | Owner | + | | + +----------+ + ^ + | + (B) + +----|-----+ Client Identifier +---------------+ + | -+----(A)-- & Redirection URI ---->| | + | User- | | Authorization | + | Agent -+----(B)-- User authenticates --->| Server | + | | | | + | -+----(C)-- Authorization Code ---<| | + +-|----|---+ +---------------+ + | | ^ v + (A) (C) | | + | | | | + ^ v | | + +---------+ | | + | |>---(D)-- Authorization Code ---------' | + | Client | & Redirection URI | + | | | + | |<---(E)----- Access Token -------------------' + +---------+ (w/ Optional Refresh Token) + """ + #: Allowed client auth methods for token endpoint + TOKEN_ENDPOINT_AUTH_METHODS = ['client_secret_basic', 'client_secret_post'] + + #: Generated "code" length + AUTHORIZATION_CODE_LENGTH = 48 + + RESPONSE_TYPES = {'code'} + GRANT_TYPE = 'authorization_code' + + def validate_authorization_request(self): + """The client constructs the request URI by adding the following + parameters to the query component of the authorization endpoint URI + using the "application/x-www-form-urlencoded" format. + Per `Section 4.1.1`_. + + response_type + REQUIRED. Value MUST be set to "code". + + client_id + REQUIRED. The client identifier as described in Section 2.2. + + redirect_uri + OPTIONAL. As described in Section 3.1.2. + + scope + OPTIONAL. The scope of the access request as described by + Section 3.3. + + state + RECOMMENDED. An opaque value used by the client to maintain + state between the request and callback. 
The authorization + server includes this value when redirecting the user-agent back + to the client. The parameter SHOULD be used for preventing + cross-site request forgery as described in Section 10.12. + + The client directs the resource owner to the constructed URI using an + HTTP redirection response, or by other means available to it via the + user-agent. + + For example, the client directs the user-agent to make the following + HTTP request using TLS (with extra line breaks for display purposes + only): + + .. code-block:: http + + GET /authorize?response_type=code&client_id=s6BhdRkqt3&state=xyz + &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb HTTP/1.1 + Host: server.example.com + + The authorization server validates the request to ensure that all + required parameters are present and valid. If the request is valid, + the authorization server authenticates the resource owner and obtains + an authorization decision (by asking the resource owner or by + establishing approval via other means). + + .. _`Section 4.1.1`: https://tools.ietf.org/html/rfc6749#section-4.1.1 + """ + return validate_code_authorization_request(self) + + def create_authorization_response(self, redirect_uri, grant_user): + """If the resource owner grants the access request, the authorization + server issues an authorization code and delivers it to the client by + adding the following parameters to the query component of the + redirection URI using the "application/x-www-form-urlencoded" format. + Per `Section 4.1.2`_. + + code + REQUIRED. The authorization code generated by the + authorization server. The authorization code MUST expire + shortly after it is issued to mitigate the risk of leaks. A + maximum authorization code lifetime of 10 minutes is + RECOMMENDED. The client MUST NOT use the authorization code + more than once. If an authorization code is used more than + once, the authorization server MUST deny the request and SHOULD + revoke (when possible) all tokens previously issued based on + that authorization code. The authorization code is bound to + the client identifier and redirection URI. + state + REQUIRED if the "state" parameter was present in the client + authorization request. The exact value received from the + client. + + For example, the authorization server redirects the user-agent by + sending the following HTTP response. + + .. code-block:: http + + HTTP/1.1 302 Found + Location: https://client.example.com/cb?code=SplxlOBeZQQYbYS6WxSbIA + &state=xyz + + .. _`Section 4.1.2`: https://tools.ietf.org/html/rfc6749#section-4.1.2 + + :param redirect_uri: Redirect to the given URI for the authorization + :param grant_user: if resource owner granted the request, pass this + resource owner, otherwise pass None. 
+ :returns: (status_code, body, headers) + """ + if not grant_user: + raise AccessDeniedError(state=self.request.state, redirect_uri=redirect_uri) + + self.request.user = grant_user + if hasattr(self, 'create_authorization_code'): # pragma: no cover + deprecate('Use "generate_authorization_code" instead', '1.0') + client = self.request.client + code = self.create_authorization_code(client, grant_user, self.request) + else: + code = self.generate_authorization_code() + self.save_authorization_code(code, self.request) + + params = [('code', code)] + if self.request.state: + params.append(('state', self.request.state)) + uri = add_params_to_uri(redirect_uri, params) + headers = [('Location', uri)] + return 302, '', headers + + def validate_token_request(self): + """The client makes a request to the token endpoint by sending the + following parameters using the "application/x-www-form-urlencoded" + format per `Section 4.1.3`_: + + grant_type + REQUIRED. Value MUST be set to "authorization_code". + + code + REQUIRED. The authorization code received from the + authorization server. + + redirect_uri + REQUIRED, if the "redirect_uri" parameter was included in the + authorization request as described in Section 4.1.1, and their + values MUST be identical. + + client_id + REQUIRED, if the client is not authenticating with the + authorization server as described in Section 3.2.1. + + If the client type is confidential or the client was issued client + credentials (or assigned other authentication requirements), the + client MUST authenticate with the authorization server as described + in Section 3.2.1. + + For example, the client makes the following HTTP request using TLS: + + .. code-block:: http + + POST /token HTTP/1.1 + Host: server.example.com + Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW + Content-Type: application/x-www-form-urlencoded + + grant_type=authorization_code&code=SplxlOBeZQQYbYS6WxSbIA + &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb + + .. 
_`Section 4.1.3`: https://tools.ietf.org/html/rfc6749#section-4.1.3 + """ + # ignore validate for grant_type, since it is validated by + # check_token_endpoint + + # authenticate the client if client authentication is included + client = self.authenticate_token_endpoint_client() + + log.debug('Validate token request of %r', client) + if not client.check_grant_type(self.GRANT_TYPE): + raise UnauthorizedClientError() + + code = self.request.form.get('code') + if code is None: + raise InvalidRequestError('Missing "code" in request.') + + # ensure that the authorization code was issued to the authenticated + # confidential client, or if the client is public, ensure that the + # code was issued to "client_id" in the request + authorization_code = self.query_authorization_code(code, client) + if not authorization_code: + raise InvalidRequestError('Invalid "code" in request.') + + # validate redirect_uri parameter + log.debug('Validate token redirect_uri of %r', client) + redirect_uri = self.request.redirect_uri + original_redirect_uri = authorization_code.get_redirect_uri() + if original_redirect_uri and redirect_uri != original_redirect_uri: + raise InvalidRequestError('Invalid "redirect_uri" in request.') + + # save for create_token_response + self.request.client = client + self.request.credential = authorization_code + self.execute_hook('after_validate_token_request') + + def create_token_response(self): + """If the access token request is valid and authorized, the + authorization server issues an access token and optional refresh + token as described in Section 5.1. If the request client + authentication failed or is invalid, the authorization server returns + an error response as described in Section 5.2. Per `Section 4.1.4`_. + + An example successful response: + + .. code-block:: http + + HTTP/1.1 200 OK + Content-Type: application/json + Cache-Control: no-store + Pragma: no-cache + + { + "access_token":"2YotnFZFEjr1zCsicMWpAA", + "token_type":"example", + "expires_in":3600, + "refresh_token":"tGzv3JOkF0XG5Qx2TlKWIA", + "example_parameter":"example_value" + } + + :returns: (status_code, body, headers) + + .. _`Section 4.1.4`: https://tools.ietf.org/html/rfc6749#section-4.1.4 + """ + client = self.request.client + authorization_code = self.request.credential + + user = self.authenticate_user(authorization_code) + if not user: + raise InvalidRequestError('There is no "user" for this code.') + + scope = authorization_code.get_scope() + token = self.generate_token( + user=user, + scope=scope, + include_refresh_token=client.check_grant_type('refresh_token'), + ) + log.debug('Issue token %r to %r', token, client) + + self.request.user = user + self.save_token(token) + self.execute_hook('process_token', token=token) + self.delete_authorization_code(authorization_code) + return 200, token, self.TOKEN_RESPONSE_HEADER + + def generate_authorization_code(self): + """"The method to generate "code" value for authorization code data. + Developers may rewrite this method, or customize the code length with:: + + class MyAuthorizationCodeGrant(AuthorizationCodeGrant): + AUTHORIZATION_CODE_LENGTH = 32 # default is 48 + """ + return generate_token(self.AUTHORIZATION_CODE_LENGTH) + + def save_authorization_code(self, code, request): + """Save authorization_code for later use. Developers MUST implement + it in subclass. 
Here is an example:: + + def save_authorization_code(self, code, request): + client = request.client + item = AuthorizationCode( + code=code, + client_id=client.client_id, + redirect_uri=request.redirect_uri, + scope=request.scope, + user_id=request.user.id, + ) + item.save() + """ + raise NotImplementedError() + + def query_authorization_code(self, code, client): # pragma: no cover + """Get authorization_code from previously savings. Developers MUST + implement it in subclass:: + + def query_authorization_code(self, code, client): + return Authorization.get(code=code, client_id=client.client_id) + + :param code: a string represent the code. + :param client: client related to this code. + :return: authorization_code object + """ + if hasattr(self, 'parse_authorization_code'): + deprecate('Use "query_authorization_code" instead', '1.0') + return self.parse_authorization_code(code, client) + raise NotImplementedError() + + def delete_authorization_code(self, authorization_code): + """Delete authorization code from database or cache. Developers MUST + implement it in subclass, e.g.:: + + def delete_authorization_code(self, authorization_code): + authorization_code.delete() + + :param authorization_code: the instance of authorization_code + """ + raise NotImplementedError() + + def authenticate_user(self, authorization_code): + """Authenticate the user related to this authorization_code. Developers + MUST implement this method in subclass, e.g.:: + + def authenticate_user(self, authorization_code): + return User.query.get(authorization_code.user_id) + + :param authorization_code: AuthorizationCode object + :return: user + """ + raise NotImplementedError() + + +def validate_code_authorization_request(grant): + client_id = grant.request.client_id + log.debug('Validate authorization request of %r', client_id) + + if client_id is None: + raise InvalidClientError(state=grant.request.state) + + client = grant.server.query_client(client_id) + if not client: + raise InvalidClientError(state=grant.request.state) + + redirect_uri = grant.validate_authorization_redirect_uri(grant.request, client) + response_type = grant.request.response_type + if not client.check_response_type(response_type): + raise UnauthorizedClientError( + 'The client is not authorized to use ' + '"response_type={}"'.format(response_type), + state=grant.request.state, + redirect_uri=redirect_uri, + ) + + try: + grant.request.client = client + grant.validate_requested_scope() + grant.execute_hook('after_validate_authorization_request') + except OAuth2Error as error: + error.redirect_uri = redirect_uri + raise error + return redirect_uri diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/base.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/base.py new file mode 100644 index 000000000..9fe03c904 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/base.py @@ -0,0 +1,151 @@ +from authlib.consts import default_json_headers +from ..errors import InvalidRequestError + + +class BaseGrant(object): + #: Allowed client auth methods for token endpoint + TOKEN_ENDPOINT_AUTH_METHODS = ['client_secret_basic'] + + #: Designed for which "grant_type" + GRANT_TYPE = None + + # NOTE: there is no charset for application/json, since + # application/json should always in UTF-8. + # The example on RFC is incorrect. 
+ # https://tools.ietf.org/html/rfc4627 + TOKEN_RESPONSE_HEADER = default_json_headers + + def __init__(self, request, server): + self.request = request + self.server = server + self._hooks = { + 'after_validate_authorization_request': set(), + 'after_validate_consent_request': set(), + 'after_validate_token_request': set(), + 'process_token': set(), + } + + @property + def client(self): + return self.request.client + + def generate_token(self, user=None, scope=None, grant_type=None, + expires_in=None, include_refresh_token=True): + + if grant_type is None: + grant_type = self.GRANT_TYPE + + client = self.request.client + if scope is not None: + scope = client.get_allowed_scope(scope) + + return self.server.generate_token( + client=client, + grant_type=grant_type, + user=user, + scope=scope, + expires_in=expires_in, + include_refresh_token=include_refresh_token, + ) + + def authenticate_token_endpoint_client(self): + """Authenticate client with the given methods for token endpoint. + + For example, the client makes the following HTTP request using TLS: + + .. code-block:: http + + POST /token HTTP/1.1 + Host: server.example.com + Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW + Content-Type: application/x-www-form-urlencoded + + grant_type=authorization_code&code=SplxlOBeZQQYbYS6WxSbIA + &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb + + Default available methods are: "none", "client_secret_basic" and + "client_secret_post". + + :return: client + """ + client = self.server.authenticate_client( + self.request, + self.TOKEN_ENDPOINT_AUTH_METHODS) + self.server.send_signal( + 'after_authenticate_client', + client=client, grant=self) + return client + + def save_token(self, token): + """A method to save token into database.""" + return self.server.save_token(token, self.request) + + def validate_requested_scope(self): + """Validate if requested scope is supported by Authorization Server.""" + scope = self.request.scope + state = self.request.state + return self.server.validate_requested_scope(scope, state) + + def register_hook(self, hook_type, hook): + if hook_type not in self._hooks: + raise ValueError('Hook type %s is not in %s.', + hook_type, self._hooks) + self._hooks[hook_type].add(hook) + + def execute_hook(self, hook_type, *args, **kwargs): + for hook in self._hooks[hook_type]: + hook(self, *args, **kwargs) + + +class TokenEndpointMixin(object): + #: Allowed HTTP methods of this token endpoint + TOKEN_ENDPOINT_HTTP_METHODS = ['POST'] + + #: Designed for which "grant_type" + GRANT_TYPE = None + + @classmethod + def check_token_endpoint(cls, request): + return request.grant_type == cls.GRANT_TYPE + + def validate_token_request(self): + raise NotImplementedError() + + def create_token_response(self): + raise NotImplementedError() + + +class AuthorizationEndpointMixin(object): + RESPONSE_TYPES = set() + ERROR_RESPONSE_FRAGMENT = False + + @classmethod + def check_authorization_endpoint(cls, request): + return request.response_type in cls.RESPONSE_TYPES + + @staticmethod + def validate_authorization_redirect_uri(request, client): + if request.redirect_uri: + if not client.check_redirect_uri(request.redirect_uri): + raise InvalidRequestError( + 'Redirect URI {!r} is not supported by client.'.format(request.redirect_uri), + state=request.state, + ) + return request.redirect_uri + else: + redirect_uri = client.get_default_redirect_uri() + if not redirect_uri: + raise InvalidRequestError( + 'Missing "redirect_uri" in request.' 
+ ) + return redirect_uri + + def validate_consent_request(self): + redirect_uri = self.validate_authorization_request() + self.execute_hook('after_validate_consent_request', redirect_uri) + + def validate_authorization_request(self): + raise NotImplementedError() + + def create_authorization_response(self, redirect_uri, grant_user): + raise NotImplementedError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/client_credentials.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/client_credentials.py new file mode 100644 index 000000000..784a37028 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/client_credentials.py @@ -0,0 +1,103 @@ +import logging +from .base import BaseGrant, TokenEndpointMixin +from ..errors import UnauthorizedClientError + +log = logging.getLogger(__name__) + + +class ClientCredentialsGrant(BaseGrant, TokenEndpointMixin): + """The client can request an access token using only its client + credentials (or other supported means of authentication) when the + client is requesting access to the protected resources under its + control, or those of another resource owner that have been previously + arranged with the authorization server. + + The client credentials grant type MUST only be used by confidential + clients:: + + +---------+ +---------------+ + | | | | + | |>--(A)- Client Authentication --->| Authorization | + | Client | | Server | + | |<--(B)---- Access Token ---------<| | + | | | | + +---------+ +---------------+ + + https://tools.ietf.org/html/rfc6749#section-4.4 + """ + GRANT_TYPE = 'client_credentials' + + def validate_token_request(self): + """The client makes a request to the token endpoint by adding the + following parameters using the "application/x-www-form-urlencoded" + format per Appendix B with a character encoding of UTF-8 in the HTTP + request entity-body: + + grant_type + REQUIRED. Value MUST be set to "client_credentials". + + scope + OPTIONAL. The scope of the access request as described by + Section 3.3. + + The client MUST authenticate with the authorization server as + described in Section 3.2.1. + + For example, the client makes the following HTTP request using + transport-layer security (with extra line breaks for display purposes + only): + + .. code-block:: http + + POST /token HTTP/1.1 + Host: server.example.com + Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW + Content-Type: application/x-www-form-urlencoded + + grant_type=client_credentials + + The authorization server MUST authenticate the client. + """ + + # ignore validate for grant_type, since it is validated by + # check_token_endpoint + client = self.authenticate_token_endpoint_client() + log.debug('Validate token request of %r', client) + + if not client.check_grant_type(self.GRANT_TYPE): + raise UnauthorizedClientError() + + self.request.client = client + self.validate_requested_scope() + + def create_token_response(self): + """If the access token request is valid and authorized, the + authorization server issues an access token as described in + Section 5.1. A refresh token SHOULD NOT be included. If the request + failed client authentication or is invalid, the authorization server + returns an error response as described in Section 5.2. + + An example successful response: + + .. 
code-block:: http + + HTTP/1.1 200 OK + Content-Type: application/json + Cache-Control: no-store + Pragma: no-cache + + { + "access_token":"2YotnFZFEjr1zCsicMWpAA", + "token_type":"example", + "expires_in":3600, + "example_parameter":"example_value" + } + + :returns: (status_code, body, headers) + """ + client = self.request.client + token = self.generate_token(scope=self.request.scope, include_refresh_token=False) + log.debug('Issue token %r to %r', token, client) + self.save_token(token) + self.execute_hook('process_token', self, token=token) + return 200, token, self.TOKEN_RESPONSE_HEADER diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/implicit.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/implicit.py new file mode 100644 index 000000000..75b12be43 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/implicit.py @@ -0,0 +1,229 @@ +import logging +from authlib.common.urls import add_params_to_uri +from .base import BaseGrant, AuthorizationEndpointMixin +from ..errors import ( + OAuth2Error, + UnauthorizedClientError, + AccessDeniedError, +) + +log = logging.getLogger(__name__) + + +class ImplicitGrant(BaseGrant, AuthorizationEndpointMixin): + """The implicit grant type is used to obtain access tokens (it does not + support the issuance of refresh tokens) and is optimized for public + clients known to operate a particular redirection URI. These clients + are typically implemented in a browser using a scripting language + such as JavaScript. + + Since this is a redirection-based flow, the client must be capable of + interacting with the resource owner's user-agent (typically a web + browser) and capable of receiving incoming requests (via redirection) + from the authorization server. + + Unlike the authorization code grant type, in which the client makes + separate requests for authorization and for an access token, the + client receives the access token as the result of the authorization + request. + + The implicit grant type does not include client authentication, and + relies on the presence of the resource owner and the registration of + the redirection URI. 
Because the access token is encoded into the + redirection URI, it may be exposed to the resource owner and other + applications residing on the same device:: + + +----------+ + | Resource | + | Owner | + | | + +----------+ + ^ + | + (B) + +----|-----+ Client Identifier +---------------+ + | -+----(A)-- & Redirection URI --->| | + | User- | | Authorization | + | Agent -|----(B)-- User authenticates -->| Server | + | | | | + | |<---(C)--- Redirection URI ----<| | + | | with Access Token +---------------+ + | | in Fragment + | | +---------------+ + | |----(D)--- Redirection URI ---->| Web-Hosted | + | | without Fragment | Client | + | | | Resource | + | (F) |<---(E)------- Script ---------<| | + | | +---------------+ + +-|--------+ + | | + (A) (G) Access Token + | | + ^ v + +---------+ + | | + | Client | + | | + +---------+ + """ + #: authorization_code grant type has authorization endpoint + AUTHORIZATION_ENDPOINT = True + #: Allowed client auth methods for token endpoint + TOKEN_ENDPOINT_AUTH_METHODS = ['none'] + + RESPONSE_TYPES = {'token'} + GRANT_TYPE = 'implicit' + ERROR_RESPONSE_FRAGMENT = True + + def validate_authorization_request(self): + """The client constructs the request URI by adding the following + parameters to the query component of the authorization endpoint URI + using the "application/x-www-form-urlencoded" format. + Per `Section 4.2.1`_. + + response_type + REQUIRED. Value MUST be set to "token". + + client_id + REQUIRED. The client identifier as described in Section 2.2. + + redirect_uri + OPTIONAL. As described in Section 3.1.2. + + scope + OPTIONAL. The scope of the access request as described by + Section 3.3. + + state + RECOMMENDED. An opaque value used by the client to maintain + state between the request and callback. The authorization + server includes this value when redirecting the user-agent back + to the client. The parameter SHOULD be used for preventing + cross-site request forgery as described in Section 10.12. + + The client directs the resource owner to the constructed URI using an + HTTP redirection response, or by other means available to it via the + user-agent. + + For example, the client directs the user-agent to make the following + HTTP request using TLS: + + .. code-block:: http + + GET /authorize?response_type=token&client_id=s6BhdRkqt3&state=xyz + &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb HTTP/1.1 + Host: server.example.com + + .. 
_`Section 4.2.1`: https://tools.ietf.org/html/rfc6749#section-4.2.1 + """ + # ignore validate for response_type, since it is validated by + # check_authorization_endpoint + + # The implicit grant type is optimized for public clients + client = self.authenticate_token_endpoint_client() + log.debug('Validate authorization request of %r', client) + + redirect_uri = self.validate_authorization_redirect_uri( + self.request, client) + + response_type = self.request.response_type + if not client.check_response_type(response_type): + raise UnauthorizedClientError( + 'The client is not authorized to use ' + '"response_type={}"'.format(response_type), + state=self.request.state, + redirect_uri=redirect_uri, + redirect_fragment=True, + ) + + try: + self.request.client = client + self.validate_requested_scope() + self.execute_hook('after_validate_authorization_request') + except OAuth2Error as error: + error.redirect_uri = redirect_uri + error.redirect_fragment = True + raise error + return redirect_uri + + def create_authorization_response(self, redirect_uri, grant_user): + """If the resource owner grants the access request, the authorization + server issues an access token and delivers it to the client by adding + the following parameters to the fragment component of the redirection + URI using the "application/x-www-form-urlencoded" format. + Per `Section 4.2.2`_. + + access_token + REQUIRED. The access token issued by the authorization server. + + token_type + REQUIRED. The type of the token issued as described in + Section 7.1. Value is case insensitive. + + expires_in + RECOMMENDED. The lifetime in seconds of the access token. For + example, the value "3600" denotes that the access token will + expire in one hour from the time the response was generated. + If omitted, the authorization server SHOULD provide the + expiration time via other means or document the default value. + + scope + OPTIONAL, if identical to the scope requested by the client; + otherwise, REQUIRED. The scope of the access token as + described by Section 3.3. + + state + REQUIRED if the "state" parameter was present in the client + authorization request. The exact value received from the + client. + + The authorization server MUST NOT issue a refresh token. + + For example, the authorization server redirects the user-agent by + sending the following HTTP response: + + .. code-block:: http + + HTTP/1.1 302 Found + Location: http://example.com/cb#access_token=2YotnFZFEjr1zCsicMWpAA + &state=xyz&token_type=example&expires_in=3600 + + Developers should note that some user-agents do not support the + inclusion of a fragment component in the HTTP "Location" response + header field. Such clients will require using other methods for + redirecting the client than a 3xx redirection response -- for + example, returning an HTML page that includes a 'continue' button + with an action linked to the redirection URI. + + .. _`Section 4.2.2`: https://tools.ietf.org/html/rfc6749#section-4.2.2 + + :param redirect_uri: Redirect to the given URI for the authorization + :param grant_user: if resource owner granted the request, pass this + resource owner, otherwise pass None. 
+ :returns: (status_code, body, headers) + """ + state = self.request.state + if grant_user: + self.request.user = grant_user + token = self.generate_token( + user=grant_user, + scope=self.request.scope, + include_refresh_token=False, + ) + log.debug('Grant token %r to %r', token, self.request.client) + + self.save_token(token) + self.execute_hook('process_token', token=token) + params = [(k, token[k]) for k in token] + if state: + params.append(('state', state)) + + uri = add_params_to_uri(redirect_uri, params, fragment=True) + headers = [('Location', uri)] + return 302, '', headers + else: + raise AccessDeniedError( + state=state, + redirect_uri=redirect_uri, + redirect_fragment=True + ) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/refresh_token.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/refresh_token.py new file mode 100644 index 000000000..d29f4f950 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/refresh_token.py @@ -0,0 +1,183 @@ +""" + authlib.oauth2.rfc6749.grants.refresh_token + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + A special grant endpoint for refresh_token grant_type. Refreshing an + Access Token per `Section 6`_. + + .. _`Section 6`: https://tools.ietf.org/html/rfc6749#section-6 +""" + +import logging +from .base import BaseGrant, TokenEndpointMixin +from ..util import scope_to_list +from ..errors import ( + InvalidRequestError, + InvalidScopeError, + InvalidGrantError, + UnauthorizedClientError, +) +log = logging.getLogger(__name__) + + +class RefreshTokenGrant(BaseGrant, TokenEndpointMixin): + """A special grant endpoint for refresh_token grant_type. Refreshing an + Access Token per `Section 6`_. + + .. _`Section 6`: https://tools.ietf.org/html/rfc6749#section-6 + """ + GRANT_TYPE = 'refresh_token' + + #: The authorization server MAY issue a new refresh token + INCLUDE_NEW_REFRESH_TOKEN = False + + def _validate_request_client(self): + # require client authentication for confidential clients or for any + # client that was issued client credentials (or with other + # authentication requirements) + client = self.authenticate_token_endpoint_client() + log.debug('Validate token request of %r', client) + + if not client.check_grant_type(self.GRANT_TYPE): + raise UnauthorizedClientError() + + return client + + def _validate_request_token(self, client): + refresh_token = self.request.form.get('refresh_token') + if refresh_token is None: + raise InvalidRequestError( + 'Missing "refresh_token" in request.', + ) + + token = self.authenticate_refresh_token(refresh_token) + if not token or token.get_client_id() != client.get_client_id(): + raise InvalidGrantError() + return token + + def _validate_token_scope(self, token): + scope = self.request.scope + if not scope: + return + + original_scope = token.get_scope() + if not original_scope: + raise InvalidScopeError() + + original_scope = set(scope_to_list(original_scope)) + if not original_scope.issuperset(set(scope_to_list(scope))): + raise InvalidScopeError() + + def validate_token_request(self): + """If the authorization server issued a refresh token to the client, the + client makes a refresh request to the token endpoint by adding the + following parameters using the "application/x-www-form-urlencoded" + format per Appendix B with a character encoding of UTF-8 in the HTTP + request entity-body, per Section 6: + + grant_type + REQUIRED. Value MUST be set to "refresh_token". 
+ + refresh_token + REQUIRED. The refresh token issued to the client. + + scope + OPTIONAL. The scope of the access request as described by + Section 3.3. The requested scope MUST NOT include any scope + not originally granted by the resource owner, and if omitted is + treated as equal to the scope originally granted by the + resource owner. + + + For example, the client makes the following HTTP request using + transport-layer security (with extra line breaks for display purposes + only): + + .. code-block:: http + + POST /token HTTP/1.1 + Host: server.example.com + Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW + Content-Type: application/x-www-form-urlencoded + + grant_type=refresh_token&refresh_token=tGzv3JOkF0XG5Qx2TlKWIA + """ + client = self._validate_request_client() + self.request.client = client + token = self._validate_request_token(client) + self._validate_token_scope(token) + self.request.credential = token + + def create_token_response(self): + """If valid and authorized, the authorization server issues an access + token as described in Section 5.1. If the request failed + verification or is invalid, the authorization server returns an error + response as described in Section 5.2. + """ + credential = self.request.credential + user = self.authenticate_user(credential) + if not user: + raise InvalidRequestError('There is no "user" for this token.') + + client = self.request.client + token = self.issue_token(user, credential) + log.debug('Issue token %r to %r', token, client) + + self.request.user = user + self.save_token(token) + self.execute_hook('process_token', token=token) + self.revoke_old_credential(credential) + return 200, token, self.TOKEN_RESPONSE_HEADER + + def issue_token(self, user, credential): + expires_in = credential.get_expires_in() + scope = self.request.scope + if not scope: + scope = credential.get_scope() + + token = self.generate_token( + user=user, + expires_in=expires_in, + scope=scope, + include_refresh_token=self.INCLUDE_NEW_REFRESH_TOKEN, + ) + return token + + def authenticate_refresh_token(self, refresh_token): + """Get token information with refresh_token string. Developers MUST + implement this method in subclass:: + + def authenticate_refresh_token(self, refresh_token): + item = Token.get(refresh_token=refresh_token) + if item and item.is_refresh_token_active(): + return item + + :param refresh_token: The refresh token issued to the client + :return: token + """ + raise NotImplementedError() + + def authenticate_user(self, credential): + """Authenticate the user related to this credential. Developers MUST + implement this method in subclass:: + + def authenticate_user(self, credential): + return User.query.get(credential.user_id) + + :param credential: Token object + :return: user + """ + raise NotImplementedError() + + def revoke_old_credential(self, credential): + """The authorization server MAY revoke the old refresh token after + issuing a new refresh token to the client. 
Developers MUST implement + this method in subclass:: + + def revoke_old_credential(self, credential): + credential.revoked = True + credential.save() + + :param credential: Token object + """ + raise NotImplementedError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/resource_owner_password_credentials.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/resource_owner_password_credentials.py new file mode 100644 index 000000000..df31c867c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/grants/resource_owner_password_credentials.py @@ -0,0 +1,154 @@ +import logging +from .base import BaseGrant, TokenEndpointMixin +from ..errors import ( + UnauthorizedClientError, + InvalidRequestError, +) + +log = logging.getLogger(__name__) + + +class ResourceOwnerPasswordCredentialsGrant(BaseGrant, TokenEndpointMixin): + """The resource owner password credentials grant type is suitable in + cases where the resource owner has a trust relationship with the + client, such as the device operating system or a highly privileged + + application. The authorization server should take special care when + enabling this grant type and only allow it when other flows are not + viable. + + This grant type is suitable for clients capable of obtaining the + resource owner's credentials (username and password, typically using + an interactive form). It is also used to migrate existing clients + using direct authentication schemes such as HTTP Basic or Digest + authentication to OAuth by converting the stored credentials to an + access token:: + + +----------+ + | Resource | + | Owner | + | | + +----------+ + v + | Resource Owner + (A) Password Credentials + | + v + +---------+ +---------------+ + | |>--(B)---- Resource Owner ------->| | + | | Password Credentials | Authorization | + | Client | | Server | + | |<--(C)---- Access Token ---------<| | + | | (w/ Optional Refresh Token) | | + +---------+ +---------------+ + """ + GRANT_TYPE = 'password' + + def validate_token_request(self): + """The client makes a request to the token endpoint by adding the + following parameters using the "application/x-www-form-urlencoded" + format per Appendix B with a character encoding of UTF-8 in the HTTP + request entity-body: + + grant_type + REQUIRED. Value MUST be set to "password". + + username + REQUIRED. The resource owner username. + + password + REQUIRED. The resource owner password. + + scope + OPTIONAL. The scope of the access request as described by + Section 3.3. + + If the client type is confidential or the client was issued client + credentials (or assigned other authentication requirements), the + client MUST authenticate with the authorization server as described + in Section 3.2.1. + + For example, the client makes the following HTTP request using + transport-layer security (with extra line breaks for display purposes + only): + + .. 
code-block:: http + + POST /token HTTP/1.1 + Host: server.example.com + Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW + Content-Type: application/x-www-form-urlencoded + + grant_type=password&username=johndoe&password=A3ddj3w + """ + # ignore validate for grant_type, since it is validated by + # check_token_endpoint + client = self.authenticate_token_endpoint_client() + log.debug('Validate token request of %r', client) + + if not client.check_grant_type(self.GRANT_TYPE): + raise UnauthorizedClientError() + + params = self.request.form + if 'username' not in params: + raise InvalidRequestError('Missing "username" in request.') + if 'password' not in params: + raise InvalidRequestError('Missing "password" in request.') + + log.debug('Authenticate user of %r', params['username']) + user = self.authenticate_user( + params['username'], + params['password'] + ) + if not user: + raise InvalidRequestError( + 'Invalid "username" or "password" in request.', + ) + self.request.client = client + self.request.user = user + self.validate_requested_scope() + + def create_token_response(self): + """If the access token request is valid and authorized, the + authorization server issues an access token and optional refresh + token as described in Section 5.1. If the request failed client + authentication or is invalid, the authorization server returns an + error response as described in Section 5.2. + + An example successful response: + + .. code-block:: http + + HTTP/1.1 200 OK + Content-Type: application/json + Cache-Control: no-store + Pragma: no-cache + + { + "access_token":"2YotnFZFEjr1zCsicMWpAA", + "token_type":"example", + "expires_in":3600, + "refresh_token":"tGzv3JOkF0XG5Qx2TlKWIA", + "example_parameter":"example_value" + } + + :returns: (status_code, body, headers) + """ + user = self.request.user + scope = self.request.scope + token = self.generate_token(user=user, scope=scope) + log.debug('Issue token %r to %r', token, self.request.client) + self.save_token(token) + self.execute_hook('process_token', token=token) + return 200, token, self.TOKEN_RESPONSE_HEADER + + def authenticate_user(self, username, password): + """validate the resource owner password credentials using its + existing password validation algorithm:: + + def authenticate_user(self, username, password): + user = get_user_by_username(username) + if user.check_password(password): + return user + """ + raise NotImplementedError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/models.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/models.py new file mode 100644 index 000000000..0f86a4aa2 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/models.py @@ -0,0 +1,212 @@ +""" + authlib.oauth2.rfc6749.models + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + This module defines how to construct Client, AuthorizationCode and Token. +""" + + +class ClientMixin(object): + """Implementation of OAuth 2 Client described in `Section 2`_ with + some methods to help validation. A client has at least these information: + + * client_id: A string represents client identifier. + * client_secret: A string represents client password. + * token_endpoint_auth_method: A way to authenticate client at token + endpoint. + + .. _`Section 2`: https://tools.ietf.org/html/rfc6749#section-2 + """ + + def get_client_id(self): + """A method to return client_id of the client. 
For instance, the value + in database is saved in a column called ``client_id``:: + + def get_client_id(self): + return self.client_id + + :return: string + """ + raise NotImplementedError() + + def get_default_redirect_uri(self): + """A method to get client default redirect_uri. For instance, the + database table for client has a column called ``default_redirect_uri``:: + + def get_default_redirect_uri(self): + return self.default_redirect_uri + + :return: A URL string + """ + raise NotImplementedError() + + def get_allowed_scope(self, scope): + """A method to return a list of requested scopes which are supported by + this client. For instance, there is a ``scope`` column:: + + def get_allowed_scope(self, scope): + if not scope: + return '' + allowed = set(scope_to_list(self.scope)) + return list_to_scope([s for s in scope.split() if s in allowed]) + + :param scope: the requested scope. + :return: string of scope + """ + raise NotImplementedError() + + def check_redirect_uri(self, redirect_uri): + """Validate redirect_uri parameter in Authorization Endpoints. For + instance, in the client table, there is an ``allowed_redirect_uris`` + column:: + + def check_redirect_uri(self, redirect_uri): + return redirect_uri in self.allowed_redirect_uris + + :param redirect_uri: A URL string for redirecting. + :return: bool + """ + raise NotImplementedError() + + def has_client_secret(self): + """A method returns that if the client has ``client_secret`` value. + If the value is in ``client_secret`` column:: + + def has_client_secret(self): + return bool(self.client_secret) + + :return: bool + """ + raise NotImplementedError() + + def check_client_secret(self, client_secret): + """Check client_secret matching with the client. For instance, in + the client table, the column is called ``client_secret``:: + + def check_client_secret(self, client_secret): + return self.client_secret == client_secret + + :param client_secret: A string of client secret + :return: bool + """ + raise NotImplementedError() + + def check_token_endpoint_auth_method(self, method): + """Check client ``token_endpoint_auth_method`` defined via `RFC7591`_. + Values defined by this specification are: + + * "none": The client is a public client as defined in OAuth 2.0, + and does not have a client secret. + + * "client_secret_post": The client uses the HTTP POST parameters + as defined in OAuth 2.0 + + * "client_secret_basic": The client uses HTTP Basic as defined in + OAuth 2.0 + + .. _`RFC7591`: https://tools.ietf.org/html/rfc7591 + """ + raise NotImplementedError() + + def check_response_type(self, response_type): + """Validate if the client can handle the given response_type. There + are two response types defined by RFC6749: code and token. For + instance, there is a ``allowed_response_types`` column in your client:: + + def check_response_type(self, response_type): + return response_type in self.response_types + + :param response_type: the requested response_type string. + :return: bool + """ + raise NotImplementedError() + + def check_grant_type(self, grant_type): + """Validate if the client can handle the given grant_type. There are + four grant types defined by RFC6749: + + * authorization_code + * implicit + * client_credentials + * password + + For instance, there is a ``allowed_grant_types`` column in your client:: + + def check_grant_type(self, grant_type): + return grant_type in self.grant_types + + :param grant_type: the requested grant_type string. 
+ :return: bool + """ + raise NotImplementedError() + + +class AuthorizationCodeMixin(object): + def get_redirect_uri(self): + """A method to get authorization code's ``redirect_uri``. + For instance, the database table for authorization code has a + column called ``redirect_uri``:: + + def get_redirect_uri(self): + return self.redirect_uri + + :return: A URL string + """ + raise NotImplementedError() + + def get_scope(self): + """A method to get scope of the authorization code. For instance, + the column is called ``scope``:: + + def get_scope(self): + return self.scope + + :return: scope string + """ + raise NotImplementedError() + + +class TokenMixin(object): + def get_client_id(self): + """A method to return client_id of the token. For instance, the value + in database is saved in a column called ``client_id``:: + + def get_client_id(self): + return self.client_id + + :return: string + """ + raise NotImplementedError() + + def get_scope(self): + """A method to get scope of the authorization code. For instance, + the column is called ``scope``:: + + def get_scope(self): + return self.scope + + :return: scope string + """ + raise NotImplementedError() + + def get_expires_in(self): + """A method to get the ``expires_in`` value of the token. e.g. + the column is called ``expires_in``:: + + def get_expires_in(self): + return self.expires_in + + :return: timestamp int + """ + raise NotImplementedError() + + def get_expires_at(self): + """A method to get the value when this token will be expired. e.g. + it would be:: + + def get_expires_at(self): + return self.created_at + self.expires_in + + :return: timestamp int + """ + raise NotImplementedError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/parameters.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/parameters.py new file mode 100644 index 000000000..20461fdbc --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/parameters.py @@ -0,0 +1,213 @@ +from authlib.common.urls import ( + urlparse, + add_params_to_uri, + add_params_to_qs, +) +from authlib.common.encoding import to_unicode +from .errors import ( + MissingCodeException, + MissingTokenException, + MissingTokenTypeException, + MismatchingStateException, +) +from .util import list_to_scope + + +def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None, + scope=None, state=None, **kwargs): + """Prepare the authorization grant request URI. + + The client constructs the request URI by adding the following + parameters to the query component of the authorization endpoint URI + using the ``application/x-www-form-urlencoded`` format: + + :param uri: The authorize endpoint to fetch "code" or "token". + :param client_id: The client identifier as described in `Section 2.2`_. + :param response_type: To indicate which OAuth 2 grant/flow is required, + "code" and "token". + :param redirect_uri: The client provided URI to redirect back to after + authorization as described in `Section 3.1.2`_. + :param scope: The scope of the access request as described by + `Section 3.3`_. + :param state: An opaque value used by the client to maintain + state between the request and callback. The authorization + server includes this value when redirecting the user-agent + back to the client. The parameter SHOULD be used for + preventing cross-site request forgery as described in + `Section 10.12`_. + :param kwargs: Extra arguments to embed in the grant/authorization URL. 
+ + An example of an authorization code grant authorization URL:: + + /authorize?response_type=code&client_id=s6BhdRkqt3&state=xyz + &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb + + .. _`Section 2.2`: https://tools.ietf.org/html/rfc6749#section-2.2 + .. _`Section 3.1.2`: https://tools.ietf.org/html/rfc6749#section-3.1.2 + .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3 + .. _`section 10.12`: https://tools.ietf.org/html/rfc6749#section-10.12 + """ + params = [ + ('response_type', response_type), + ('client_id', client_id) + ] + + if redirect_uri: + params.append(('redirect_uri', redirect_uri)) + if scope: + params.append(('scope', list_to_scope(scope))) + if state: + params.append(('state', state)) + + for k in kwargs: + if kwargs[k]: + params.append((to_unicode(k), kwargs[k])) + + return add_params_to_uri(uri, params) + + +def prepare_token_request(grant_type, body='', redirect_uri=None, **kwargs): + """Prepare the access token request. Per `Section 4.1.3`_. + + The client makes a request to the token endpoint by adding the + following parameters using the ``application/x-www-form-urlencoded`` + format in the HTTP request entity-body: + + :param grant_type: To indicate grant type being used, i.e. "password", + "authorization_code" or "client_credentials". + :param body: Existing request body to embed parameters in. + :param redirect_uri: If the "redirect_uri" parameter was included in the + authorization request as described in + `Section 4.1.1`_, and their values MUST be identical. + :param kwargs: Extra arguments to embed in the request body. + + An example of an authorization code token request body:: + + grant_type=authorization_code&code=SplxlOBeZQQYbYS6WxSbIA + &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb + + .. _`Section 4.1.1`: https://tools.ietf.org/html/rfc6749#section-4.1.1 + .. _`Section 4.1.3`: https://tools.ietf.org/html/rfc6749#section-4.1.3 + """ + params = [('grant_type', grant_type)] + + if redirect_uri: + params.append(('redirect_uri', redirect_uri)) + + if 'scope' in kwargs: + kwargs['scope'] = list_to_scope(kwargs['scope']) + + if grant_type == 'authorization_code' and 'code' not in kwargs: + raise MissingCodeException() + + for k in kwargs: + if kwargs[k]: + params.append((to_unicode(k), kwargs[k])) + + return add_params_to_qs(body, params) + + +def parse_authorization_code_response(uri, state=None): + """Parse authorization grant response URI into a dict. + + If the resource owner grants the access request, the authorization + server issues an authorization code and delivers it to the client by + adding the following parameters to the query component of the + redirection URI using the ``application/x-www-form-urlencoded`` format: + + **code** + REQUIRED. The authorization code generated by the + authorization server. The authorization code MUST expire + shortly after it is issued to mitigate the risk of leaks. A + maximum authorization code lifetime of 10 minutes is + RECOMMENDED. The client MUST NOT use the authorization code + more than once. If an authorization code is used more than + once, the authorization server MUST deny the request and SHOULD + revoke (when possible) all tokens previously issued based on + that authorization code. The authorization code is bound to + the client identifier and redirection URI. + + **state** + REQUIRED if the "state" parameter was present in the client + authorization request. The exact value received from the + client. + + :param uri: The full redirect URL back to the client. 
+ :param state: The state parameter from the authorization request. + + For example, the authorization server redirects the user-agent by + sending the following HTTP response: + + .. code-block:: http + + HTTP/1.1 302 Found + Location: https://client.example.com/cb?code=SplxlOBeZQQYbYS6WxSbIA + &state=xyz + + """ + query = urlparse.urlparse(uri).query + params = dict(urlparse.parse_qsl(query)) + + if 'code' not in params: + raise MissingCodeException() + + if state and params.get('state', None) != state: + raise MismatchingStateException() + + return params + + +def parse_implicit_response(uri, state=None): + """Parse the implicit token response URI into a dict. + + If the resource owner grants the access request, the authorization + server issues an access token and delivers it to the client by adding + the following parameters to the fragment component of the redirection + URI using the ``application/x-www-form-urlencoded`` format: + + **access_token** + REQUIRED. The access token issued by the authorization server. + + **token_type** + REQUIRED. The type of the token issued as described in + Section 7.1. Value is case insensitive. + + **expires_in** + RECOMMENDED. The lifetime in seconds of the access token. For + example, the value "3600" denotes that the access token will + expire in one hour from the time the response was generated. + If omitted, the authorization server SHOULD provide the + expiration time via other means or document the default value. + + **scope** + OPTIONAL, if identical to the scope requested by the client, + otherwise REQUIRED. The scope of the access token as described + by Section 3.3. + + **state** + REQUIRED if the "state" parameter was present in the client + authorization request. The exact value received from the + client. + + Similar to the authorization code response, but with a full token provided + in the URL fragment: + + .. code-block:: http + + HTTP/1.1 302 Found + Location: http://example.com/cb#access_token=2YotnFZFEjr1zCsicMWpAA + &state=xyz&token_type=example&expires_in=3600 + """ + fragment = urlparse.urlparse(uri).fragment + params = dict(urlparse.parse_qsl(fragment, keep_blank_values=True)) + + if 'access_token' not in params: + raise MissingTokenException() + + if 'token_type' not in params: + raise MissingTokenTypeException() + + if state and params.get('state', None) != state: + raise MismatchingStateException() + + return params diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/resource_protector.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/resource_protector.py new file mode 100644 index 000000000..40567950c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/resource_protector.py @@ -0,0 +1,37 @@ +""" + authlib.oauth2.rfc6749.resource_protector + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Implementation of Accessing Protected Resources per `Section 7`_. + + .. 
_`Section 7`: https://tools.ietf.org/html/rfc6749#section-7 +""" + +from .errors import MissingAuthorizationError, UnsupportedTokenTypeError + + +class ResourceProtector(object): + def __init__(self): + self._token_validators = {} + + def register_token_validator(self, validator): + if validator.TOKEN_TYPE not in self._token_validators: + self._token_validators[validator.TOKEN_TYPE] = validator + + def validate_request(self, scope, request, scope_operator='AND'): + auth = request.headers.get('Authorization') + if not auth: + raise MissingAuthorizationError() + + # https://tools.ietf.org/html/rfc6749#section-7.1 + token_parts = auth.split(None, 1) + if len(token_parts) != 2: + raise UnsupportedTokenTypeError() + + token_type, token_string = token_parts + + validator = self._token_validators.get(token_type.lower()) + if not validator: + raise UnsupportedTokenTypeError() + + return validator(token_string, scope, request, scope_operator) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/token_endpoint.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/token_endpoint.py new file mode 100644 index 000000000..726f7e0fb --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/token_endpoint.py @@ -0,0 +1,34 @@ +class TokenEndpoint(object): + #: Endpoint name to be registered + ENDPOINT_NAME = None + #: Supported token types + SUPPORTED_TOKEN_TYPES = ('access_token', 'refresh_token') + #: Allowed client authentication methods + CLIENT_AUTH_METHODS = ['client_secret_basic'] + + def __init__(self, server): + self.server = server + + def __call__(self, request): + # make it callable for authorization server + # ``create_endpoint_response`` + return self.create_endpoint_response(request) + + def create_endpoint_request(self, request): + return self.server.create_oauth2_request(request) + + def authenticate_endpoint_client(self, request): + """Authenticate the client for this endpoint with ``CLIENT_AUTH_METHODS``.
+ """ + client = self.server.authenticate_client( + request=request, + methods=self.CLIENT_AUTH_METHODS, + ) + request.client = client + return client + + def authenticate_endpoint_credential(self, request, client): + raise NotImplementedError() + + def create_endpoint_response(self, request): + raise NotImplementedError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/util.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/util.py new file mode 100644 index 000000000..a216fbf34 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/util.py @@ -0,0 +1,40 @@ +import base64 +import binascii +from authlib.common.encoding import to_unicode + + +def list_to_scope(scope): + """Convert a list of scopes to a space separated string.""" + if isinstance(scope, (set, tuple, list)): + return " ".join([to_unicode(s) for s in scope]) + if scope is None: + return scope + return to_unicode(scope) + + +def scope_to_list(scope): + """Convert a space separated string to a list of scopes.""" + if isinstance(scope, (tuple, list, set)): + return [to_unicode(s) for s in scope] + elif scope is None: + return None + return scope.strip().split() + + +def extract_basic_authorization(headers): + auth = headers.get('Authorization') + if not auth or ' ' not in auth: + return None, None + + auth_type, auth_token = auth.split(None, 1) + if auth_type.lower() != 'basic': + return None, None + + try: + query = to_unicode(base64.b64decode(auth_token)) + except (binascii.Error, TypeError): + return None, None + if ':' in query: + username, password = query.split(':', 1) + return username, password + return query, None diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/wrappers.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/wrappers.py new file mode 100644 index 000000000..a1f454312 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6749/wrappers.py @@ -0,0 +1,96 @@ +import time +from authlib.common.urls import urlparse, url_decode +from .errors import InsecureTransportError + + +class OAuth2Token(dict): + def __init__(self, params): + if params.get('expires_at'): + params['expires_at'] = int(params['expires_at']) + elif params.get('expires_in'): + params['expires_at'] = int(time.time()) + \ + int(params['expires_in']) + super(OAuth2Token, self).__init__(params) + + def is_expired(self): + expires_at = self.get('expires_at') + if not expires_at: + return None + return expires_at < time.time() + + @classmethod + def from_dict(cls, token): + if isinstance(token, dict) and not isinstance(token, cls): + token = cls(token) + return token + + +class OAuth2Request(object): + def __init__(self, method, uri, body=None, headers=None): + InsecureTransportError.check(uri) + #: HTTP method + self.method = method + self.uri = uri + self.body = body + #: HTTP headers + self.headers = headers or {} + + self.query = urlparse.urlparse(uri).query + + self.args = dict(url_decode(self.query)) + self.form = self.body or {} + + #: dict of query and body params + data = {} + data.update(self.args) + data.update(self.form) + self.data = data + + #: authenticate method + self.auth_method = None + #: authenticated user on this request + self.user = None + #: authorization_code or token model instance + self.credential = None + #: client which sending this request + self.client = None + + @property + def client_id(self): + """The authorization server 
issues the registered client a client + identifier -- a unique string representing the registration + information provided by the client. The value is extracted from + request. + + :return: string + """ + return self.data.get('client_id') + + @property + def response_type(self): + return self.data.get('response_type') + + @property + def grant_type(self): + return self.data.get('grant_type') + + @property + def redirect_uri(self): + return self.data.get('redirect_uri') + + @property + def scope(self): + return self.data.get('scope') + + @property + def state(self): + return self.data.get('state') + + +class HttpRequest(object): + def __init__(self, method, uri, data=None, headers=None): + self.method = method + self.uri = uri + self.data = data + self.headers = headers or {} + self.user = None diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6750/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6750/__init__.py new file mode 100644 index 000000000..4ad021269 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6750/__init__.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +""" + authlib.oauth2.rfc6750 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + The OAuth 2.0 Authorization Framework: Bearer Token Usage. + + https://tools.ietf.org/html/rfc6750 +""" + +from .errors import InvalidRequestError, InvalidTokenError, InsufficientScopeError +from .parameters import add_bearer_token +from .wrappers import BearerToken +from .validator import BearerTokenValidator + + +__all__ = [ + 'InvalidRequestError', 'InvalidTokenError', 'InsufficientScopeError', + 'add_bearer_token', + 'BearerToken', + 'BearerTokenValidator', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6750/errors.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6750/errors.py new file mode 100644 index 000000000..fe8029c32 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6750/errors.py @@ -0,0 +1,91 @@ +""" + authlib.rfc6750.errors + ~~~~~~~~~~~~~~~~~~~~~~ + + OAuth Extensions Error Registration. When a request fails, + the resource server responds using the appropriate HTTP + status code and includes one of the following error codes + in the response. + + https://tools.ietf.org/html/rfc6750#section-6.2 + + :copyright: (c) 2017 by Hsiaoming Yang. +""" +from ..base import OAuth2Error +from ..rfc6749.errors import InvalidRequestError + +__all__ = [ + 'InvalidRequestError', 'InvalidTokenError', 'InsufficientScopeError' +] + + +class InvalidTokenError(OAuth2Error): + """The access token provided is expired, revoked, malformed, or + invalid for other reasons. The resource SHOULD respond with + the HTTP 401 (Unauthorized) status code. The client MAY + request a new access token and retry the protected resource + request. + + https://tools.ietf.org/html/rfc6750#section-3.1 + """ + error = 'invalid_token' + status_code = 401 + + def __init__(self, description=None, uri=None, status_code=None, + state=None, realm=None): + super(InvalidTokenError, self).__init__( + description, uri, status_code, state) + self.realm = realm + + def get_error_description(self): + return self.gettext( + 'The access token provided is expired, revoked, malformed, ' + 'or invalid for other reasons.' 
 + ) + + def get_headers(self): + """If the protected resource request does not include authentication + credentials or does not contain an access token that enables access + to the protected resource, the resource server MUST include the HTTP + "WWW-Authenticate" response header field; it MAY include it in + response to other conditions as well. + + https://tools.ietf.org/html/rfc6750#section-3 + """ + headers = super(InvalidTokenError, self).get_headers() + + extras = [] + if self.realm: + extras.append('realm="{}"'.format(self.realm)) + extras.append('error="{}"'.format(self.error)) + error_description = self.get_error_description() + extras.append('error_description="{}"'.format(error_description)) + headers.append( + ('WWW-Authenticate', 'Bearer ' + ', '.join(extras)) + ) + return headers + + +class InsufficientScopeError(OAuth2Error): + """The request requires higher privileges than provided by the + access token. The resource server SHOULD respond with the HTTP + 403 (Forbidden) status code and MAY include the "scope" + attribute with the scope necessary to access the protected + resource. + + https://tools.ietf.org/html/rfc6750#section-3.1 + """ + error = 'insufficient_scope' + status_code = 403 + + def __init__(self, token_scope, required_scope): + super(InsufficientScopeError, self).__init__() + self.token_scope = token_scope + self.required_scope = required_scope + + def get_error_description(self): + return self.gettext( + 'The request requires higher privileges than ' + 'provided by the access token. ' + 'Required: "%(required)s", Provided: "%(provided)s"' + ) % dict(required=self.required_scope, provided=self.token_scope) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6750/parameters.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6750/parameters.py new file mode 100644 index 000000000..5f4e1006e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6750/parameters.py @@ -0,0 +1,41 @@ +from authlib.common.urls import add_params_to_qs, add_params_to_uri + + +def add_to_uri(token, uri): + """Add a Bearer Token to the request URI. + Not recommended, use only if the client can't use the authorization header or body. + + http://www.example.com/path?access_token=h480djs93hd8 + """ + return add_params_to_uri(uri, [('access_token', token)]) + + +def add_to_headers(token, headers=None): + """Add a Bearer Token to the request headers. + Recommended method of passing bearer tokens. + + Authorization: Bearer h480djs93hd8 + """ + headers = headers or {} + headers['Authorization'] = 'Bearer {}'.format(token) + return headers + + +def add_to_body(token, body=None): + """Add a Bearer Token to the request body.
 + + access_token=h480djs93hd8 + """ + if body is None: + body = '' + return add_params_to_qs(body, [('access_token', token)]) + + +def add_bearer_token(token, uri, headers, body, placement='header'): + if placement in ('uri', 'url', 'query'): + uri = add_to_uri(token, uri) + elif placement in ('header', 'headers'): + headers = add_to_headers(token, headers) + elif placement == 'body': + body = add_to_body(token, body) + return uri, headers, body diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6750/validator.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6750/validator.py new file mode 100644 index 000000000..fc26a149a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6750/validator.py @@ -0,0 +1,99 @@ +""" + authlib.oauth2.rfc6750.validator + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Validate the Bearer Token in request, scope and token. +""" + +import time +from ..rfc6749.util import scope_to_list +from .errors import ( + InvalidRequestError, + InvalidTokenError, + InsufficientScopeError +) + + +class BearerTokenValidator(object): + TOKEN_TYPE = 'bearer' + + def __init__(self, realm=None): + self.realm = realm + + def authenticate_token(self, token_string): + """A method to query the token from the database with the given token string. + Developers MUST re-implement this method. For instance:: + + def authenticate_token(self, token_string): + return get_token_from_database(token_string) + + :param token_string: A string to represent the access_token. + :return: token + """ + raise NotImplementedError() + + def request_invalid(self, request): + """Check if the HTTP request is valid or not. Developers MUST + re-implement this method. For instance, your server requires an + "X-Device-Version" header:: + + def request_invalid(self, request): + return 'X-Device-Version' not in request.headers + + Usually, you don't have to detect if the request is valid or not; + you can just return ``False``. + + :param request: instance of HttpRequest + :return: Boolean + """ + raise NotImplementedError() + + def token_revoked(self, token): + """Check if this token is revoked. Developers MUST re-implement this + method.
If there is a column called ``revoked`` on the token table:: + + def token_revoked(self, token): + return token.revoked + + :param token: token instance + :return: Boolean + """ + raise NotImplementedError() + + def token_expired(self, token): + expires_at = token.get_expires_at() + if not expires_at: + return False + return expires_at < time.time() + + def scope_insufficient(self, token, scope, operator='AND'): + if not scope: + return False + + token_scopes = scope_to_list(token.get_scope()) + if not token_scopes: + return True + + token_scopes = set(token_scopes) + resource_scopes = set(scope_to_list(scope)) + if operator == 'AND': + return not token_scopes.issuperset(resource_scopes) + if operator == 'OR': + return not token_scopes & resource_scopes + if callable(operator): + return not operator(token_scopes, resource_scopes) + raise ValueError('Invalid operator value') + + def __call__(self, token_string, scope, request, scope_operator='AND'): + if self.request_invalid(request): + raise InvalidRequestError() + token = self.authenticate_token(token_string) + if not token: + raise InvalidTokenError(realm=self.realm) + if self.token_expired(token): + raise InvalidTokenError(realm=self.realm) + if self.token_revoked(token): + raise InvalidTokenError(realm=self.realm) + if self.scope_insufficient(token, scope, scope_operator): + raise InsufficientScopeError(token.get_scope(), scope) + return token diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6750/wrappers.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6750/wrappers.py new file mode 100644 index 000000000..9e2c226c3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc6750/wrappers.py @@ -0,0 +1,97 @@ + +class BearerToken(object): + """Bearer Token generator which can create the payload for token response + by OAuth 2 server. A typical token response would be: + + .. code-block:: http + + HTTP/1.1 200 OK + Content-Type: application/json;charset=UTF-8 + Cache-Control: no-store + Pragma: no-cache + + { + "access_token":"mF_9.B5f-4.1JqM", + "token_type":"Bearer", + "expires_in":3600, + "refresh_token":"tGzv3JOkF0XG5Qx2TlKWIA" + } + + :param access_token_generator: a function to generate access_token. + :param refresh_token_generator: a function to generate refresh_token, + if not provided, refresh_token will not be added into token response. + :param expires_generator: The expires_generator can be an int value or a + function. If it is int, all token expires_in will be this value. If it + is function, it can generate expires_in depending on client and + grant_type:: + + def expires_generator(client, grant_type): + if is_official_client(client): + return 3600 * 1000 + if grant_type == 'implicit': + return 3600 + return 3600 * 10 + :return: Callable + + When BearerToken is initialized, it will be callable:: + + token_generator = BearerToken(access_token_generator) + token = token_generator(client, grant_type, expires_in=None, + scope=None, include_refresh_token=True) + + The callable function that BearerToken created accepts these parameters: + + :param client: the client that making the request. + :param grant_type: current requested grant_type. + :param expires_in: if provided, use this value as expires_in. + :param scope: current requested scope. + :param include_refresh_token: should refresh_token be included. 
+ :return: Token dict + """ + + #: default expires_in value + DEFAULT_EXPIRES_IN = 3600 + #: default expires_in value differentiate by grant_type + GRANT_TYPES_EXPIRES_IN = { + 'authorization_code': 864000, + 'implicit': 3600, + 'password': 864000, + 'client_credentials': 864000 + } + + def __init__(self, access_token_generator, + refresh_token_generator=None, + expires_generator=None): + self.access_token_generator = access_token_generator + self.refresh_token_generator = refresh_token_generator + self.expires_generator = expires_generator + + def _get_expires_in(self, client, grant_type): + if self.expires_generator is None: + expires_in = self.GRANT_TYPES_EXPIRES_IN.get( + grant_type, self.DEFAULT_EXPIRES_IN) + elif callable(self.expires_generator): + expires_in = self.expires_generator(client, grant_type) + elif isinstance(self.expires_generator, int): + expires_in = self.expires_generator + else: + expires_in = self.DEFAULT_EXPIRES_IN + return expires_in + + def __call__(self, client, grant_type, user=None, scope=None, + expires_in=None, include_refresh_token=True): + access_token = self.access_token_generator(client, grant_type, user, scope) + if expires_in is None: + expires_in = self._get_expires_in(client, grant_type) + + token = { + 'token_type': 'Bearer', + 'access_token': access_token, + 'expires_in': expires_in + } + if include_refresh_token and self.refresh_token_generator: + token['refresh_token'] = self.refresh_token_generator( + client, grant_type, user, scope) + if scope: + token['scope'] = scope + return token diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7009/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7009/__init__.py new file mode 100644 index 000000000..0b8bc7f2e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7009/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +""" + authlib.oauth2.rfc7009 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + OAuth 2.0 Token Revocation. + + https://tools.ietf.org/html/rfc7009 +""" + +from .parameters import prepare_revoke_token_request +from .revocation import RevocationEndpoint + +__all__ = ['prepare_revoke_token_request', 'RevocationEndpoint'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7009/parameters.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7009/parameters.py new file mode 100644 index 000000000..2a829a752 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7009/parameters.py @@ -0,0 +1,25 @@ +from authlib.common.urls import add_params_to_qs + + +def prepare_revoke_token_request(token, token_type_hint=None, + body=None, headers=None): + """Construct request body and headers for revocation endpoint. + + :param token: access_token or refresh_token string. + :param token_type_hint: Optional, `access_token` or `refresh_token`. + :param body: current request body. + :param headers: current request headers. 
+ :return: tuple of (body, headers) + + https://tools.ietf.org/html/rfc7009#section-2.1 + """ + params = [('token', token)] + if token_type_hint: + params.append(('token_type_hint', token_type_hint)) + + body = add_params_to_qs(body or '', params) + if headers is None: + headers = {} + + headers['Content-Type'] = 'application/x-www-form-urlencoded' + return body, headers diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7009/revocation.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7009/revocation.py new file mode 100644 index 000000000..aafdd37d4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7009/revocation.py @@ -0,0 +1,98 @@ +from authlib.consts import default_json_headers +from ..rfc6749 import TokenEndpoint +from ..rfc6749 import ( + InvalidRequestError, + UnsupportedTokenTypeError, +) + + +class RevocationEndpoint(TokenEndpoint): + """Implementation of revocation endpoint which is described in + `RFC7009`_. + + .. _RFC7009: https://tools.ietf.org/html/rfc7009 + """ + #: Endpoint name to be registered + ENDPOINT_NAME = 'revocation' + + def authenticate_endpoint_credential(self, request, client): + """The client constructs the request by including the following + parameters using the "application/x-www-form-urlencoded" format in + the HTTP request entity-body: + + token + REQUIRED. The token that the client wants to get revoked. + + token_type_hint + OPTIONAL. A hint about the type of the token submitted for + revocation. + """ + if 'token' not in request.form: + raise InvalidRequestError() + + token_type = request.form.get('token_type_hint') + if token_type and token_type not in self.SUPPORTED_TOKEN_TYPES: + raise UnsupportedTokenTypeError() + return self.query_token(request.form['token'], token_type, client) + + def create_endpoint_response(self, request): + """Validate revocation request and create the response for revocation. + For example, a client may request the revocation of a refresh token + with the following request:: + + POST /revoke HTTP/1.1 + Host: server.example.com + Content-Type: application/x-www-form-urlencoded + Authorization: Basic czZCaGRSa3F0MzpnWDFmQmF0M2JW + + token=45ghiukldjahdnhzdauz&token_type_hint=refresh_token + + :returns: (status_code, body, headers) + """ + # The authorization server first validates the client credentials + client = self.authenticate_endpoint_client(request) + + # then verifies whether the token was issued to the client making + # the revocation request + credential = self.authenticate_endpoint_credential(request, client) + + # the authorization server invalidates the token + if credential: + self.revoke_token(credential) + self.server.send_signal( + 'after_revoke_token', + token=credential, + client=client, + ) + return 200, {}, default_json_headers + + def query_token(self, token, token_type_hint, client): + """Get the token from database/storage by the given token string. + Developers should implement this method:: + + def query_token(self, token, token_type_hint, client): + if token_type_hint == 'access_token': + return Token.query_by_access_token(token, client.client_id) + if token_type_hint == 'refresh_token': + return Token.query_by_refresh_token(token, client.client_id) + return Token.query_by_access_token(token, client.client_id) or \ + Token.query_by_refresh_token(token, client.client_id) + """ + raise NotImplementedError() + + def revoke_token(self, token): + """Mark token as revoked. 
Since token MUST be unique, it would be + dangerous to delete it. Consider this situation: + + 1. Jane obtained a token XYZ + 2. Jane revoked (deleted) token XYZ + 3. Bob generated a new token XYZ + 4. Jane can use XYZ to access Bob's resource + + It would be secure to mark a token as revoked:: + + def revoke_token(self, token): + token.revoked = True + token.save() + """ + raise NotImplementedError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7521/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7521/__init__.py new file mode 100644 index 000000000..0dbe0b30b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7521/__init__.py @@ -0,0 +1,3 @@ +from .client import AssertionClient + +__all__ = ['AssertionClient'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7521/client.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7521/client.py new file mode 100644 index 000000000..d1b98ba55 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7521/client.py @@ -0,0 +1,84 @@ +from authlib.common.encoding import to_native +from authlib.oauth2.base import OAuth2Error + + +class AssertionClient(object): + """Constructs a new Assertion Framework for OAuth 2.0 Authorization Grants + per RFC7521_. + + .. _RFC7521: https://tools.ietf.org/html/rfc7521 + """ + DEFAULT_GRANT_TYPE = None + ASSERTION_METHODS = {} + token_auth_class = None + + def __init__(self, session, token_endpoint, issuer, subject, + audience=None, grant_type=None, claims=None, + token_placement='header', scope=None, **kwargs): + + self.session = session + + if audience is None: + audience = token_endpoint + + self.token_endpoint = token_endpoint + + if grant_type is None: + grant_type = self.DEFAULT_GRANT_TYPE + + self.grant_type = grant_type + + # https://tools.ietf.org/html/rfc7521#section-5.1 + self.issuer = issuer + self.subject = subject + self.audience = audience + self.claims = claims + self.scope = scope + if self.token_auth_class is not None: + self.token_auth = self.token_auth_class(None, token_placement, self) + self._kwargs = kwargs + + @property + def token(self): + return self.token_auth.token + + @token.setter + def token(self, token): + self.token_auth.set_token(token) + + def refresh_token(self): + """Using Assertions as Authorization Grants to refresh token as + described in `Section 4.1`_. + + .. 
_`Section 4.1`: https://tools.ietf.org/html/rfc7521#section-4.1 + """ + generate_assertion = self.ASSERTION_METHODS[self.grant_type] + assertion = generate_assertion( + issuer=self.issuer, + subject=self.subject, + audience=self.audience, + claims=self.claims, + **self._kwargs + ) + data = { + 'assertion': to_native(assertion), + 'grant_type': self.grant_type, + } + if self.scope: + data['scope'] = self.scope + + return self._refresh_token(data) + + def _refresh_token(self, data): + resp = self.session.request( + 'POST', self.token_endpoint, data=data, withhold_token=True) + + token = resp.json() + if 'error' in token: + raise OAuth2Error( + error=token['error'], + description=token.get('error_description') + ) + + self.token = token + return self.token diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7523/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7523/__init__.py new file mode 100644 index 000000000..843c07503 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7523/__init__.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +""" + authlib.oauth2.rfc7523 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + JSON Web Token (JWT) Profile for OAuth 2.0 Client + Authentication and Authorization Grants. + + https://tools.ietf.org/html/rfc7523 +""" + +from .jwt_bearer import JWTBearerGrant +from .client import ( + JWTBearerClientAssertion, +) +from .assertion import ( + client_secret_jwt_sign, + private_key_jwt_sign, +) +from .auth import ( + ClientSecretJWT, PrivateKeyJWT, + register_session_client_auth_method, +) + +__all__ = [ + 'JWTBearerGrant', + 'JWTBearerClientAssertion', + 'client_secret_jwt_sign', + 'private_key_jwt_sign', + 'ClientSecretJWT', + 'PrivateKeyJWT', + 'register_session_client_auth_method', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7523/assertion.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7523/assertion.py new file mode 100644 index 000000000..0b70e6629 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7523/assertion.py @@ -0,0 +1,68 @@ +import time +from authlib.jose import jwt +from authlib.common.security import generate_token + + +def sign_jwt_bearer_assertion( + key, issuer, audience, subject=None, issued_at=None, + expires_at=None, claims=None, header=None, **kwargs): + + if header is None: + header = {} + alg = kwargs.pop('alg', None) + if alg is not None: + header['alg'] = alg + if 'alg' not in header: + raise ValueError('Missing "alg" in header') + + payload = {'iss': issuer, 'aud': audience} + + # subject is not required in Google service + if subject: + payload['sub'] = subject + + if not issued_at: + issued_at = int(time.time()) + + expires_in = kwargs.pop('expires_in', 3600) + if not expires_at: + expires_at = issued_at + expires_in + + payload['iat'] = issued_at + payload['exp'] = expires_at + + if claims: + payload.update(claims) + + return jwt.encode(header, payload, key) + + +def client_secret_jwt_sign(client_secret, client_id, token_endpoint, alg='HS256', + claims=None, header=None, **kwargs): + return _sign(client_secret, client_id, token_endpoint, + alg, claims=claims, header=header, **kwargs) + + +def private_key_jwt_sign(private_key, client_id, token_endpoint, alg='RS256', + claims=None, header=None, **kwargs): + return _sign(private_key, client_id, token_endpoint, + alg, claims=claims, header=header, **kwargs) + + 
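+# Example (illustrative only; the client credentials and token endpoint URL
+# below are placeholders, not values defined by this module): both helpers
+# above return a signed JWT that the client sends as the "client_assertion"
+# parameter when authenticating at the token endpoint, e.g.
+#
+#     assertion = client_secret_jwt_sign(
+#         client_secret='my-client-secret',
+#         client_id='my-client-id',
+#         token_endpoint='https://example.com/oauth/token',
+#     )
+#
+# private_key_jwt_sign() is used the same way, with a PEM private key in
+# place of the client secret.
+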
+def _sign(key, client_id, token_endpoint, alg, claims=None, **kwargs): + # REQUIRED. Issuer. This MUST contain the client_id of the OAuth Client. + issuer = client_id + # REQUIRED. Subject. This MUST contain the client_id of the OAuth Client. + subject = client_id + # The Audience SHOULD be the URL of the Authorization Server's Token Endpoint. + audience = token_endpoint + + # jti is required + if claims is None: + claims = {} + if 'jti' not in claims: + claims['jti'] = generate_token(36) + + return sign_jwt_bearer_assertion( + key=key, issuer=issuer, audience=audience, subject=subject, + claims=claims, alg=alg, **kwargs) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7523/auth.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7523/auth.py new file mode 100644 index 000000000..c9eff9940 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7523/auth.py @@ -0,0 +1,107 @@ +from authlib.common.urls import add_params_to_qs +from authlib.deprecate import deprecate +from .assertion import client_secret_jwt_sign, private_key_jwt_sign +from .client import ASSERTION_TYPE + + +class ClientSecretJWT(object): + """Authentication method for OAuth 2.0 Client. This authentication + method is called ``client_secret_jwt``, which is using ``client_id`` + and ``client_secret`` constructed with JWT to identify a client. + + Here is an example of use ``client_secret_jwt`` with Requests Session:: + + from authlib.integrations.requests_client import OAuth2Session + + token_endpoint = 'https://example.com/oauth/token' + session = OAuth2Session( + 'your-client-id', 'your-client-secret', + token_endpoint_auth_method='client_secret_jwt' + ) + session.register_client_auth_method(ClientSecretJWT(token_endpoint)) + session.fetch_token(token_endpoint) + + :param token_endpoint: A string URL of the token endpoint + :param claims: Extra JWT claims + """ + name = 'client_secret_jwt' + alg = 'HS256' + + def __init__(self, token_endpoint=None, claims=None, header=None): + self.token_endpoint = token_endpoint + self.claims = claims + self.header = header + + def sign(self, auth, token_endpoint): + return client_secret_jwt_sign( + auth.client_secret, + client_id=auth.client_id, + token_endpoint=token_endpoint, + claims=self.claims, + header=self.header, + alg=self.alg, + ) + + def __call__(self, auth, method, uri, headers, body): + token_endpoint = self.token_endpoint + if not token_endpoint: + token_endpoint = uri + + client_assertion = self.sign(auth, token_endpoint) + body = add_params_to_qs(body or '', [ + ('client_assertion_type', ASSERTION_TYPE), + ('client_assertion', client_assertion) + ]) + return uri, headers, body + + +class PrivateKeyJWT(ClientSecretJWT): + """Authentication method for OAuth 2.0 Client. This authentication + method is called ``private_key_jwt``, which is using ``client_id`` + and ``private_key`` constructed with JWT to identify a client. 
+ + Here is an example of use ``private_key_jwt`` with Requests Session:: + + from authlib.integrations.requests_client import OAuth2Session + + token_endpoint = 'https://example.com/oauth/token' + session = OAuth2Session( + 'your-client-id', 'your-client-private-key', + token_endpoint_auth_method='private_key_jwt' + ) + session.register_client_auth_method(PrivateKeyJWT(token_endpoint)) + session.fetch_token(token_endpoint) + + :param token_endpoint: A string URL of the token endpoint + :param claims: Extra JWT claims + """ + name = 'private_key_jwt' + alg = 'RS256' + + def sign(self, auth, token_endpoint): + return private_key_jwt_sign( + auth.client_secret, + client_id=auth.client_id, + token_endpoint=token_endpoint, + claims=self.claims, + header=self.header, + alg=self.alg, + ) + + +def register_session_client_auth_method(session, token_url=None, **kwargs): # pragma: no cover + """Register "client_secret_jwt" or "private_key_jwt" token endpoint auth + method to OAuth2Session. + + :param session: OAuth2Session instance. + :param token_url: Optional token endpoint url. + """ + deprecate('Use `ClientSecretJWT` and `PrivateKeyJWT` instead', '1.0', 'Jeclj', 'ca') + if session.token_endpoint_auth_method == 'client_secret_jwt': + cls = ClientSecretJWT + elif session.token_endpoint_auth_method == 'private_key_jwt': + cls = PrivateKeyJWT + else: + raise ValueError('Invalid token_endpoint_auth_method') + + session.register_client_auth_method(cls(token_url)) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7523/client.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7523/client.py new file mode 100644 index 000000000..cda82c84a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7523/client.py @@ -0,0 +1,113 @@ +import logging +from authlib.jose import jwt +from authlib.jose.errors import JoseError +from ..rfc6749 import InvalidClientError + +ASSERTION_TYPE = 'urn:ietf:params:oauth:client-assertion-type:jwt-bearer' +log = logging.getLogger(__name__) + + +class JWTBearerClientAssertion(object): + """Implementation of Using JWTs for Client Authentication, which is + defined by RFC7523. + """ + #: Value of ``client_assertion_type`` of JWTs + CLIENT_ASSERTION_TYPE = ASSERTION_TYPE + #: Name of the client authentication method + CLIENT_AUTH_METHOD = 'client_assertion_jwt' + + def __init__(self, token_url, validate_jti=True): + self.token_url = token_url + self._validate_jti = validate_jti + + def __call__(self, query_client, request): + data = request.form + assertion_type = data.get('client_assertion_type') + assertion = data.get('client_assertion') + if assertion_type == ASSERTION_TYPE and assertion: + resolve_key = self.create_resolve_key_func(query_client, request) + self.process_assertion_claims(assertion, resolve_key) + return self.authenticate_client(request.client) + log.debug('Authenticate via %r failed', self.CLIENT_AUTH_METHOD) + + def create_claims_options(self): + """Create a claims_options for verify JWT payload claims. 
Developers + MAY overwrite this method to create a more strict options.""" + # https://tools.ietf.org/html/rfc7523#section-3 + # The Audience SHOULD be the URL of the Authorization Server's Token Endpoint + options = { + 'iss': {'essential': True, 'validate': _validate_iss}, + 'sub': {'essential': True}, + 'aud': {'essential': True, 'value': self.token_url}, + 'exp': {'essential': True}, + } + if self._validate_jti: + options['jti'] = {'essential': True, 'validate': self.validate_jti} + return options + + def process_assertion_claims(self, assertion, resolve_key): + """Extract JWT payload claims from request "assertion", per + `Section 3.1`_. + + :param assertion: assertion string value in the request + :param resolve_key: function to resolve the sign key + :return: JWTClaims + :raise: InvalidClientError + + .. _`Section 3.1`: https://tools.ietf.org/html/rfc7523#section-3.1 + """ + try: + claims = jwt.decode( + assertion, resolve_key, + claims_options=self.create_claims_options() + ) + claims.validate() + except JoseError as e: + log.debug('Assertion Error: %r', e) + raise InvalidClientError() + return claims + + def authenticate_client(self, client): + if client.check_token_endpoint_auth_method(self.CLIENT_AUTH_METHOD): + return client + raise InvalidClientError() + + def create_resolve_key_func(self, query_client, request): + def resolve_key(headers, payload): + # https://tools.ietf.org/html/rfc7523#section-3 + # For client authentication, the subject MUST be the + # "client_id" of the OAuth client + client_id = payload['sub'] + client = query_client(client_id) + if not client: + raise InvalidClientError() + request.client = client + return self.resolve_client_public_key(client, headers) + return resolve_key + + def validate_jti(self, claims, jti): + """Validate if the given ``jti`` value is used before. Developers + MUST implement this method:: + + def validate_jti(self, claims, jti): + key = 'jti:{}-{}'.format(claims['sub'], jti) + if redis.get(key): + return False + redis.set(key, 1, ex=3600) + return True + """ + raise NotImplementedError() + + def resolve_client_public_key(self, client, headers): + """Resolve the client public key for verifying the JWT signature. + A client may have many public keys, in this case, we can retrieve it + via ``kid`` value in headers. 
Developers MUST implement this method:: + + def resolve_client_public_key(self, client, headers): + return client.public_key + """ + raise NotImplementedError() + + +def _validate_iss(claims, iss): + return claims['sub'] == iss diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7523/jwt_bearer.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7523/jwt_bearer.py new file mode 100644 index 000000000..a11336d55 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7523/jwt_bearer.py @@ -0,0 +1,155 @@ +import logging +from authlib.jose import jwt +from authlib.jose.errors import JoseError +from ..rfc6749 import BaseGrant, TokenEndpointMixin +from ..rfc6749 import ( + UnauthorizedClientError, + InvalidRequestError, + InvalidGrantError +) +from .assertion import sign_jwt_bearer_assertion + +log = logging.getLogger(__name__) +JWT_BEARER_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:jwt-bearer' + + +class JWTBearerGrant(BaseGrant, TokenEndpointMixin): + GRANT_TYPE = JWT_BEARER_GRANT_TYPE + + @staticmethod + def sign(key, issuer, audience, subject=None, + issued_at=None, expires_at=None, claims=None, **kwargs): + return sign_jwt_bearer_assertion( + key, issuer, audience, subject, issued_at, + expires_at, claims, **kwargs) + + def create_claims_options(self): + """Create a claims_options for verify JWT payload claims. Developers + MAY overwrite this method to create a more strict options. + """ + # https://tools.ietf.org/html/rfc7523#section-3 + return { + 'iss': {'essential': True}, + 'sub': {'essential': True}, + 'aud': {'essential': True}, + 'exp': {'essential': True}, + } + + def process_assertion_claims(self, assertion): + """Extract JWT payload claims from request "assertion", per + `Section 3.1`_. + + :param assertion: assertion string value in the request + :return: JWTClaims + :raise: InvalidGrantError + + .. _`Section 3.1`: https://tools.ietf.org/html/rfc7523#section-3.1 + """ + claims = jwt.decode( + assertion, self.resolve_public_key, + claims_options=self.create_claims_options()) + try: + claims.validate() + except JoseError as e: + log.debug('Assertion Error: %r', e) + raise InvalidGrantError(description=e.description) + return claims + + def validate_token_request(self): + """The client makes a request to the token endpoint by sending the + following parameters using the "application/x-www-form-urlencoded" + format per `Section 2.1`_: + + grant_type + REQUIRED. Value MUST be set to + "urn:ietf:params:oauth:grant-type:jwt-bearer". + + assertion + REQUIRED. Value MUST contain a single JWT. + + scope + OPTIONAL. + + The following example demonstrates an access token request with a JWT + as an authorization grant: + + .. code-block:: http + + POST /token.oauth2 HTTP/1.1 + Host: as.example.com + Content-Type: application/x-www-form-urlencoded + + grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer + &assertion=eyJhbGciOiJFUzI1NiIsImtpZCI6IjE2In0. + eyJpc3Mi[...omitted for brevity...]. + J9l-ZhwP[...omitted for brevity...] + + .. 
_`Section 2.1`: https://tools.ietf.org/html/rfc7523#section-2.1 + """ + assertion = self.request.form.get('assertion') + if not assertion: + raise InvalidRequestError('Missing "assertion" in request') + + claims = self.process_assertion_claims(assertion) + client = self.authenticate_client(claims) + log.debug('Validate token request of %s', client) + + if not client.check_grant_type(self.GRANT_TYPE): + raise UnauthorizedClientError() + + self.request.client = client + self.validate_requested_scope() + self.request.user = self.authenticate_user(client, claims) + + def create_token_response(self): + """If valid and authorized, the authorization server issues an access + token. + """ + token = self.generate_token( + scope=self.request.scope, + include_refresh_token=False, + ) + log.debug('Issue token %r to %r', token, self.request.client) + self.save_token(token) + return 200, token, self.TOKEN_RESPONSE_HEADER + + def authenticate_user(self, client, claims): + """Authenticate user with the given assertion claims. Developers MUST + implement it in subclass, e.g.:: + + def authenticate_user(self, client, claims): + user = User.get_by_sub(claims['sub']) + if is_authorized_to_client(user, client): + return user + + :param client: OAuth Client instance + :param claims: assertion payload claims + :return: User instance + """ + raise NotImplementedError() + + def authenticate_client(self, claims): + """Authenticate client with the given assertion claims. Developers MUST + implement it in subclass, e.g.:: + + def authenticate_client(self, claims): + return Client.get_by_iss(claims['iss']) + + :param claims: assertion payload claims + :return: Client instance + """ + raise NotImplementedError() + + def resolve_public_key(self, headers, payload): + """Find public key to verify assertion signature. Developers MUST + implement it in subclass, e.g.:: + + def resolve_public_key(self, headers, payload): + jwk_set = get_jwk_set_by_iss(payload['iss']) + return filter_jwk_set(jwk_set, headers['kid']) + + :param headers: JWT headers dict + :param payload: JWT payload dict + :return: A public key + """ + raise NotImplementedError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7591/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7591/__init__.py new file mode 100644 index 000000000..8ebb07096 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7591/__init__.py @@ -0,0 +1,25 @@ +""" + authlib.oauth2.rfc7591 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + OAuth 2.0 Dynamic Client Registration Protocol. 
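# ---------------------------------------------------------------------------
# A minimal subclass sketch for JWTBearerGrant (jwt_bearer.py above), wiring
# up its three hooks the way the docstrings suggest. User, Client,
# is_authorized_to_client, get_jwk_set_by_iss and filter_jwk_set are
# hypothetical application-side helpers assumed only for this sketch.
from authlib.oauth2.rfc7523 import JWTBearerGrant

class MyJWTBearerGrant(JWTBearerGrant):
    def authenticate_client(self, claims):
        # the "iss" claim identifies the client that issued the assertion
        return Client.get_by_iss(claims['iss'])

    def authenticate_user(self, client, claims):
        # the "sub" claim identifies the user the token is requested for
        user = User.get_by_sub(claims['sub'])
        if is_authorized_to_client(user, client):
            return user

    def resolve_public_key(self, headers, payload):
        # select the issuer's key, e.g. by "kid", to verify the assertion
        jwk_set = get_jwk_set_by_iss(payload['iss'])
        return filter_jwk_set(jwk_set, headers['kid'])
# ---------------------------------------------------------------------------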
+ + https://tools.ietf.org/html/rfc7591 +""" + + +from .claims import ClientMetadataClaims +from .endpoint import ClientRegistrationEndpoint +from .errors import ( + InvalidRedirectURIError, + InvalidClientMetadataError, + InvalidSoftwareStatementError, + UnapprovedSoftwareStatementError, +) + +__all__ = [ + 'ClientMetadataClaims', 'ClientRegistrationEndpoint', + 'InvalidRedirectURIError', 'InvalidClientMetadataError', + 'InvalidSoftwareStatementError', 'UnapprovedSoftwareStatementError', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7591/claims.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7591/claims.py new file mode 100644 index 000000000..b6157b520 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7591/claims.py @@ -0,0 +1,218 @@ +from authlib.jose import BaseClaims, JsonWebKey +from authlib.jose.errors import InvalidClaimError +from authlib.common.urls import is_valid_url + + +class ClientMetadataClaims(BaseClaims): + # https://tools.ietf.org/html/rfc7591#section-2 + REGISTERED_CLAIMS = [ + 'redirect_uris', + 'token_endpoint_auth_method', + 'grant_types', + 'response_types', + 'client_name', + 'client_uri', + 'logo_uri', + 'scope', + 'contacts', + 'tos_uri', + 'policy_uri', + 'jwks_uri', + 'jwks', + 'software_id', + 'software_version', + ] + + def validate(self): + self._validate_essential_claims() + self.validate_redirect_uris() + self.validate_token_endpoint_auth_method() + self.validate_grant_types() + self.validate_response_types() + self.validate_client_name() + self.validate_client_uri() + self.validate_logo_uri() + self.validate_scope() + self.validate_contacts() + self.validate_tos_uri() + self.validate_policy_uri() + self.validate_jwks_uri() + self.validate_jwks() + self.validate_software_id() + self.validate_software_version() + + def validate_redirect_uris(self): + """Array of redirection URI strings for use in redirect-based flows + such as the authorization code and implicit flows. As required by + Section 2 of OAuth 2.0 [RFC6749], clients using flows with + redirection MUST register their redirection URI values. + Authorization servers that support dynamic registration for + redirect-based flows MUST implement support for this metadata + value. + """ + uris = self.get('redirect_uris') + if uris: + for uri in uris: + self._validate_uri('redirect_uris', uri) + + def validate_token_endpoint_auth_method(self): + """String indicator of the requested authentication method for the + token endpoint. + """ + # If unspecified or omitted, the default is "client_secret_basic" + if 'token_endpoint_auth_method' not in self: + self['token_endpoint_auth_method'] = 'client_secret_basic' + self._validate_claim_value('token_endpoint_auth_method') + + def validate_grant_types(self): + """Array of OAuth 2.0 grant type strings that the client can use at + the token endpoint. + """ + self._validate_claim_value('grant_types') + + def validate_response_types(self): + """Array of the OAuth 2.0 response type strings that the client can + use at the authorization endpoint. + """ + self._validate_claim_value('response_types') + + def validate_client_name(self): + """Human-readable string name of the client to be presented to the + end-user during authorization. If omitted, the authorization + server MAY display the raw "client_id" value to the end-user + instead. It is RECOMMENDED that clients always send this field. + The value of this field MAY be internationalized, as described in + Section 2.2. 
+ """ + + def validate_client_uri(self): + """URL string of a web page providing information about the client. + If present, the server SHOULD display this URL to the end-user in + a clickable fashion. It is RECOMMENDED that clients always send + this field. The value of this field MUST point to a valid web + page. The value of this field MAY be internationalized, as + described in Section 2.2. + """ + self._validate_uri('client_uri') + + def validate_logo_uri(self): + """URL string that references a logo for the client. If present, the + server SHOULD display this image to the end-user during approval. + The value of this field MUST point to a valid image file. The + value of this field MAY be internationalized, as described in + Section 2.2. + """ + self._validate_uri('logo_uri') + + def validate_scope(self): + """String containing a space-separated list of scope values (as + described in Section 3.3 of OAuth 2.0 [RFC6749]) that the client + can use when requesting access tokens. The semantics of values in + this list are service specific. If omitted, an authorization + server MAY register a client with a default set of scopes. + """ + self._validate_claim_value('scope') + + def validate_contacts(self): + """Array of strings representing ways to contact people responsible + for this client, typically email addresses. The authorization + server MAY make these contact addresses available to end-users for + support requests for the client. See Section 6 for information on + Privacy Considerations. + """ + if 'contacts' in self and not isinstance(self['contacts'], list): + raise InvalidClaimError('contacts') + + def validate_tos_uri(self): + """URL string that points to a human-readable terms of service + document for the client that describes a contractual relationship + between the end-user and the client that the end-user accepts when + authorizing the client. The authorization server SHOULD display + this URL to the end-user if it is provided. The value of this + field MUST point to a valid web page. The value of this field MAY + be internationalized, as described in Section 2.2. + """ + self._validate_uri('tos_uri') + + def validate_policy_uri(self): + """URL string that points to a human-readable privacy policy document + that describes how the deployment organization collects, uses, + retains, and discloses personal data. The authorization server + SHOULD display this URL to the end-user if it is provided. The + value of this field MUST point to a valid web page. The value of + this field MAY be internationalized, as described in Section 2.2. + """ + self._validate_uri('policy_uri') + + def validate_jwks_uri(self): + """URL string referencing the client's JSON Web Key (JWK) Set + [RFC7517] document, which contains the client's public keys. The + value of this field MUST point to a valid JWK Set document. These + keys can be used by higher-level protocols that use signing or + encryption. For instance, these keys might be used by some + applications for validating signed requests made to the token + endpoint when using JWTs for client authentication [RFC7523]. Use + of this parameter is preferred over the "jwks" parameter, as it + allows for easier key rotation. The "jwks_uri" and "jwks" + parameters MUST NOT both be present in the same request or + response. + """ + # TODO: use real HTTP library + self._validate_uri('jwks_uri') + + def validate_jwks(self): + """Client's JSON Web Key Set [RFC7517] document value, which contains + the client's public keys. 
The value of this field MUST be a JSON + object containing a valid JWK Set. These keys can be used by + higher-level protocols that use signing or encryption. This + parameter is intended to be used by clients that cannot use the + "jwks_uri" parameter, such as native clients that cannot host + public URLs. The "jwks_uri" and "jwks" parameters MUST NOT both + be present in the same request or response. + """ + if 'jwks' in self: + if 'jwks_uri' in self: + # The "jwks_uri" and "jwks" parameters MUST NOT both be present + raise InvalidClaimError('jwks') + + jwks = self['jwks'] + try: + key_set = JsonWebKey.import_key_set(jwks) + if not key_set: + raise InvalidClaimError('jwks') + except ValueError: + raise InvalidClaimError('jwks') + + def validate_software_id(self): + """A unique identifier string (e.g., a Universally Unique Identifier + (UUID)) assigned by the client developer or software publisher + used by registration endpoints to identify the client software to + be dynamically registered. Unlike "client_id", which is issued by + the authorization server and SHOULD vary between instances, the + "software_id" SHOULD remain the same for all instances of the + client software. The "software_id" SHOULD remain the same across + multiple updates or versions of the same piece of software. The + value of this field is not intended to be human readable and is + usually opaque to the client and authorization server. + """ + + def validate_software_version(self): + """A version identifier string for the client software identified by + "software_id". The value of the "software_version" SHOULD change + on any update to the client software identified by the same + "software_id". The value of this field is intended to be compared + using string equality matching and no other comparison semantics + are defined by this specification. The value of this field is + outside the scope of this specification, but it is not intended to + be human readable and is usually opaque to the client and + authorization server. The definition of what constitutes an + update to client software that would trigger a change to this + value is specific to the software itself and is outside the scope + of this specification. + """ + + def _validate_uri(self, key, uri=None): + if uri is None: + uri = self.get(key) + if uri and not is_valid_url(uri): + raise InvalidClaimError(key) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7591/endpoint.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7591/endpoint.py new file mode 100644 index 000000000..eff588cea --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7591/endpoint.py @@ -0,0 +1,199 @@ +import os +import time +import binascii +from authlib.consts import default_json_headers +from authlib.common.security import generate_token +from authlib.jose import JsonWebToken, JoseError +from ..rfc6749 import AccessDeniedError, InvalidRequestError +from ..rfc6749.util import scope_to_list +from .claims import ClientMetadataClaims +from .errors import ( + InvalidClientMetadataError, + UnapprovedSoftwareStatementError, + InvalidSoftwareStatementError, +) + + +class ClientRegistrationEndpoint(object): + """The client registration endpoint is an OAuth 2.0 endpoint designed to + allow a client to be registered with the authorization server. 
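# ---------------------------------------------------------------------------
# A minimal sketch of ClientMetadataClaims (rfc7591/claims.py above) validating
# a registration payload on its own, outside the endpoint. The metadata values
# are made up for illustration, and passing an empty header dict is an
# assumption of this sketch; validate() rejects malformed URIs and fills in
# the "client_secret_basic" default auth method.
from authlib.oauth2.rfc7591 import ClientMetadataClaims

claims = ClientMetadataClaims(
    {
        'redirect_uris': ['https://client.example.org/callback'],
        'client_name': 'My Example Client',
        'scope': 'openid profile',
    },
    {},  # no JOSE header; the metadata came from a plain JSON request body
)
claims.validate()
assert claims['token_endpoint_auth_method'] == 'client_secret_basic'
# ---------------------------------------------------------------------------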
+ """ + ENDPOINT_NAME = 'client_registration' + + #: The claims validation class + claims_class = ClientMetadataClaims + + #: Rewrite this value with a list to support ``software_statement`` + #: e.g. ``software_statement_alg_values_supported = ['RS256']`` + software_statement_alg_values_supported = None + + def __init__(self, server): + self.server = server + + def __call__(self, request): + return self.create_registration_response(request) + + def create_registration_response(self, request): + token = self.authenticate_token(request) + if not token: + raise AccessDeniedError() + + request.credential = token + + client_metadata = self.extract_client_metadata(request) + client_info = self.generate_client_info() + body = {} + body.update(client_metadata) + body.update(client_info) + client = self.save_client(client_info, client_metadata, request) + registration_info = self.generate_client_registration_info(client, request) + if registration_info: + body.update(registration_info) + return 201, body, default_json_headers + + def extract_client_metadata(self, request): + if not request.data: + raise InvalidRequestError() + + json_data = request.data.copy() + software_statement = json_data.pop('software_statement', None) + if software_statement and self.software_statement_alg_values_supported: + data = self.extract_software_statement(software_statement, request) + json_data.update(data) + + options = self.get_claims_options() + claims = self.claims_class(json_data, {}, options, self.server.metadata) + try: + claims.validate() + except JoseError as error: + raise InvalidClientMetadataError(error.description) + return claims.get_registered_claims() + + def extract_software_statement(self, software_statement, request): + key = self.resolve_public_key(request) + if not key: + raise UnapprovedSoftwareStatementError() + + try: + jwt = JsonWebToken(self.software_statement_alg_values_supported) + claims = jwt.decode(software_statement, key) + # there is no need to validate claims + return claims + except JoseError: + raise InvalidSoftwareStatementError() + + def get_claims_options(self): + """Generate claims options validation from Authorization Server metadata.""" + metadata = self.server.metadata + if not metadata: + return {} + + scopes_supported = metadata.get('scopes_supported') + response_types_supported = metadata.get('response_types_supported') + grant_types_supported = metadata.get('grant_types_supported') + auth_methods_supported = metadata.get('token_endpoint_auth_methods_supported') + options = {} + if scopes_supported is not None: + scopes_supported = set(scopes_supported) + + def _validate_scope(claims, value): + if not value: + return True + scopes = set(scope_to_list(value)) + return scopes_supported.issuperset(scopes) + + options['scope'] = {'validate': _validate_scope} + + if response_types_supported is not None: + response_types_supported = set(response_types_supported) + + def _validate_response_types(claims, value): + return response_types_supported.issuperset(set(value)) + + options['response_types'] = {'validate': _validate_response_types} + + if grant_types_supported is not None: + grant_types_supported = set(grant_types_supported) + + def _validate_grant_types(claims, value): + return grant_types_supported.issuperset(set(value)) + + options['grant_types'] = {'validate': _validate_grant_types} + + if auth_methods_supported is not None: + options['token_endpoint_auth_method'] = {'values': auth_methods_supported} + + return options + + def generate_client_info(self): + # 
https://tools.ietf.org/html/rfc7591#section-3.2.1 + client_id = self.generate_client_id() + client_secret = self.generate_client_secret() + client_id_issued_at = int(time.time()) + client_secret_expires_at = 0 + return dict( + client_id=client_id, + client_secret=client_secret, + client_id_issued_at=client_id_issued_at, + client_secret_expires_at=client_secret_expires_at, + ) + + def generate_client_registration_info(self, client, request): + """Generate ```registration_client_uri`` and ``registration_access_token`` + for RFC7592. This method returns ``None`` by default. Developers MAY rewrite + this method to return registration information.""" + return None + + def create_endpoint_request(self, request): + return self.server.create_json_request(request) + + def generate_client_id(self): + """Generate ``client_id`` value. Developers MAY rewrite this method + to use their own way to generate ``client_id``. + """ + return generate_token(42) + + def generate_client_secret(self): + """Generate ``client_secret`` value. Developers MAY rewrite this method + to use their own way to generate ``client_secret``. + """ + return binascii.hexlify(os.urandom(24)).decode('ascii') + + def authenticate_token(self, request): + """Authenticate current credential who is requesting to register a client. + Developers MUST implement this method in subclass:: + + def authenticate_token(self, request): + auth = request.headers.get('Authorization') + return get_token_by_auth(auth) + + :return: token instance + """ + raise NotImplementedError() + + def resolve_public_key(self, request): + """Resolve a public key for decoding ``software_statement``. If + ``enable_software_statement=True``, developers MUST implement this + method in subclass:: + + def resolve_public_key(self, request): + return get_public_key_from_user(request.credential) + + :return: JWK or Key string + """ + raise NotImplementedError() + + def save_client(self, client_info, client_metadata, request): + """Save client into database. Developers MUST implement this method + in subclass:: + + def save_client(self, client_info, client_metadata, request): + client = OAuthClient( + client_id=client_info['client_id'], + client_secret=client_info['client_secret'], + ... + ) + client.save() + return client + """ + raise NotImplementedError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7591/errors.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7591/errors.py new file mode 100644 index 000000000..31693c047 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7591/errors.py @@ -0,0 +1,33 @@ +from ..rfc6749 import OAuth2Error + + +class InvalidRedirectURIError(OAuth2Error): + """The value of one or more redirection URIs is invalid. + https://tools.ietf.org/html/rfc7591#section-3.2.2 + """ + error = 'invalid_redirect_uri' + + +class InvalidClientMetadataError(OAuth2Error): + """The value of one of the client metadata fields is invalid and the + server has rejected this request. Note that an authorization + server MAY choose to substitute a valid value for any requested + parameter of a client's metadata. + https://tools.ietf.org/html/rfc7591#section-3.2.2 + """ + error = 'invalid_client_metadata' + + +class InvalidSoftwareStatementError(OAuth2Error): + """The software statement presented is invalid. 
+ https://tools.ietf.org/html/rfc7591#section-3.2.2 + """ + error = 'invalid_software_statement' + + +class UnapprovedSoftwareStatementError(OAuth2Error): + """The software statement presented is not approved for use by this + authorization server. + https://tools.ietf.org/html/rfc7591#section-3.2.2 + """ + error = 'unapproved_software_statement' diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7592/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7592/__init__.py new file mode 100644 index 000000000..6a6457be5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7592/__init__.py @@ -0,0 +1,13 @@ +""" + authlib.oauth2.rfc7592 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + OAuth 2.0 Dynamic Client Registration Management Protocol. + + https://tools.ietf.org/html/rfc7592 +""" + +from .endpoint import ClientConfigurationEndpoint + +__all__ = ['ClientConfigurationEndpoint'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7592/endpoint.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7592/endpoint.py new file mode 100644 index 000000000..5a036d714 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7592/endpoint.py @@ -0,0 +1,172 @@ +from authlib.consts import default_json_headers +from ..rfc6749 import AccessDeniedError +from ..rfc6750 import InvalidTokenError + + +class ClientConfigurationEndpoint(object): + ENDPOINT_NAME = 'client_configuration' + + def __init__(self, server): + self.server = server + + def __call__(self, request): + return self.create_configuration_response(request) + + def create_configuration_response(self, request): + token = self.authenticate_token(request) + if not token: + raise InvalidTokenError() + + request.credential = token + + client = self.authenticate_client(request) + if not client: + # If the client does not exist on this server, the server MUST respond + # with HTTP 401 Unauthorized and the registration access token used to + # make this request SHOULD be immediately revoked. + self.revoke_access_token(request) + raise InvalidTokenError() + + if not self.check_permission(client, request): + # If the client does not have permission to read its record, the server + # MUST return an HTTP 403 Forbidden. + raise AccessDeniedError() + + request.client = client + + if request.method == 'GET': + return self.create_read_client_response(client, request) + elif request.method == 'DELETE': + return self.create_delete_client_response(client, request) + elif request.method == 'PUT': + return self.create_update_client_response(client, request) + + def create_endpoint_request(self, request): + return self.server.create_json_request(request) + + def create_read_client_response(self, client, request): + body = self.introspect_client(client) + info = self.generate_client_registration_info(client, request) + body.update(info) + return 200, body, default_json_headers + + def create_delete_client_response(self, client, request): + """To deprive itself on the authorization server, the client makes + an HTTP DELETE request to the client configuration endpoint. This + request is authenticated by the registration access token issued to + the client. 
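# ---------------------------------------------------------------------------
# A minimal subclass sketch for ClientRegistrationEndpoint
# (rfc7591/endpoint.py above), following its authenticate_token / save_client
# docstring examples. get_token_by_auth and the OAuthClient model are
# hypothetical application helpers assumed only for this sketch.
from authlib.oauth2.rfc7591 import ClientRegistrationEndpoint

class MyRegistrationEndpoint(ClientRegistrationEndpoint):
    def authenticate_token(self, request):
        # registration requests must carry an access token allowed to
        # register clients; resolve it from the Authorization header
        auth = request.headers.get('Authorization')
        return get_token_by_auth(auth)

    def save_client(self, client_info, client_metadata, request):
        # persist the generated credentials with the validated metadata
        client = OAuthClient(
            client_id=client_info['client_id'],
            client_secret=client_info['client_secret'],
            **client_metadata
        )
        client.save()
        return client
# ---------------------------------------------------------------------------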
+ + The following is a non-normative example request:: + + DELETE /register/s6BhdRkqt3 HTTP/1.1 + Host: server.example.com + Authorization: Bearer reg-23410913-abewfq.123483 + """ + self.delete_client(client, request) + headers = [ + ('Cache-Control', 'no-store'), + ('Pragma', 'no-cache'), + ] + return 204, '', headers + + def create_update_client_response(self, client, request): + """ To update a previously registered client's registration with an + authorization server, the client makes an HTTP PUT request to the + client configuration endpoint with a content type of "application/ + json". + + The following is a non-normative example request:: + + PUT /register/s6BhdRkqt3 HTTP/1.1 + Accept: application/json + Host: server.example.com + Authorization: Bearer reg-23410913-abewfq.123483 + + { + "client_id": "s6BhdRkqt3", + "client_secret": "cf136dc3c1fc93f31185e5885805d", + "redirect_uris": [ + "https://client.example.org/callback", + "https://client.example.org/alt" + ], + "grant_types": ["authorization_code", "refresh_token"], + "token_endpoint_auth_method": "client_secret_basic", + "jwks_uri": "https://client.example.org/my_public_keys.jwks", + "client_name": "My New Example", + "client_name#fr": "Mon Nouvel Exemple", + "logo_uri": "https://client.example.org/newlogo.png", + "logo_uri#fr": "https://client.example.org/fr/newlogo.png" + } + """ + # The updated client metadata fields request MUST NOT include the + # "registration_access_token", "registration_client_uri", + # "client_secret_expires_at", or "client_id_issued_at" fields + must_not_include = ( + 'registration_access_token', 'registration_client_uri', + 'client_secret_expires_at', 'client_id_issued_at', + ) + for k in must_not_include: + if k in request.data: + return + + # The client MUST include its "client_id" field in the request + client_id = request.data.get('client_id') + if not client_id: + raise + if client_id != client.get_client_id(): + raise + + # If the client includes the "client_secret" field in the request, + # the value of this field MUST match the currently issued client + # secret for that client. + if 'client_secret' in request.data: + if not client.check_client_secret(request.data['client_secret']): + raise + + client = self.save_client(client, request) + return self.create_read_client_response(client, request) + + def generate_client_registration_info(self, client, request): + """Generate ```registration_client_uri`` and ``registration_access_token`` + for RFC7592. This method returns ``None`` by default. Developers MAY rewrite + this method to return registration information.""" + raise NotImplementedError() + + def authenticate_token(self, request): + """Authenticate current credential who is requesting to register a client. + Developers MUST implement this method in subclass:: + + def authenticate_token(self, request): + auth = request.headers.get('Authorization') + return get_token_by_auth(auth) + + :return: token instance + """ + raise NotImplementedError() + + def authenticate_client(self, request): + raise NotImplementedError() + + def revoke_access_token(self, request): + raise NotImplementedError() + + def check_permission(self, client, request): + raise NotImplementedError() + + def introspect_client(self, client): + raise NotImplementedError() + + def delete_client(self, client, request): + """Delete authorization code from database or cache. 
Developers MUST + implement it in subclass, e.g.:: + + def delete_client(self, client, request): + client.delete() + + :param client: the instance of OAuth client + :param request: formatted request instance + """ + raise NotImplementedError() + + def save_client(self, client, request): + raise NotImplementedError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7636/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7636/__init__.py new file mode 100644 index 000000000..d943f3e1a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7636/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +""" + authlib.oauth2.rfc7636 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + Proof Key for Code Exchange by OAuth Public Clients. + + https://tools.ietf.org/html/rfc7636 +""" + +from .challenge import CodeChallenge, create_s256_code_challenge + +__all__ = ['CodeChallenge', 'create_s256_code_challenge'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7636/challenge.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7636/challenge.py new file mode 100644 index 000000000..26159b8f1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7636/challenge.py @@ -0,0 +1,134 @@ +import re +import hashlib +from authlib.common.encoding import to_bytes, to_unicode, urlsafe_b64encode +from ..rfc6749.errors import InvalidRequestError, InvalidGrantError + + +CODE_VERIFIER_PATTERN = re.compile(r'^[a-zA-Z0-9\-._~]{43,128}$') + + +def create_s256_code_challenge(code_verifier): + """Create S256 code_challenge with the given code_verifier.""" + data = hashlib.sha256(to_bytes(code_verifier, 'ascii')).digest() + return to_unicode(urlsafe_b64encode(data)) + + +def compare_plain_code_challenge(code_verifier, code_challenge): + # If the "code_challenge_method" from Section 4.3 was "plain", + # they are compared directly + return code_verifier == code_challenge + + +def compare_s256_code_challenge(code_verifier, code_challenge): + # BASE64URL-ENCODE(SHA256(ASCII(code_verifier))) == code_challenge + return create_s256_code_challenge(code_verifier) == code_challenge + + +class CodeChallenge(object): + """CodeChallenge extension to Authorization Code Grant. It is used to + improve the security of Authorization Code flow for public clients by + sending extra "code_challenge" and "code_verifier" to the authorization + server. + + The AuthorizationCodeGrant SHOULD save the ``code_challenge`` and + ``code_challenge_method`` into database when ``save_authorization_code``. 
+ Then register this extension via:: + + server.register_grant( + AuthorizationCodeGrant, + [CodeChallenge(required=True)] + ) + """ + #: defaults to "plain" if not present in the request + DEFAULT_CODE_CHALLENGE_METHOD = 'plain' + #: supported ``code_challenge_method`` + SUPPORTED_CODE_CHALLENGE_METHOD = ['plain', 'S256'] + + CODE_CHALLENGE_METHODS = { + 'plain': compare_plain_code_challenge, + 'S256': compare_s256_code_challenge, + } + + def __init__(self, required=True): + self.required = required + + def __call__(self, grant): + grant.register_hook( + 'after_validate_authorization_request', + self.validate_code_challenge, + ) + grant.register_hook( + 'after_validate_token_request', + self.validate_code_verifier, + ) + + def validate_code_challenge(self, grant): + request = grant.request + challenge = request.args.get('code_challenge') + method = request.args.get('code_challenge_method') + if not challenge and not method: + return + + if not challenge: + raise InvalidRequestError('Missing "code_challenge"') + + if method and method not in self.SUPPORTED_CODE_CHALLENGE_METHOD: + raise InvalidRequestError('Unsupported "code_challenge_method"') + + def validate_code_verifier(self, grant): + request = grant.request + verifier = request.form.get('code_verifier') + + # public client MUST verify code challenge + if self.required and request.auth_method == 'none' and not verifier: + raise InvalidRequestError('Missing "code_verifier"') + + authorization_code = request.credential + challenge = self.get_authorization_code_challenge(authorization_code) + + # ignore, it is the normal RFC6749 authorization_code request + if not challenge and not verifier: + return + + # challenge exists, code_verifier is required + if not verifier: + raise InvalidRequestError('Missing "code_verifier"') + + if not CODE_VERIFIER_PATTERN.match(verifier): + raise InvalidRequestError('Invalid "code_verifier"') + + # 4.6. Server Verifies code_verifier before Returning the Tokens + method = self.get_authorization_code_challenge_method(authorization_code) + if method is None: + method = self.DEFAULT_CODE_CHALLENGE_METHOD + + func = self.CODE_CHALLENGE_METHODS.get(method) + if not func: + raise RuntimeError('No verify method for "{}"'.format(method)) + + # If the values are not equal, an error response indicating + # "invalid_grant" MUST be returned. + if not func(verifier, challenge): + raise InvalidGrantError(description='Code challenge failed.') + + def get_authorization_code_challenge(self, authorization_code): + """Get "code_challenge" associated with this authorization code. + Developers MAY re-implement it in subclass, the default logic:: + + def get_authorization_code_challenge(self, authorization_code): + return authorization_code.code_challenge + + :param authorization_code: the instance of authorization_code + """ + return authorization_code.code_challenge + + def get_authorization_code_challenge_method(self, authorization_code): + """Get "code_challenge_method" associated with this authorization code. 
+ Developers MAY re-implement it in subclass, the default logic:: + + def get_authorization_code_challenge_method(self, authorization_code): + return authorization_code.code_challenge_method + + :param authorization_code: the instance of authorization_code + """ + return authorization_code.code_challenge_method diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7662/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7662/__init__.py new file mode 100644 index 000000000..283776188 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7662/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +""" + authlib.oauth2.rfc7662 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + OAuth 2.0 Token Introspection. + + https://tools.ietf.org/html/rfc7662 +""" + +from .introspection import IntrospectionEndpoint +from .models import IntrospectionToken + +__all__ = ['IntrospectionEndpoint', 'IntrospectionToken'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7662/introspection.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7662/introspection.py new file mode 100644 index 000000000..44c76c851 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7662/introspection.py @@ -0,0 +1,119 @@ +import time +from authlib.consts import default_json_headers +from ..rfc6749 import ( + TokenEndpoint, + InvalidRequestError, + UnsupportedTokenTypeError, +) + + +class IntrospectionEndpoint(TokenEndpoint): + """Implementation of introspection endpoint which is described in + `RFC7662`_. + + .. _RFC7662: https://tools.ietf.org/html/rfc7662 + """ + #: Endpoint name to be registered + ENDPOINT_NAME = 'introspection' + + def authenticate_endpoint_credential(self, request, client): + """The protected resource calls the introspection endpoint using an HTTP + ``POST`` request with parameters sent as + "application/x-www-form-urlencoded" data. The protected resource sends a + parameter representing the token along with optional parameters + representing additional context that is known by the protected resource + to aid the authorization server in its response. + + token + **REQUIRED** The string value of the token. For access tokens, this + is the ``access_token`` value returned from the token endpoint + defined in OAuth 2.0. For refresh tokens, this is the + ``refresh_token`` value returned from the token endpoint as defined + in OAuth 2.0. + + token_type_hint + **OPTIONAL** A hint about the type of the token submitted for + introspection. + """ + params = request.form + if 'token' not in params: + raise InvalidRequestError() + + token_type = params.get('token_type_hint') + if token_type and token_type not in self.SUPPORTED_TOKEN_TYPES: + raise UnsupportedTokenTypeError() + + return self.query_token(params['token'], token_type, client) + + def create_endpoint_response(self, request): + """Validate introspection request and create the response. 
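# ---------------------------------------------------------------------------
# An illustrative sketch of the S256 pairing that CodeChallenge
# (rfc7636/challenge.py above) verifies. The client keeps code_verifier
# secret, sends only code_challenge up front, and reveals the verifier at the
# token endpoint. generate_token(48) yields a verifier that satisfies the
# 43-128 character pattern required by the extension.
from authlib.common.security import generate_token
from authlib.oauth2.rfc7636 import create_s256_code_challenge

code_verifier = generate_token(48)
code_challenge = create_s256_code_challenge(code_verifier)

# authorization request: code_challenge + code_challenge_method=S256
# token request: code_verifier; the server recomputes
# BASE64URL-ENCODE(SHA256(ASCII(code_verifier))) and compares the two values
assert create_s256_code_challenge(code_verifier) == code_challenge
# ---------------------------------------------------------------------------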
+ + :returns: (status_code, body, headers) + """ + # The authorization server first validates the client credentials + client = self.authenticate_endpoint_client(request) + + # then verifies whether the token was issued to the client making + # the revocation request + credential = self.authenticate_endpoint_credential(request, client) + + # the authorization server invalidates the token + body = self.create_introspection_payload(credential) + return 200, body, default_json_headers + + def create_introspection_payload(self, token): + # the token is not active, does not exist on this server, or the + # protected resource is not allowed to introspect this particular + # token, then the authorization server MUST return an introspection + # response with the "active" field set to "false" + if not token: + return {'active': False} + expires_at = token.get_expires_at() + if expires_at < time.time() or token.revoked: + return {'active': False} + payload = self.introspect_token(token) + if 'active' not in payload: + payload['active'] = True + return payload + + def query_token(self, token, token_type_hint, client): + """Get the token from database/storage by the given token string. + Developers should implement this method:: + + def query_token(self, token, token_type_hint, client): + if token_type_hint == 'access_token': + tok = Token.query_by_access_token(token) + elif token_type_hint == 'refresh_token': + tok = Token.query_by_refresh_token(token) + else: + tok = Token.query_by_access_token(token) + if not tok: + tok = Token.query_by_refresh_token(token) + + if check_client_permission(client, tok): + return tok + """ + raise NotImplementedError() + + def introspect_token(self, token): + """Read given token and return its introspection metadata as a + dictionary following `Section 2.2`_:: + + def introspect_token(self, token): + active = is_token_active(token) + return { + 'active': active, + 'client_id': token.client_id, + 'token_type': token.token_type, + 'username': get_token_username(token), + 'scope': token.get_scope(), + 'sub': get_token_user_sub(token), + 'aud': token.client_id, + 'iss': 'https://server.example.com/', + 'exp': token.expires_at, + 'iat': token.issued_at, + } + + .. 
_`Section 2.2`: https://tools.ietf.org/html/rfc7662#section-2.2 + """ + raise NotImplementedError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7662/models.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7662/models.py new file mode 100644 index 000000000..0f4f0c215 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc7662/models.py @@ -0,0 +1,30 @@ +from ..rfc6749 import TokenMixin + + +class IntrospectionToken(dict, TokenMixin): + def get_client_id(self): + return self.get('client_id') + + def get_scope(self): + return self.get('scope') + + def get_expires_in(self): + # this method is only used in refresh token, + # no need to implement it + return 0 + + def get_expires_at(self): + return self.get('exp', 0) + + def __getattr__(self, key): + # https://tools.ietf.org/html/rfc7662#section-2.2 + available_keys = { + 'active', 'scope', 'client_id', 'username', 'token_type', + 'exp', 'iat', 'nbf', 'sub', 'aud', 'iss', 'jti' + } + try: + return object.__getattribute__(self, key) + except AttributeError as error: + if key in available_keys: + return self.get(key) + raise error diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8414/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8414/__init__.py new file mode 100644 index 000000000..2cdbfbdc0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8414/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +""" + authlib.oauth2.rfc8414 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents a direct implementation of + OAuth 2.0 Authorization Server Metadata. + + https://tools.ietf.org/html/rfc8414 +""" + +from .models import AuthorizationServerMetadata +from .well_known import get_well_known_url + + +__all__ = ['AuthorizationServerMetadata', 'get_well_known_url'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8414/models.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8414/models.py new file mode 100644 index 000000000..3e89a5c98 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8414/models.py @@ -0,0 +1,368 @@ +from authlib.common.urls import urlparse, is_valid_url +from authlib.common.security import is_secure_transport + + +class AuthorizationServerMetadata(dict): + """Define Authorization Server Metadata via `Section 2`_ in RFC8414_. + + .. _RFC8414: https://tools.ietf.org/html/rfc8414 + .. _`Section 2`: https://tools.ietf.org/html/rfc8414#section-2 + """ + REGISTRY_KEYS = [ + 'issuer', 'authorization_endpoint', 'token_endpoint', + 'jwks_uri', 'registration_endpoint', 'scopes_supported', + 'response_types_supported', 'response_modes_supported', + 'grant_types_supported', 'token_endpoint_auth_methods_supported', + 'token_endpoint_auth_signing_alg_values_supported', + 'service_documentation', 'ui_locales_supported', + 'op_policy_uri', 'op_tos_uri', 'revocation_endpoint', + 'revocation_endpoint_auth_methods_supported', + 'revocation_endpoint_auth_signing_alg_values_supported', + 'introspection_endpoint', + 'introspection_endpoint_auth_methods_supported', + 'introspection_endpoint_auth_signing_alg_values_supported', + 'code_challenge_methods_supported', + ] + + def validate_issuer(self): + """REQUIRED. The authorization server's issuer identifier, which is + a URL that uses the "https" scheme and has no query or fragment + components. 
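# ---------------------------------------------------------------------------
# A minimal subclass sketch for IntrospectionEndpoint
# (rfc7662/introspection.py above), based on its query_token /
# introspect_token docstring examples. The Token model and
# check_client_permission helper are hypothetical and assumed only here.
from authlib.oauth2.rfc7662 import IntrospectionEndpoint

class MyIntrospectionEndpoint(IntrospectionEndpoint):
    def query_token(self, token, token_type_hint, client):
        if token_type_hint == 'access_token':
            tok = Token.query_by_access_token(token)
        elif token_type_hint == 'refresh_token':
            tok = Token.query_by_refresh_token(token)
        else:
            # the hint is optional, so fall back to trying both lookups
            tok = Token.query_by_access_token(token)
            if not tok:
                tok = Token.query_by_refresh_token(token)
        if tok and check_client_permission(client, tok):
            return tok

    def introspect_token(self, token):
        # "active" is filled in automatically when the token is valid
        return {
            'client_id': token.client_id,
            'token_type': token.token_type,
            'scope': token.get_scope(),
            'exp': token.expires_at,
            'iat': token.issued_at,
        }
# ---------------------------------------------------------------------------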
+ """ + issuer = self.get('issuer') + + #: 1. REQUIRED + if not issuer: + raise ValueError('"issuer" is required') + + parsed = urlparse.urlparse(issuer) + + #: 2. uses the "https" scheme + if not is_secure_transport(issuer): + raise ValueError('"issuer" MUST use "https" scheme') + + #: 3. has no query or fragment + if parsed.query or parsed.fragment: + raise ValueError('"issuer" has no query or fragment') + + def validate_authorization_endpoint(self): + """URL of the authorization server's authorization endpoint + [RFC6749]. This is REQUIRED unless no grant types are supported + that use the authorization endpoint. + """ + url = self.get('authorization_endpoint') + if url: + if not is_secure_transport(url): + raise ValueError( + '"authorization_endpoint" MUST use "https" scheme') + return + + grant_types_supported = set(self.grant_types_supported) + authorization_grant_types = {'authorization_code', 'implicit'} + if grant_types_supported & authorization_grant_types: + raise ValueError('"authorization_endpoint" is required') + + def validate_token_endpoint(self): + """URL of the authorization server's token endpoint [RFC6749]. This + is REQUIRED unless only the implicit grant type is supported. + """ + grant_types_supported = self.get('grant_types_supported') + if grant_types_supported and len(grant_types_supported) == 1 and \ + grant_types_supported[0] == 'implicit': + return + + url = self.get('token_endpoint') + if not url: + raise ValueError('"token_endpoint" is required') + + if not is_secure_transport(url): + raise ValueError('"token_endpoint" MUST use "https" scheme') + + def validate_jwks_uri(self): + """OPTIONAL. URL of the authorization server's JWK Set [JWK] + document. The referenced document contains the signing key(s) the + client uses to validate signatures from the authorization server. + This URL MUST use the "https" scheme. The JWK Set MAY also + contain the server's encryption key or keys, which are used by + clients to encrypt requests to the server. When both signing and + encryption keys are made available, a "use" (public key use) + parameter value is REQUIRED for all keys in the referenced JWK Set + to indicate each key's intended usage. + """ + url = self.get('jwks_uri') + if url and not is_secure_transport(url): + raise ValueError('"jwks_uri" MUST use "https" scheme') + + def validate_registration_endpoint(self): + """OPTIONAL. URL of the authorization server's OAuth 2.0 Dynamic + Client Registration endpoint [RFC7591]. + """ + url = self.get('registration_endpoint') + if url and not is_secure_transport(url): + raise ValueError( + '"registration_endpoint" MUST use "https" scheme') + + def validate_scopes_supported(self): + """RECOMMENDED. JSON array containing a list of the OAuth 2.0 + [RFC6749] "scope" values that this authorization server supports. + Servers MAY choose not to advertise some supported scope values + even when this parameter is used. + """ + validate_array_value(self, 'scopes_supported') + + def validate_response_types_supported(self): + """REQUIRED. JSON array containing a list of the OAuth 2.0 + "response_type" values that this authorization server supports. + The array values used are the same as those used with the + "response_types" parameter defined by "OAuth 2.0 Dynamic Client + Registration Protocol" [RFC7591]. 
+ """ + response_types_supported = self.get('response_types_supported') + if not response_types_supported: + raise ValueError('"response_types_supported" is required') + if not isinstance(response_types_supported, list): + raise ValueError('"response_types_supported" MUST be JSON array') + + def validate_response_modes_supported(self): + """OPTIONAL. JSON array containing a list of the OAuth 2.0 + "response_mode" values that this authorization server supports, as + specified in "OAuth 2.0 Multiple Response Type Encoding Practices" + [OAuth.Responses]. If omitted, the default is "["query", + "fragment"]". The response mode value "form_post" is also defined + in "OAuth 2.0 Form Post Response Mode" [OAuth.Post]. + """ + validate_array_value(self, 'response_modes_supported') + + def validate_grant_types_supported(self): + """OPTIONAL. JSON array containing a list of the OAuth 2.0 grant + type values that this authorization server supports. The array + values used are the same as those used with the "grant_types" + parameter defined by "OAuth 2.0 Dynamic Client Registration + Protocol" [RFC7591]. If omitted, the default value is + "["authorization_code", "implicit"]". + """ + validate_array_value(self, 'grant_types_supported') + + def validate_token_endpoint_auth_methods_supported(self): + """OPTIONAL. JSON array containing a list of client authentication + methods supported by this token endpoint. Client authentication + method values are used in the "token_endpoint_auth_method" + parameter defined in Section 2 of [RFC7591]. If omitted, the + default is "client_secret_basic" -- the HTTP Basic Authentication + Scheme specified in Section 2.3.1 of OAuth 2.0 [RFC6749]. + """ + validate_array_value(self, 'token_endpoint_auth_methods_supported') + + def validate_token_endpoint_auth_signing_alg_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWS signing + algorithms ("alg" values) supported by the token endpoint for the + signature on the JWT [JWT] used to authenticate the client at the + token endpoint for the "private_key_jwt" and "client_secret_jwt" + authentication methods. This metadata entry MUST be present if + either of these authentication methods are specified in the + "token_endpoint_auth_methods_supported" entry. No default + algorithms are implied if this entry is omitted. Servers SHOULD + support "RS256". The value "none" MUST NOT be used. + """ + _validate_alg_values( + self, + 'token_endpoint_auth_signing_alg_values_supported', + self.token_endpoint_auth_methods_supported + ) + + def validate_service_documentation(self): + """OPTIONAL. URL of a page containing human-readable information + that developers might want or need to know when using the + authorization server. In particular, if the authorization server + does not support Dynamic Client Registration, then information on + how to register clients needs to be provided in this + documentation. + """ + value = self.get('service_documentation') + if value and not is_valid_url(value): + raise ValueError('"service_documentation" MUST be a URL') + + def validate_ui_locales_supported(self): + """OPTIONAL. Languages and scripts supported for the user interface, + represented as a JSON array of language tag values from BCP 47 + [RFC5646]. If omitted, the set of supported languages and scripts + is unspecified. + """ + validate_array_value(self, 'ui_locales_supported') + + def validate_op_policy_uri(self): + """OPTIONAL. 
URL that the authorization server provides to the + person registering the client to read about the authorization + server's requirements on how the client can use the data provided + by the authorization server. The registration process SHOULD + display this URL to the person registering the client if it is + given. As described in Section 5, despite the identifier + "op_policy_uri" appearing to be OpenID-specific, its usage in this + specification is actually referring to a general OAuth 2.0 feature + that is not specific to OpenID Connect. + """ + value = self.get('op_policy_uri') + if value and not is_valid_url(value): + raise ValueError('"op_policy_uri" MUST be a URL') + + def validate_op_tos_uri(self): + """OPTIONAL. URL that the authorization server provides to the + person registering the client to read about the authorization + server's terms of service. The registration process SHOULD + display this URL to the person registering the client if it is + given. As described in Section 5, despite the identifier + "op_tos_uri", appearing to be OpenID-specific, its usage in this + specification is actually referring to a general OAuth 2.0 feature + that is not specific to OpenID Connect. + """ + value = self.get('op_tos_uri') + if value and not is_valid_url(value): + raise ValueError('"op_tos_uri" MUST be a URL') + + def validate_revocation_endpoint(self): + """OPTIONAL. URL of the authorization server's OAuth 2.0 revocation + endpoint [RFC7009].""" + url = self.get('revocation_endpoint') + if url and not is_secure_transport(url): + raise ValueError('"revocation_endpoint" MUST use "https" scheme') + + def validate_revocation_endpoint_auth_methods_supported(self): + """OPTIONAL. JSON array containing a list of client authentication + methods supported by this revocation endpoint. The valid client + authentication method values are those registered in the IANA + "OAuth Token Endpoint Authentication Methods" registry + [IANA.OAuth.Parameters]. If omitted, the default is + "client_secret_basic" -- the HTTP Basic Authentication Scheme + specified in Section 2.3.1 of OAuth 2.0 [RFC6749]. + """ + validate_array_value(self, 'revocation_endpoint_auth_methods_supported') + + def validate_revocation_endpoint_auth_signing_alg_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWS signing + algorithms ("alg" values) supported by the revocation endpoint for + the signature on the JWT [JWT] used to authenticate the client at + the revocation endpoint for the "private_key_jwt" and + "client_secret_jwt" authentication methods. This metadata entry + MUST be present if either of these authentication methods are + specified in the "revocation_endpoint_auth_methods_supported" + entry. No default algorithms are implied if this entry is + omitted. The value "none" MUST NOT be used. + """ + _validate_alg_values( + self, + 'revocation_endpoint_auth_signing_alg_values_supported', + self.revocation_endpoint_auth_methods_supported + ) + + def validate_introspection_endpoint(self): + """OPTIONAL. URL of the authorization server's OAuth 2.0 + introspection endpoint [RFC7662]. + """ + url = self.get('introspection_endpoint') + if url and not is_secure_transport(url): + raise ValueError( + '"introspection_endpoint" MUST use "https" scheme') + + def validate_introspection_endpoint_auth_methods_supported(self): + """OPTIONAL. JSON array containing a list of client authentication + methods supported by this introspection endpoint. 
The valid + client authentication method values are those registered in the + IANA "OAuth Token Endpoint Authentication Methods" registry + [IANA.OAuth.Parameters] or those registered in the IANA "OAuth + Access Token Types" registry [IANA.OAuth.Parameters]. (These + values are and will remain distinct, due to Section 7.2.) If + omitted, the set of supported authentication methods MUST be + determined by other means. + """ + validate_array_value(self, 'introspection_endpoint_auth_methods_supported') + + def validate_introspection_endpoint_auth_signing_alg_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWS signing + algorithms ("alg" values) supported by the introspection endpoint + for the signature on the JWT [JWT] used to authenticate the client + at the introspection endpoint for the "private_key_jwt" and + "client_secret_jwt" authentication methods. This metadata entry + MUST be present if either of these authentication methods are + specified in the "introspection_endpoint_auth_methods_supported" + entry. No default algorithms are implied if this entry is + omitted. The value "none" MUST NOT be used. + """ + _validate_alg_values( + self, + 'introspection_endpoint_auth_signing_alg_values_supported', + self.introspection_endpoint_auth_methods_supported + ) + + def validate_code_challenge_methods_supported(self): + """OPTIONAL. JSON array containing a list of Proof Key for Code + Exchange (PKCE) [RFC7636] code challenge methods supported by this + authorization server. Code challenge method values are used in + the "code_challenge_method" parameter defined in Section 4.3 of + [RFC7636]. The valid code challenge method values are those + registered in the IANA "PKCE Code Challenge Methods" registry + [IANA.OAuth.Parameters]. If omitted, the authorization server + does not support PKCE. 
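For orientation, a minimal end-to-end use of this metadata class; the URLs and the exact set of keys are illustrative only, and ``validate()`` further below is the real entry point. Omitted OPTIONAL keys fall back to the documented defaults::

    from authlib.oauth2.rfc8414 import AuthorizationServerMetadata

    metadata = AuthorizationServerMetadata({
        'issuer': 'https://server.example.com',
        'authorization_endpoint': 'https://server.example.com/authorize',
        'token_endpoint': 'https://server.example.com/token',
        'response_types_supported': ['code', 'token'],
    })
    metadata.validate()  # raises ValueError when a REQUIRED entry is missing or invalid

    metadata.response_modes_supported  # ['query', 'fragment']
    metadata.grant_types_supported     # ['authorization_code', 'implicit']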
+ """ + validate_array_value(self, 'code_challenge_methods_supported') + + @property + def response_modes_supported(self): + #: If omitted, the default is ["query", "fragment"] + return self.get('response_modes_supported', ["query", "fragment"]) + + @property + def grant_types_supported(self): + #: If omitted, the default value is ["authorization_code", "implicit"] + return self.get('grant_types_supported', ["authorization_code", "implicit"]) + + @property + def token_endpoint_auth_methods_supported(self): + #: If omitted, the default is "client_secret_basic" + return self.get('token_endpoint_auth_methods_supported', ["client_secret_basic"]) + + @property + def revocation_endpoint_auth_methods_supported(self): + #: If omitted, the default is "client_secret_basic" + return self.get('revocation_endpoint_auth_methods_supported', ["client_secret_basic"]) + + @property + def introspection_endpoint_auth_methods_supported(self): + #: If omitted, the set of supported authentication methods MUST be + #: determined by other means + #: here, we use "client_secret_basic" + return self.get('introspection_endpoint_auth_methods_supported', ["client_secret_basic"]) + + def validate(self): + """Validate all server metadata value.""" + for key in self.REGISTRY_KEYS: + object.__getattribute__(self, 'validate_{}'.format(key))() + + def __getattr__(self, key): + try: + return object.__getattribute__(self, key) + except AttributeError as error: + if key in self.REGISTRY_KEYS: + return self.get(key) + raise error + + +def _validate_alg_values(data, key, auth_methods_supported): + value = data.get(key) + if value and not isinstance(value, list): + raise ValueError('"{}" MUST be JSON array'.format(key)) + + auth_methods = set(auth_methods_supported) + jwt_auth_methods = {'private_key_jwt', 'client_secret_jwt'} + if auth_methods & jwt_auth_methods: + if not value: + raise ValueError('"{}" is required'.format(key)) + + if value and 'none' in value: + raise ValueError( + 'the value "none" MUST NOT be used in "{}"'.format(key)) + + +def validate_array_value(metadata, key): + values = metadata.get(key) + if values is not None and not isinstance(values, list): + raise ValueError('"{}" MUST be JSON array'.format(key)) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8414/well_known.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8414/well_known.py new file mode 100644 index 000000000..dc948d883 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8414/well_known.py @@ -0,0 +1,22 @@ +from authlib.common.urls import urlparse + + +def get_well_known_url(issuer, external=False, suffix='oauth-authorization-server'): + """Get well-known URI with issuer via `Section 3.1`_. + + .. 
_`Section 3.1`: https://tools.ietf.org/html/rfc8414#section-3.1 + + :param issuer: URL of the issuer + :param external: return full external url or not + :param suffix: well-known URI suffix for RFC8414 + :return: URL + """ + parsed = urlparse.urlparse(issuer) + path = parsed.path + if path and path != '/': + url_path = '/.well-known/{}{}'.format(suffix, path) + else: + url_path = '/.well-known/{}'.format(suffix) + if not external: + return url_path + return parsed.scheme + '://' + parsed.netloc + url_path diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8628/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8628/__init__.py new file mode 100644 index 000000000..2d4447f85 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8628/__init__.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +""" + authlib.oauth2.rfc8628 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents an implementation of + OAuth 2.0 Device Authorization Grant. + + https://tools.ietf.org/html/rfc8628 +""" + +from .endpoint import DeviceAuthorizationEndpoint +from .device_code import DeviceCodeGrant, DEVICE_CODE_GRANT_TYPE +from .models import DeviceCredentialMixin, DeviceCredentialDict +from .errors import AuthorizationPendingError, SlowDownError, ExpiredTokenError + + +__all__ = [ + 'DeviceAuthorizationEndpoint', + 'DeviceCodeGrant', 'DEVICE_CODE_GRANT_TYPE', + 'DeviceCredentialMixin', 'DeviceCredentialDict', + 'AuthorizationPendingError', 'SlowDownError', 'ExpiredTokenError', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8628/device_code.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8628/device_code.py new file mode 100644 index 000000000..67be3365b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8628/device_code.py @@ -0,0 +1,198 @@ +import time +import logging +from ..rfc6749.errors import ( + InvalidRequestError, + InvalidClientError, + UnauthorizedClientError, + AccessDeniedError, +) +from ..rfc6749 import BaseGrant, TokenEndpointMixin +from .errors import ( + AuthorizationPendingError, + ExpiredTokenError, + SlowDownError, +) + +log = logging.getLogger(__name__) +DEVICE_CODE_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:device_code' + + +class DeviceCodeGrant(BaseGrant, TokenEndpointMixin): + """This OAuth 2.0 [RFC6749] protocol extension enables OAuth clients to + request user authorization from applications on devices that have + limited input capabilities or lack a suitable browser. Such devices + include smart TVs, media consoles, picture frames, and printers, + which lack an easy input method or a suitable browser required for + traditional OAuth interactions. Here is the authorization flow:: + + +----------+ +----------------+ + | |>---(A)-- Client Identifier --->| | + | | | | + | |<---(B)-- Device Code, ---<| | + | | User Code, | | + | Device | & Verification URI | | + | Client | | | + | | [polling] | | + | |>---(E)-- Device Code --->| | + | | & Client Identifier | | + | | | Authorization | + | |<---(F)-- Access Token ---<| Server | + +----------+ (& Optional Refresh Token) | | + v | | + : | | + (C) User Code & Verification URI | | + : | | + v | | + +----------+ | | + | End User | | | + | at |<---(D)-- End user reviews --->| | + | Browser | authorization request | | + +----------+ +----------------+ + + This DeviceCodeGrant is the implementation of step (E) and (F). 
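Two illustrative calls to ``get_well_known_url`` above, showing how an issuer path component is folded into the RFC 8414 well-known URI (hostnames are placeholders)::

    get_well_known_url('https://example.com')
    # '/.well-known/oauth-authorization-server'

    get_well_known_url('https://example.com/issuer1', external=True)
    # 'https://example.com/.well-known/oauth-authorization-server/issuer1'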
+ + (E) While the end user reviews the client's request (step D), the + client repeatedly polls the authorization server to find out if + the user completed the user authorization step. The client + includes the device code and its client identifier. + + (F) The authorization server validates the device code provided by + the client and responds with the access token if the client is + granted access, an error if they are denied access, or an + indication that the client should continue to poll. + """ + GRANT_TYPE = DEVICE_CODE_GRANT_TYPE + + def validate_token_request(self): + """After displaying instructions to the user, the client creates an + access token request and sends it to the token endpoint with the + following parameters: + + grant_type + REQUIRED. Value MUST be set to + "urn:ietf:params:oauth:grant-type:device_code". + + device_code + REQUIRED. The device verification code, "device_code" from the + device authorization response. + + client_id + REQUIRED if the client is not authenticating with the + authorization server as described in Section 3.2.1. of [RFC6749]. + The client identifier as described in Section 2.2 of [RFC6749]. + + For example, the client makes the following HTTPS request:: + + POST /token HTTP/1.1 + Host: server.example.com + Content-Type: application/x-www-form-urlencoded + + grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Adevice_code + &device_code=GmRhmhcxhwAzkoEqiMEg_DnyEysNkuNhszIySk9eS + &client_id=1406020730 + """ + device_code = self.request.data.get('device_code') + if not device_code: + raise InvalidRequestError('Missing "device_code" in payload') + + if not self.request.client_id: + raise InvalidRequestError('Missing "client_id" in payload') + + credential = self.query_device_credential(device_code) + if not credential: + raise InvalidRequestError('Invalid "device_code" in payload') + + if credential.get_client_id() != self.request.client_id: + raise UnauthorizedClientError() + + client = self.authenticate_token_endpoint_client() + if not client.check_grant_type(self.GRANT_TYPE): + raise UnauthorizedClientError() + + user = self.validate_device_credential(credential) + self.request.user = user + self.request.client = client + self.request.credential = credential + + def create_token_response(self): + """If the access token request is valid and authorized, the + authorization server issues an access token and optional refresh + token. 
+ """ + client = self.request.client + scope = self.request.credential.get_scope() + token = self.generate_token( + user=self.request.user, + scope=scope, + include_refresh_token=client.check_grant_type('refresh_token'), + ) + log.debug('Issue token %r to %r', token, client) + self.save_token(token) + self.execute_hook('process_token', token=token) + return 200, token, self.TOKEN_RESPONSE_HEADER + + def validate_device_credential(self, credential): + user_code = credential.get_user_code() + user_grant = self.query_user_grant(user_code) + + if user_grant is not None: + user, approved = user_grant + if not approved: + raise AccessDeniedError() + return user + + exp = credential.get_expires_at() + now = time.time() + if exp < now: + raise ExpiredTokenError() + + if self.should_slow_down(credential, now): + raise SlowDownError() + + raise AuthorizationPendingError() + + def authenticate_token_endpoint_client(self): + client = self.server.query_client(self.request.client_id) + if not client: + raise InvalidClientError() + self.server.send_signal( + 'after_authenticate_client', + client=client, grant=self) + return client + + def query_device_credential(self, device_code): + """Get device credential from previously savings via ``DeviceAuthorizationEndpoint``. + Developers MUST implement it in subclass:: + + def query_device_credential(self, device_code): + return DeviceCredential.query.get(device_code) + + :param device_code: a string represent the code. + :return: DeviceCredential instance + """ + raise NotImplementedError() + + def query_user_grant(self, user_code): + """Get user and grant via the given user code. Developers MUST + implement it in subclass:: + + def query_user_grant(self, user_code): + # e.g. we saved user grant info in redis + data = redis.get('oauth_user_grant:' + user_code) + if not data: + return None + + user_id, allowed = data.split() + user = User.query.get(user_id) + return user, bool(allowed) + + Note, user grant information is saved by verification endpoint. + """ + raise NotImplementedError() + + def should_slow_down(self, credential, now): + """The authorization request is still pending and polling should + continue, but the interval MUST be increased by 5 seconds for this + and all subsequent requests. + """ + raise NotImplementedError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8628/endpoint.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8628/endpoint.py new file mode 100644 index 000000000..fda5f1a3f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8628/endpoint.py @@ -0,0 +1,148 @@ +from authlib.consts import default_json_headers +from authlib.common.security import generate_token +from authlib.common.urls import add_params_to_uri +from ..rfc6749.errors import InvalidRequestError + + +class DeviceAuthorizationEndpoint(object): + """This OAuth 2.0 [RFC6749] protocol extension enables OAuth clients to + request user authorization from applications on devices that have + limited input capabilities or lack a suitable browser. Such devices + include smart TVs, media consoles, picture frames, and printers, + which lack an easy input method or a suitable browser required for + traditional OAuth interactions. 
Here is the authorization flow:: + + +----------+ +----------------+ + | |>---(A)-- Client Identifier --->| | + | | | | + | |<---(B)-- Device Code, ---<| | + | | User Code, | | + | Device | & Verification URI | | + | Client | | | + | | [polling] | | + | |>---(E)-- Device Code --->| | + | | & Client Identifier | | + | | | Authorization | + | |<---(F)-- Access Token ---<| Server | + +----------+ (& Optional Refresh Token) | | + v | | + : | | + (C) User Code & Verification URI | | + : | | + v | | + +----------+ | | + | End User | | | + | at |<---(D)-- End user reviews --->| | + | Browser | authorization request | | + +----------+ +----------------+ + + This DeviceAuthorizationEndpoint is the implementation of step (A) and (B). + + (A) The client requests access from the authorization server and + includes its client identifier in the request. + + (B) The authorization server issues a device code and an end-user + code and provides the end-user verification URI. + """ + + ENDPOINT_NAME = 'device_authorization' + + #: customize "user_code" type, string or digital + USER_CODE_TYPE = 'string' + + #: The lifetime in seconds of the "device_code" and "user_code" + EXPIRES_IN = 1800 + + #: The minimum amount of time in seconds that the client SHOULD + #: wait between polling requests to the token endpoint. + INTERVAL = 5 + + def __init__(self, server): + self.server = server + + def __call__(self, request): + # make it callable for authorization server + # ``create_endpoint_response`` + return self.create_endpoint_response(request) + + def create_endpoint_request(self, request): + return self.server.create_oauth2_request(request) + + def create_endpoint_response(self, request): + # https://tools.ietf.org/html/rfc8628#section-3.1 + if not request.client_id: + raise InvalidRequestError('Missing "client_id" in payload') + + self.server.validate_requested_scope(request.scope) + + device_code = self.generate_device_code() + user_code = self.generate_user_code() + verification_uri = self.get_verification_uri() + verification_uri_complete = add_params_to_uri( + verification_uri, [('user_code', user_code)]) + + data = { + 'device_code': device_code, + 'user_code': user_code, + 'verification_uri': verification_uri, + 'verification_uri_complete': verification_uri_complete, + 'expires_in': self.EXPIRES_IN, + 'interval': self.INTERVAL, + } + + self.save_device_credential(request.client_id, request.scope, data) + return 200, data, default_json_headers + + def generate_user_code(self): + """A method to generate ``user_code`` value for device authorization + endpoint. This method will generate a random string like MQNA-JPOZ. + Developers can rewrite this method to create their own ``user_code``. + """ + # https://tools.ietf.org/html/rfc8628#section-6.1 + if self.USER_CODE_TYPE == 'digital': + return create_digital_user_code() + return create_string_user_code() + + def generate_device_code(self): + """A method to generate ``device_code`` value for device authorization + endpoint. This method will generate a random string of 42 characters. + Developers can rewrite this method to create their own ``device_code``. + """ + return generate_token(42) + + def get_verification_uri(self): + """Define the ``verification_uri`` of device authorization endpoint. 
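For reference, the JSON body assembled by ``create_endpoint_response`` above has the shape of the RFC 8628 Section 3.2 example (all values invented)::

    {
        "device_code": "GmRhmhcxhwAzkoEqiMEg_DnyEysNkuNhszIySk9eS",
        "user_code": "WDJB-MJHT",
        "verification_uri": "https://example.com/activate",
        "verification_uri_complete": "https://example.com/activate?user_code=WDJB-MJHT",
        "expires_in": 1800,
        "interval": 5
    }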
+ Developers MUST implement this method in subclass:: + + def get_verification_uri(self): + return 'https://your-company.com/active' + """ + raise NotImplementedError() + + def save_device_credential(self, client_id, scope, data): + """Save device token into database for later use. Developers MUST + implement this method in subclass:: + + def save_device_credential(self, client_id, scope, data): + item = DeviceCredential( + client_id=client_id, + scope=scope, + **data + ) + item.save() + """ + raise NotImplementedError() + + +def create_string_user_code(): + base = 'BCDFGHJKLMNPQRSTVWXZ' + return '-'.join([generate_token(4, base), generate_token(4, base)]) + + +def create_digital_user_code(): + base = '0123456789' + return '-'.join([ + generate_token(3, base), + generate_token(3, base), + generate_token(3, base), + ]) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8628/errors.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8628/errors.py new file mode 100644 index 000000000..4a63db825 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8628/errors.py @@ -0,0 +1,27 @@ +from ..rfc6749.errors import OAuth2Error + +# https://tools.ietf.org/html/rfc8628#section-3.5 + + +class AuthorizationPendingError(OAuth2Error): + """The authorization request is still pending as the end user hasn't + yet completed the user-interaction steps (Section 3.3). + """ + error = 'authorization_pending' + + +class SlowDownError(OAuth2Error): + """A variant of "authorization_pending", the authorization request is + still pending and polling should continue, but the interval MUST + be increased by 5 seconds for this and all subsequent requests. + """ + error = 'slow_down' + + +class ExpiredTokenError(OAuth2Error): + """The "device_code" has expired, and the device authorization + session has concluded. The client MAY commence a new device + authorization request but SHOULD wait for user interaction before + restarting to avoid unnecessary polling. + """ + error = 'expired_token' diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8628/models.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8628/models.py new file mode 100644 index 000000000..4090ed674 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8628/models.py @@ -0,0 +1,27 @@ + +class DeviceCredentialMixin(object): + def get_client_id(self): + raise NotImplementedError() + + def get_scope(self): + raise NotImplementedError() + + def get_user_code(self): + raise NotImplementedError() + + def get_expires_at(self): + raise NotImplementedError() + + +class DeviceCredentialDict(dict, DeviceCredentialMixin): + def get_client_id(self): + return self['client_id'] + + def get_scope(self): + return self.get('scope') + + def get_user_code(self): + return self['user_code'] + + def get_expires_at(self): + return self.get('expires_at') diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8693/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8693/__init__.py new file mode 100644 index 000000000..110b3874f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oauth2/rfc8693/__init__.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +""" + authlib.oauth2.rfc8693 + ~~~~~~~~~~~~~~~~~~~~~~ + + This module represents an implementation of + OAuth 2.0 Token Exchange. 
+ + https://tools.ietf.org/html/rfc8693 +""" diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/__init__.py new file mode 100644 index 000000000..8ee628fa0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/__init__.py @@ -0,0 +1,23 @@ +""" + authlib.oidc.core + ~~~~~~~~~~~~~~~~~ + + OpenID Connect Core 1.0 Implementation. + + http://openid.net/specs/openid-connect-core-1_0.html +""" + +from .models import AuthorizationCodeMixin +from .claims import ( + IDToken, CodeIDToken, ImplicitIDToken, HybridIDToken, + UserInfo, get_claim_cls_by_response_type, +) +from .grants import OpenIDCode, OpenIDHybridGrant, OpenIDImplicitGrant + + +__all__ = [ + 'AuthorizationCodeMixin', + 'IDToken', 'CodeIDToken', 'ImplicitIDToken', 'HybridIDToken', + 'UserInfo', 'get_claim_cls_by_response_type', + 'OpenIDCode', 'OpenIDHybridGrant', 'OpenIDImplicitGrant', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/claims.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/claims.py new file mode 100644 index 000000000..dc3a84301 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/claims.py @@ -0,0 +1,242 @@ +import time +import hmac +from authlib.common.encoding import to_bytes +from authlib.jose import JWTClaims +from authlib.jose.errors import ( + MissingClaimError, + InvalidClaimError, +) +from .util import create_half_hash + +__all__ = [ + 'IDToken', 'CodeIDToken', 'ImplicitIDToken', 'HybridIDToken', + 'UserInfo', 'get_claim_cls_by_response_type' +] + +_REGISTERED_CLAIMS = [ + 'iss', 'sub', 'aud', 'exp', 'nbf', 'iat', + 'auth_time', 'nonce', 'acr', 'amr', 'azp', + 'at_hash', +] + + +class IDToken(JWTClaims): + ESSENTIAL_CLAIMS = ['iss', 'sub', 'aud', 'exp', 'iat'] + + def validate(self, now=None, leeway=0): + for k in self.ESSENTIAL_CLAIMS: + if k not in self: + raise MissingClaimError(k) + + self._validate_essential_claims() + if now is None: + now = int(time.time()) + + self.validate_iss() + self.validate_sub() + self.validate_aud() + self.validate_exp(now, leeway) + self.validate_nbf(now, leeway) + self.validate_iat(now, leeway) + self.validate_auth_time() + self.validate_nonce() + self.validate_acr() + self.validate_amr() + self.validate_azp() + self.validate_at_hash() + + def validate_auth_time(self): + """Time when the End-User authentication occurred. Its value is a JSON + number representing the number of seconds from 1970-01-01T0:0:0Z as + measured in UTC until the date/time. When a max_age request is made or + when auth_time is requested as an Essential Claim, then this Claim is + REQUIRED; otherwise, its inclusion is OPTIONAL. + """ + auth_time = self.get('auth_time') + if self.params.get('max_age') and not auth_time: + raise MissingClaimError('auth_time') + + if auth_time and not isinstance(auth_time, int): + raise InvalidClaimError('auth_time') + + def validate_nonce(self): + """String value used to associate a Client session with an ID Token, + and to mitigate replay attacks. The value is passed through unmodified + from the Authentication Request to the ID Token. 
If present in the ID + Token, Clients MUST verify that the nonce Claim Value is equal to the + value of the nonce parameter sent in the Authentication Request. If + present in the Authentication Request, Authorization Servers MUST + include a nonce Claim in the ID Token with the Claim Value being the + nonce value sent in the Authentication Request. Authorization Servers + SHOULD perform no other processing on nonce values used. The nonce + value is a case sensitive string. + """ + nonce_value = self.params.get('nonce') + if nonce_value: + if 'nonce' not in self: + raise MissingClaimError('nonce') + if nonce_value != self['nonce']: + raise InvalidClaimError('nonce') + + def validate_acr(self): + """OPTIONAL. Authentication Context Class Reference. String specifying + an Authentication Context Class Reference value that identifies the + Authentication Context Class that the authentication performed + satisfied. The value "0" indicates the End-User authentication did not + meet the requirements of `ISO/IEC 29115`_ level 1. Authentication + using a long-lived browser cookie, for instance, is one example where + the use of "level 0" is appropriate. Authentications with level 0 + SHOULD NOT be used to authorize access to any resource of any monetary + value. An absolute URI or an `RFC 6711`_ registered name SHOULD be + used as the acr value; registered names MUST NOT be used with a + different meaning than that which is registered. Parties using this + claim will need to agree upon the meanings of the values used, which + may be context-specific. The acr value is a case sensitive string. + + .. _`ISO/IEC 29115`: https://www.iso.org/standard/45138.html + .. _`RFC 6711`: https://tools.ietf.org/html/rfc6711 + """ + return self._validate_claim_value('acr') + + def validate_amr(self): + """OPTIONAL. Authentication Methods References. JSON array of strings + that are identifiers for authentication methods used in the + authentication. For instance, values might indicate that both password + and OTP authentication methods were used. The definition of particular + values to be used in the amr Claim is beyond the scope of this + specification. Parties using this claim will need to agree upon the + meanings of the values used, which may be context-specific. The amr + value is an array of case sensitive strings. + """ + amr = self.get('amr') + if amr and not isinstance(self['amr'], list): + raise InvalidClaimError('amr') + + def validate_azp(self): + """OPTIONAL. Authorized party - the party to which the ID Token was + issued. If present, it MUST contain the OAuth 2.0 Client ID of this + party. This Claim is only needed when the ID Token has a single + audience value and that audience is different than the authorized + party. It MAY be included even when the authorized party is the same + as the sole audience. The azp value is a case sensitive string + containing a StringOrURI value. + """ + aud = self.get('aud') + client_id = self.params.get('client_id') + required = False + if aud and client_id: + if isinstance(aud, list) and len(aud) == 1: + aud = aud[0] + if aud != client_id: + required = True + + azp = self.get('azp') + if required and not azp: + raise MissingClaimError('azp') + + if azp and client_id and azp != client_id: + raise InvalidClaimError('azp') + + def validate_at_hash(self): + """OPTIONAL. Access Token hash value. 
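A short sketch of how these claims classes are typically driven through ``authlib.jose`` (the token string, key, and parameter values are placeholders)::

    from authlib.jose import jwt
    from authlib.oidc.core import CodeIDToken

    claims = jwt.decode(
        id_token_string, provider_public_key,
        claims_cls=CodeIDToken,
        claims_params={
            'nonce': 'request-nonce',      # enables validate_nonce()
            'client_id': 'my-client-id',   # used by validate_azp()
            'access_token': access_token,  # enables validate_at_hash()
        },
    )
    claims.validate(leeway=120)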
Its value is the base64url + encoding of the left-most half of the hash of the octets of the ASCII + representation of the access_token value, where the hash algorithm + used is the hash algorithm used in the alg Header Parameter of the + ID Token's JOSE Header. For instance, if the alg is RS256, hash the + access_token value with SHA-256, then take the left-most 128 bits and + base64url encode them. The at_hash value is a case sensitive string. + """ + access_token = self.params.get('access_token') + at_hash = self.get('at_hash') + if at_hash and access_token: + if not _verify_hash(at_hash, access_token, self.header['alg']): + raise InvalidClaimError('at_hash') + + +class CodeIDToken(IDToken): + RESPONSE_TYPES = ('code',) + REGISTERED_CLAIMS = _REGISTERED_CLAIMS + + +class ImplicitIDToken(IDToken): + RESPONSE_TYPES = ('id_token', 'id_token token') + ESSENTIAL_CLAIMS = ['iss', 'sub', 'aud', 'exp', 'iat', 'nonce'] + REGISTERED_CLAIMS = _REGISTERED_CLAIMS + + def validate_at_hash(self): + """If the ID Token is issued from the Authorization Endpoint with an + access_token value, which is the case for the response_type value + id_token token, this is REQUIRED; it MAY NOT be used when no Access + Token is issued, which is the case for the response_type value + id_token. + """ + access_token = self.params.get('access_token') + if access_token and 'at_hash' not in self: + raise MissingClaimError('at_hash') + super(ImplicitIDToken, self).validate_at_hash() + + +class HybridIDToken(ImplicitIDToken): + RESPONSE_TYPES = ('code id_token', 'code token', 'code id_token token') + REGISTERED_CLAIMS = _REGISTERED_CLAIMS + ['c_hash'] + + def validate(self, now=None, leeway=0): + super(HybridIDToken, self).validate(now=now, leeway=leeway) + self.validate_c_hash() + + def validate_c_hash(self): + """Code hash value. Its value is the base64url encoding of the + left-most half of the hash of the octets of the ASCII representation + of the code value, where the hash algorithm used is the hash algorithm + used in the alg Header Parameter of the ID Token's JOSE Header. For + instance, if the alg is HS512, hash the code value with SHA-512, then + take the left-most 256 bits and base64url encode them. The c_hash + value is a case sensitive string. + If the ID Token is issued from the Authorization Endpoint with a code, + which is the case for the response_type values code id_token and code + id_token token, this is REQUIRED; otherwise, its inclusion is OPTIONAL. + """ + code = self.params.get('code') + c_hash = self.get('c_hash') + if code: + if not c_hash: + raise MissingClaimError('c_hash') + if not _verify_hash(c_hash, code, self.header['alg']): + raise InvalidClaimError('c_hash') + + +class UserInfo(dict): + """The standard claims of a UserInfo object. Defined per `Section 5.1`_. + + .. 
_`Section 5.1`: http://openid.net/specs/openid-connect-core-1_0.html#StandardClaims + """ + + #: registered claims that UserInfo supports + REGISTERED_CLAIMS = [ + 'sub', 'name', 'given_name', 'family_name', 'middle_name', 'nickname', + 'preferred_username', 'profile', 'picture', 'website', 'email', + 'email_verified', 'gender', 'birthdate', 'zoneinfo', 'locale', + 'phone_number', 'phone_number_verified', 'address', 'updated_at', + ] + + def __getattr__(self, key): + try: + return object.__getattribute__(self, key) + except AttributeError as error: + if key in self.REGISTERED_CLAIMS: + return self.get(key) + raise error + + +def get_claim_cls_by_response_type(response_type): + claims_classes = (CodeIDToken, ImplicitIDToken, HybridIDToken) + for claims_cls in claims_classes: + if response_type in claims_cls.RESPONSE_TYPES: + return claims_cls + + +def _verify_hash(signature, s, alg): + hash_value = create_half_hash(s, alg) + if not hash_value: + return True + return hmac.compare_digest(hash_value, to_bytes(signature)) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/errors.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/errors.py new file mode 100644 index 000000000..e5fb630e3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/errors.py @@ -0,0 +1,78 @@ +from authlib.oauth2 import OAuth2Error + + +class InteractionRequiredError(OAuth2Error): + """The Authorization Server requires End-User interaction of some form + to proceed. This error MAY be returned when the prompt parameter value + in the Authentication Request is none, but the Authentication Request + cannot be completed without displaying a user interface for End-User + interaction. + + http://openid.net/specs/openid-connect-core-1_0.html#AuthError + """ + error = 'interaction_required' + + +class LoginRequiredError(OAuth2Error): + """The Authorization Server requires End-User authentication. This error + MAY be returned when the prompt parameter value in the Authentication + Request is none, but the Authentication Request cannot be completed + without displaying a user interface for End-User authentication. + + http://openid.net/specs/openid-connect-core-1_0.html#AuthError + """ + error = 'login_required' + + +class AccountSelectionRequiredError(OAuth2Error): + """The End-User is REQUIRED to select a session at the Authorization + Server. The End-User MAY be authenticated at the Authorization Server + with different associated accounts, but the End-User did not select a + session. This error MAY be returned when the prompt parameter value in + the Authentication Request is none, but the Authentication Request cannot + be completed without displaying a user interface to prompt for a session + to use. + + http://openid.net/specs/openid-connect-core-1_0.html#AuthError + """ + error = 'account_selection_required' + + +class ConsentRequiredError(OAuth2Error): + """The Authorization Server requires End-User consent. This error MAY be + returned when the prompt parameter value in the Authentication Request is + none, but the Authentication Request cannot be completed without + displaying a user interface for End-User consent. + + http://openid.net/specs/openid-connect-core-1_0.html#AuthError + """ + error = 'consent_required' + + +class InvalidRequestURIError(OAuth2Error): + """The request_uri in the Authorization Request returns an error or + contains invalid data. 
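A worked instance of the half-hash construction used by ``_verify_hash``/``create_half_hash`` above (the token value is arbitrary)::

    from authlib.oidc.core.util import create_half_hash

    # with alg 'RS256' the hash is SHA-256; the left-most 128 bits (16 bytes)
    # of the digest are base64url-encoded to form at_hash / c_hash
    at_hash = create_half_hash('SlAV32hkKG', 'RS256')
    # equivalent to:
    #   digest = hashlib.sha256(b'SlAV32hkKG').digest()
    #   at_hash = urlsafe_b64encode(digest[:16])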
+ + http://openid.net/specs/openid-connect-core-1_0.html#AuthError + """ + error = 'invalid_request_uri' + + +class InvalidRequestObjectError(OAuth2Error): + """The request parameter contains an invalid Request Object.""" + error = 'invalid_request_object' + + +class RequestNotSupportedError(OAuth2Error): + """The OP does not support use of the request parameter.""" + error = 'request_not_supported' + + +class RequestURINotSupportedError(OAuth2Error): + """The OP does not support use of the request_uri parameter.""" + error = 'request_uri_not_supported' + + +class RegistrationNotSupportedError(OAuth2Error): + """The OP does not support use of the registration parameter.""" + error = 'registration_not_supported' diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/grants/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/grants/__init__.py new file mode 100644 index 000000000..fb60bb72c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/grants/__init__.py @@ -0,0 +1,9 @@ +from .code import OpenIDCode +from .implicit import OpenIDImplicitGrant +from .hybrid import OpenIDHybridGrant + +__all__ = [ + 'OpenIDCode', + 'OpenIDImplicitGrant', + 'OpenIDHybridGrant', +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/grants/code.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/grants/code.py new file mode 100644 index 000000000..7fb3265ac --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/grants/code.py @@ -0,0 +1,119 @@ +""" + authlib.oidc.core.grants.code + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Implementation of Authentication using the Authorization Code Flow + per `Section 3.1`_. + + .. _`Section 3.1`: http://openid.net/specs/openid-connect-core-1_0.html#CodeFlowAuth +""" + +import logging +from .util import ( + is_openid_scope, + validate_nonce, + validate_request_prompt, + generate_id_token, +) + +log = logging.getLogger(__name__) + + +class OpenIDCode(object): + """An extension from OpenID Connect for "grant_type=code" request. + """ + def __init__(self, require_nonce=False): + self.require_nonce = require_nonce + + def exists_nonce(self, nonce, request): + """Check if the given nonce is existing in your database. Developers + MUST implement this method in subclass, e.g.:: + + def exists_nonce(self, nonce, request): + exists = AuthorizationCode.query.filter_by( + client_id=request.client_id, nonce=nonce + ).first() + return bool(exists) + + :param nonce: A string of "nonce" parameter in request + :param request: OAuth2Request instance + :return: Boolean + """ + raise NotImplementedError() + + def get_jwt_config(self, grant): # pragma: no cover + """Get the JWT configuration for OpenIDCode extension. The JWT + configuration will be used to generate ``id_token``. Developers + MUST implement this method in subclass, e.g.:: + + def get_jwt_config(self, grant): + return { + 'key': read_private_key_file(key_path), + 'alg': 'RS512', + 'iss': 'issuer-identity', + 'exp': 3600 + } + + :param grant: AuthorizationCodeGrant instance + :return: dict + """ + raise NotImplementedError() + + def generate_user_info(self, user, scope): # pragma: no cover + """Provide user information for the given scope. 
Developers + MUST implement this method in subclass, e.g.:: + + from authlib.oidc.core import UserInfo + + def generate_user_info(self, user, scope): + user_info = UserInfo(sub=user.id, name=user.name) + if 'email' in scope: + user_info['email'] = user.email + return user_info + + :param user: user instance + :param scope: scope of the token + :return: ``authlib.oidc.core.UserInfo`` instance + """ + raise NotImplementedError() + + def get_audiences(self, request): + """Parse `aud` value for id_token, default value is client id. Developers + MAY rewrite this method to provide a customized audience value. + """ + client = request.client + return [client.get_client_id()] + + def process_token(self, grant, token): + scope = token.get('scope') + if not scope or not is_openid_scope(scope): + # standard authorization code flow + return token + + request = grant.request + credential = request.credential + + config = self.get_jwt_config(grant) + config['aud'] = self.get_audiences(request) + config['nonce'] = credential.get_nonce() + config['auth_time'] = credential.get_auth_time() + + user_info = self.generate_user_info(request.user, token['scope']) + id_token = generate_id_token(token, user_info, **config) + token['id_token'] = id_token + return token + + def validate_openid_authorization_request(self, grant): + validate_nonce(grant.request, self.exists_nonce, self.require_nonce) + + def __call__(self, grant): + grant.register_hook('process_token', self.process_token) + if is_openid_scope(grant.request.scope): + grant.register_hook( + 'after_validate_authorization_request', + self.validate_openid_authorization_request + ) + grant.register_hook( + 'after_validate_consent_request', + validate_request_prompt + ) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/grants/hybrid.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/grants/hybrid.py new file mode 100644 index 000000000..d2c14acf6 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/grants/hybrid.py @@ -0,0 +1,97 @@ +import logging +from authlib.deprecate import deprecate +from authlib.common.security import generate_token +from authlib.oauth2.rfc6749 import InvalidScopeError +from authlib.oauth2.rfc6749.grants.authorization_code import ( + validate_code_authorization_request +) +from .implicit import OpenIDImplicitGrant +from .util import is_openid_scope, validate_nonce + +log = logging.getLogger(__name__) + + +class OpenIDHybridGrant(OpenIDImplicitGrant): + #: Generated "code" length + AUTHORIZATION_CODE_LENGTH = 48 + + RESPONSE_TYPES = {'code id_token', 'code token', 'code id_token token'} + GRANT_TYPE = 'code' + DEFAULT_RESPONSE_MODE = 'fragment' + + def generate_authorization_code(self): + """"The method to generate "code" value for authorization code data. + Developers may rewrite this method, or customize the code length with:: + + class MyAuthorizationCodeGrant(AuthorizationCodeGrant): + AUTHORIZATION_CODE_LENGTH = 32 # default is 48 + """ + return generate_token(self.AUTHORIZATION_CODE_LENGTH) + + def save_authorization_code(self, code, request): + """Save authorization_code for later use. Developers MUST implement + it in subclass. 
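As a rough wiring note (server and class names are placeholders): an ``OpenIDCode`` subclass implementing the hooks above is attached as a grant extension when the authorization-code grant is registered::

    server.register_grant(
        MyAuthorizationCodeGrant,
        [MyOpenIDCode(require_nonce=True)],
    )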
Here is an example:: + + def save_authorization_code(self, code, request): + client = request.client + item = AuthorizationCode( + code=code, + client_id=client.client_id, + redirect_uri=request.redirect_uri, + scope=request.scope, + nonce=request.data.get('nonce'), + user_id=request.user.id, + ) + item.save() + """ + raise NotImplementedError() + + def validate_authorization_request(self): + if not is_openid_scope(self.request.scope): + raise InvalidScopeError( + 'Missing "openid" scope', + redirect_uri=self.request.redirect_uri, + redirect_fragment=True, + ) + self.register_hook( + 'after_validate_authorization_request', + lambda grant: validate_nonce( + grant.request, grant.exists_nonce, required=True) + ) + return validate_code_authorization_request(self) + + def create_granted_params(self, grant_user): + self.request.user = grant_user + client = self.request.client + + if hasattr(self, 'create_authorization_code'): # pragma: no cover + deprecate('Use "generate_authorization_code" instead', '1.0') + code = self.create_authorization_code(client, grant_user, self.request) + else: + code = self.generate_authorization_code() + self.save_authorization_code(code, self.request) + + params = [('code', code)] + token = self.generate_token( + grant_type='implicit', + user=grant_user, + scope=self.request.scope, + include_refresh_token=False + ) + + response_types = self.request.response_type.split() + if 'token' in response_types: + log.debug('Grant token %r to %r', token, client) + self.server.save_token(token, self.request) + if 'id_token' in response_types: + token = self.process_implicit_token(token, code) + else: + # response_type is "code id_token" + token = { + 'expires_in': token['expires_in'], + 'scope': token['scope'] + } + token = self.process_implicit_token(token, code) + + params.extend([(k, token[k]) for k in token]) + return params diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/grants/implicit.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/grants/implicit.py new file mode 100644 index 000000000..ac1a76313 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/grants/implicit.py @@ -0,0 +1,151 @@ +import logging +from authlib.oauth2.rfc6749 import ( + OAuth2Error, + InvalidScopeError, + AccessDeniedError, + ImplicitGrant, +) +from .util import ( + is_openid_scope, + validate_nonce, + validate_request_prompt, + create_response_mode_response, + generate_id_token, +) + +log = logging.getLogger(__name__) + + +class OpenIDImplicitGrant(ImplicitGrant): + RESPONSE_TYPES = {'id_token token', 'id_token'} + DEFAULT_RESPONSE_MODE = 'fragment' + + def exists_nonce(self, nonce, request): + """Check if the given nonce is existing in your database. Developers + should implement this method in subclass, e.g.:: + + def exists_nonce(self, nonce, request): + exists = AuthorizationCode.query.filter_by( + client_id=request.client_id, nonce=nonce + ).first() + return bool(exists) + + :param nonce: A string of "nonce" parameter in request + :param request: OAuth2Request instance + :return: Boolean + """ + raise NotImplementedError() + + def get_jwt_config(self): + """Get the JWT configuration for OpenIDImplicitGrant. The JWT + configuration will be used to generate ``id_token``. 
Developers + MUST implement this method in subclass, e.g.:: + + def get_jwt_config(self): + return { + 'key': read_private_key_file(key_path), + 'alg': 'RS512', + 'iss': 'issuer-identity', + 'exp': 3600 + } + + :return: dict + """ + raise NotImplementedError() + + def generate_user_info(self, user, scope): + """Provide user information for the given scope. Developers + MUST implement this method in subclass, e.g.:: + + from authlib.oidc.core import UserInfo + + def generate_user_info(self, user, scope): + user_info = UserInfo(sub=user.id, name=user.name) + if 'email' in scope: + user_info['email'] = user.email + return user_info + + :param user: user instance + :param scope: scope of the token + :return: ``authlib.oidc.core.UserInfo`` instance + """ + raise NotImplementedError() + + def get_audiences(self, request): + """Parse `aud` value for id_token, default value is client id. Developers + MAY rewrite this method to provide a customized audience value. + """ + client = request.client + return [client.get_client_id()] + + def validate_authorization_request(self): + if not is_openid_scope(self.request.scope): + raise InvalidScopeError( + 'Missing "openid" scope', + redirect_uri=self.request.redirect_uri, + redirect_fragment=True, + ) + redirect_uri = super( + OpenIDImplicitGrant, self).validate_authorization_request() + try: + validate_nonce(self.request, self.exists_nonce, required=True) + except OAuth2Error as error: + error.redirect_uri = redirect_uri + error.redirect_fragment = True + raise error + return redirect_uri + + def validate_consent_request(self): + redirect_uri = self.validate_authorization_request() + validate_request_prompt(self, redirect_uri, redirect_fragment=True) + + def create_authorization_response(self, redirect_uri, grant_user): + state = self.request.state + if grant_user: + params = self.create_granted_params(grant_user) + if state: + params.append(('state', state)) + else: + error = AccessDeniedError(state=state) + params = error.get_body() + + # http://openid.net/specs/oauth-v2-multiple-response-types-1_0.html#ResponseModes + response_mode = self.request.data.get('response_mode', self.DEFAULT_RESPONSE_MODE) + return create_response_mode_response( + redirect_uri=redirect_uri, + params=params, + response_mode=response_mode, + ) + + def create_granted_params(self, grant_user): + self.request.user = grant_user + client = self.request.client + token = self.generate_token( + user=grant_user, + scope=self.request.scope, + include_refresh_token=False + ) + if self.request.response_type == 'id_token': + token = { + 'expires_in': token['expires_in'], + 'scope': token['scope'], + } + token = self.process_implicit_token(token) + else: + log.debug('Grant token %r to %r', token, client) + self.server.save_token(token, self.request) + token = self.process_implicit_token(token) + params = [(k, token[k]) for k in token] + return params + + def process_implicit_token(self, token, code=None): + config = self.get_jwt_config() + config['aud'] = self.get_audiences(self.request) + config['nonce'] = self.request.data.get('nonce') + if code is not None: + config['code'] = code + + user_info = self.generate_user_info(self.request.user, token['scope']) + id_token = generate_id_token(token, user_info, **config) + token['id_token'] = id_token + return token diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/grants/util.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/grants/util.py new file mode 100644 index 000000000..a83a2c948 --- 
/dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/grants/util.py @@ -0,0 +1,155 @@ +import time +import random +from authlib.oauth2.rfc6749 import InvalidRequestError +from authlib.oauth2.rfc6749.util import scope_to_list +from authlib.jose import JWT +from authlib.common.encoding import to_native +from authlib.common.urls import add_params_to_uri, quote_url +from ..util import create_half_hash +from ..errors import ( + LoginRequiredError, + AccountSelectionRequiredError, + ConsentRequiredError, +) + + +def is_openid_scope(scope): + scopes = scope_to_list(scope) + return scopes and 'openid' in scopes + + +def validate_request_prompt(grant, redirect_uri, redirect_fragment=False): + prompt = grant.request.data.get('prompt') + end_user = grant.request.user + if not prompt: + if not end_user: + grant.prompt = 'login' + return grant + + if prompt == 'none' and not end_user: + raise LoginRequiredError( + redirect_uri=redirect_uri, + redirect_fragment=redirect_fragment) + + prompts = prompt.split() + if 'none' in prompts and len(prompts) > 1: + # If this parameter contains none with any other value, + # an error is returned + raise InvalidRequestError( + 'Invalid "prompt" parameter.', + redirect_uri=redirect_uri, + redirect_fragment=redirect_fragment) + + prompt = _guess_prompt_value( + end_user, prompts, redirect_uri, redirect_fragment=redirect_fragment) + if prompt: + grant.prompt = prompt + return grant + + +def validate_nonce(request, exists_nonce, required=False): + nonce = request.data.get('nonce') + if not nonce: + if required: + raise InvalidRequestError('Missing "nonce" in request.') + return True + + if exists_nonce(nonce, request): + raise InvalidRequestError('Replay attack') + + +def generate_id_token( + token, user_info, key, alg, iss, aud, exp, + nonce=None, auth_time=None, code=None): + + payload = _generate_id_token_payload( + alg=alg, iss=iss, aud=aud, exp=exp, nonce=nonce, + auth_time=auth_time, code=code, + access_token=token.get('access_token'), + ) + payload.update(user_info) + return _jwt_encode(alg, payload, key) + + +def create_response_mode_response(redirect_uri, params, response_mode): + if response_mode == 'form_post': + tpl = ( + 'Redirecting' + '' + '
    {}
    ' + ) + inputs = ''.join([ + ''.format( + quote_url(k), quote_url(v)) + for k, v in params + ]) + body = tpl.format(quote_url(redirect_uri), inputs) + return 200, body, [('Content-Type', 'text/html; charset=utf-8')] + + if response_mode == 'query': + uri = add_params_to_uri(redirect_uri, params, fragment=False) + elif response_mode == 'fragment': + uri = add_params_to_uri(redirect_uri, params, fragment=True) + else: + raise InvalidRequestError('Invalid "response_mode" value') + + return 302, '', [('Location', uri)] + + +def _guess_prompt_value(end_user, prompts, redirect_uri, redirect_fragment): + # http://openid.net/specs/openid-connect-core-1_0.html#AuthRequest + + if not end_user and 'login' in prompts: + return 'login' + + if 'consent' in prompts: + if not end_user: + raise ConsentRequiredError( + redirect_uri=redirect_uri, + redirect_fragment=redirect_fragment) + return 'consent' + elif 'select_account' in prompts: + if not end_user: + raise AccountSelectionRequiredError( + redirect_uri=redirect_uri, + redirect_fragment=redirect_fragment) + return 'select_account' + + +def _generate_id_token_payload( + alg, iss, aud, exp, nonce=None, auth_time=None, + code=None, access_token=None): + now = int(time.time()) + if auth_time is None: + auth_time = now + + payload = { + 'iss': iss, + 'aud': aud, + 'iat': now, + 'exp': now + exp, + 'auth_time': auth_time, + } + if nonce: + payload['nonce'] = nonce + + if code: + payload['c_hash'] = to_native(create_half_hash(code, alg)) + + if access_token: + payload['at_hash'] = to_native(create_half_hash(access_token, alg)) + return payload + + +def _jwt_encode(alg, payload, key): + jwt = JWT(algorithms=alg) + header = {'alg': alg} + if isinstance(key, dict): + # JWK set format + if 'keys' in key: + key = random.choice(key['keys']) + header['kid'] = key['kid'] + elif 'kid' in key: + header['kid'] = key['kid'] + + return to_native(jwt.encode(header, payload, key)) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/models.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/models.py new file mode 100644 index 000000000..5f4140507 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/models.py @@ -0,0 +1,13 @@ +from authlib.oauth2.rfc6749 import ( + AuthorizationCodeMixin as _AuthorizationCodeMixin +) + + +class AuthorizationCodeMixin(_AuthorizationCodeMixin): + def get_nonce(self): + """Get "nonce" value of the authorization code object.""" + raise NotImplementedError() + + def get_auth_time(self): + """Get "auth_time" value of the authorization code object.""" + raise NotImplementedError() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/util.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/util.py new file mode 100644 index 000000000..37d23dedb --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/core/util.py @@ -0,0 +1,12 @@ +import hashlib +from authlib.common.encoding import to_bytes, urlsafe_b64encode + + +def create_half_hash(s, alg): + hash_type = 'sha{}'.format(alg[2:]) + hash_alg = getattr(hashlib, hash_type, None) + if not hash_alg: + return None + data_digest = hash_alg(to_bytes(s)).digest() + slice_index = int(len(data_digest) / 2) + return urlsafe_b64encode(data_digest[:slice_index]) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/discovery/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/discovery/__init__.py new 
file mode 100644 index 000000000..1e76401bd --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/discovery/__init__.py @@ -0,0 +1,13 @@ +""" + authlib.oidc.discover + ~~~~~~~~~~~~~~~~~~~~~ + + OpenID Connect Discovery 1.0 Implementation. + + https://openid.net/specs/openid-connect-discovery-1_0.html +""" + +from .models import OpenIDProviderMetadata +from .well_known import get_well_known_url + +__all__ = ['OpenIDProviderMetadata', 'get_well_known_url'] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/discovery/models.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/discovery/models.py new file mode 100644 index 000000000..db1a8046d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/discovery/models.py @@ -0,0 +1,283 @@ +from authlib.oauth2.rfc8414 import AuthorizationServerMetadata +from authlib.oauth2.rfc8414.models import validate_array_value + + +class OpenIDProviderMetadata(AuthorizationServerMetadata): + REGISTRY_KEYS = [ + 'issuer', 'authorization_endpoint', 'token_endpoint', + 'jwks_uri', 'registration_endpoint', 'scopes_supported', + 'response_types_supported', 'response_modes_supported', + 'grant_types_supported', + 'token_endpoint_auth_methods_supported', + 'token_endpoint_auth_signing_alg_values_supported', + 'service_documentation', 'ui_locales_supported', + 'op_policy_uri', 'op_tos_uri', + + # added by OpenID + 'acr_values_supported', 'subject_types_supported', + 'id_token_signing_alg_values_supported', + 'id_token_encryption_alg_values_supported', + 'id_token_encryption_enc_values_supported', + 'userinfo_signing_alg_values_supported', + 'userinfo_encryption_alg_values_supported', + 'userinfo_encryption_enc_values_supported', + 'request_object_signing_alg_values_supported', + 'request_object_encryption_alg_values_supported', + 'request_object_encryption_enc_values_supported', + 'display_values_supported', + 'claim_types_supported', + 'claims_supported', + 'claims_locales_supported', + 'claims_parameter_supported', + 'request_parameter_supported', + 'request_uri_parameter_supported', + 'require_request_uri_registration', + + # not defined by OpenID + # 'revocation_endpoint', + # 'revocation_endpoint_auth_methods_supported', + # 'revocation_endpoint_auth_signing_alg_values_supported', + # 'introspection_endpoint', + # 'introspection_endpoint_auth_methods_supported', + # 'introspection_endpoint_auth_signing_alg_values_supported', + # 'code_challenge_methods_supported', + ] + + def validate_jwks_uri(self): + # REQUIRED in OpenID Connect + jwks_uri = self.get('jwks_uri') + if jwks_uri is None: + raise ValueError('"jwks_uri" is required') + return super(OpenIDProviderMetadata, self).validate_jwks_uri() + + def validate_acr_values_supported(self): + """OPTIONAL. JSON array containing a list of the Authentication + Context Class References that this OP supports. + """ + validate_array_value(self, 'acr_values_supported') + + def validate_subject_types_supported(self): + """REQUIRED. JSON array containing a list of the Subject Identifier + types that this OP supports. Valid types include pairwise and public. + """ + # 1. REQUIRED + values = self.get('subject_types_supported') + if values is None: + raise ValueError('"subject_types_supported" is required') + + # 2. JSON array + if not isinstance(values, list): + raise ValueError('"subject_types_supported" MUST be JSON array') + + # 3. 
Valid types include pairwise and public + valid_types = {'pairwise', 'public'} + if not valid_types.issuperset(set(values)): + raise ValueError( + '"subject_types_supported" contains invalid values') + + def validate_id_token_signing_alg_values_supported(self): + """REQUIRED. JSON array containing a list of the JWS signing + algorithms (alg values) supported by the OP for the ID Token to + encode the Claims in a JWT [JWT]. The algorithm RS256 MUST be + included. The value none MAY be supported, but MUST NOT be used + unless the Response Type used returns no ID Token from the + Authorization Endpoint (such as when using the Authorization + Code Flow). + """ + # 1. REQUIRED + values = self.get('id_token_signing_alg_values_supported') + if values is None: + raise ValueError('"id_token_signing_alg_values_supported" is required') + + # 2. JSON array + if not isinstance(values, list): + raise ValueError('"id_token_signing_alg_values_supported" MUST be JSON array') + + # 3. The algorithm RS256 MUST be included + if 'RS256' not in values: + raise ValueError( + '"RS256" MUST be included in "id_token_signing_alg_values_supported"') + + def validate_id_token_encryption_alg_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWE encryption + algorithms (alg values) supported by the OP for the ID Token to + encode the Claims in a JWT. + """ + validate_array_value(self, 'id_token_encryption_alg_values_supported') + + def validate_id_token_encryption_enc_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWE encryption + algorithms (enc values) supported by the OP for the ID Token to + encode the Claims in a JWT. + """ + validate_array_value(self, 'id_token_encryption_enc_values_supported') + + def validate_userinfo_signing_alg_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWS signing + algorithms (alg values) [JWA] supported by the UserInfo Endpoint + to encode the Claims in a JWT. The value none MAY be included. + """ + validate_array_value(self, 'userinfo_signing_alg_values_supported') + + def validate_userinfo_encryption_alg_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWE encryption + algorithms (alg values) [JWA] supported by the UserInfo Endpoint + to encode the Claims in a JWT. + """ + validate_array_value(self, 'userinfo_encryption_alg_values_supported') + + def validate_userinfo_encryption_enc_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWE encryption + algorithms (enc values) [JWA] supported by the UserInfo Endpoint + to encode the Claims in a JWT. + """ + validate_array_value(self, 'userinfo_encryption_enc_values_supported') + + def validate_request_object_signing_alg_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWS signing + algorithms (alg values) supported by the OP for Request Objects, + which are described in Section 6.1 of OpenID Connect Core 1.0. + These algorithms are used both when the Request Object is passed + by value (using the request parameter) and when it is passed by + reference (using the request_uri parameter). Servers SHOULD support + none and RS256. 
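A trimmed-down usage sketch for this OpenID flavour of the metadata validator, assuming invented URLs and only the REQUIRED entries covered by the validators above::

    metadata = OpenIDProviderMetadata({
        'issuer': 'https://op.example.com',
        'authorization_endpoint': 'https://op.example.com/authorize',
        'token_endpoint': 'https://op.example.com/token',
        'jwks_uri': 'https://op.example.com/jwks.json',
        'response_types_supported': ['code', 'id_token'],
        'subject_types_supported': ['public'],
        'id_token_signing_alg_values_supported': ['RS256'],
    })
    metadata.validate()  # e.g. omitting "jwks_uri" or "RS256" raises ValueError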
+ """ + values = self.get('request_object_signing_alg_values_supported') + if not values: + return + + if not isinstance(values, list): + raise ValueError('"request_object_signing_alg_values_supported" MUST be JSON array') + + # Servers SHOULD support none and RS256 + if 'none' not in values or 'RS256' not in values: + raise ValueError( + '"request_object_signing_alg_values_supported" ' + 'SHOULD support none and RS256') + + def validate_request_object_encryption_alg_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWE encryption + algorithms (alg values) supported by the OP for Request Objects. + These algorithms are used both when the Request Object is passed + by value and when it is passed by reference. + """ + validate_array_value(self, 'request_object_encryption_alg_values_supported') + + def validate_request_object_encryption_enc_values_supported(self): + """OPTIONAL. JSON array containing a list of the JWE encryption + algorithms (enc values) supported by the OP for Request Objects. + These algorithms are used both when the Request Object is passed + by value and when it is passed by reference. + """ + validate_array_value(self, 'request_object_encryption_enc_values_supported') + + def validate_display_values_supported(self): + """OPTIONAL. JSON array containing a list of the display parameter + values that the OpenID Provider supports. These values are described + in Section 3.1.2.1 of OpenID Connect Core 1.0. + """ + values = self.get('display_values_supported') + if not values: + return + + if not isinstance(values, list): + raise ValueError('"display_values_supported" MUST be JSON array') + + valid_values = {'page', 'popup', 'touch', 'wap'} + if not valid_values.issuperset(set(values)): + raise ValueError('"display_values_supported" contains invalid values') + + def validate_claim_types_supported(self): + """OPTIONAL. JSON array containing a list of the Claim Types that + the OpenID Provider supports. These Claim Types are described in + Section 5.6 of OpenID Connect Core 1.0. Values defined by this + specification are normal, aggregated, and distributed. If omitted, + the implementation supports only normal Claims. + """ + values = self.get('claim_types_supported') + if not values: + return + + if not isinstance(values, list): + raise ValueError('"claim_types_supported" MUST be JSON array') + + valid_values = {'normal', 'aggregated', 'distributed'} + if not valid_values.issuperset(set(values)): + raise ValueError('"claim_types_supported" contains invalid values') + + def validate_claims_supported(self): + """RECOMMENDED. JSON array containing a list of the Claim Names + of the Claims that the OpenID Provider MAY be able to supply values + for. Note that for privacy or other reasons, this might not be an + exhaustive list. + """ + validate_array_value(self, 'claims_supported') + + def validate_claims_locales_supported(self): + """OPTIONAL. Languages and scripts supported for values in Claims + being returned, represented as a JSON array of BCP47 [RFC5646] + language tag values. Not all languages and scripts are necessarily + supported for all Claim values. + """ + validate_array_value(self, 'claims_locales_supported') + + def validate_claims_parameter_supported(self): + """OPTIONAL. Boolean value specifying whether the OP supports use of + the claims parameter, with true indicating support. If omitted, the + default value is false. 
+ """ + _validate_boolean_value(self, 'claims_parameter_supported') + + def validate_request_parameter_supported(self): + """OPTIONAL. Boolean value specifying whether the OP supports use of + the request parameter, with true indicating support. If omitted, the + default value is false. + """ + _validate_boolean_value(self, 'request_parameter_supported') + + def validate_request_uri_parameter_supported(self): + """OPTIONAL. Boolean value specifying whether the OP supports use of + the request_uri parameter, with true indicating support. If omitted, + the default value is true. + """ + _validate_boolean_value(self, 'request_uri_parameter_supported') + + def validate_require_request_uri_registration(self): + """OPTIONAL. Boolean value specifying whether the OP requires any + request_uri values used to be pre-registered using the request_uris + registration parameter. Pre-registration is REQUIRED when the value + is true. If omitted, the default value is false. + """ + _validate_boolean_value(self, 'require_request_uri_registration') + + @property + def claim_types_supported(self): + # If omitted, the implementation supports only normal Claims + return self.get('claim_types_supported', ['normal']) + + @property + def claims_parameter_supported(self): + # If omitted, the default value is false. + return self.get('claims_parameter_supported', False) + + @property + def request_parameter_supported(self): + # If omitted, the default value is false. + return self.get('request_parameter_supported', False) + + @property + def request_uri_parameter_supported(self): + # If omitted, the default value is true. + return self.get('request_uri_parameter_supported', True) + + @property + def require_request_uri_registration(self): + # If omitted, the default value is false. + return self.get('require_request_uri_registration', False) + + +def _validate_boolean_value(metadata, key): + if key not in metadata: + return + if metadata[key] not in (True, False): + raise ValueError('"{}" MUST be boolean'.format(key)) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/discovery/well_known.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/discovery/well_known.py new file mode 100644 index 000000000..e3087a143 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/authlib/oidc/discovery/well_known.py @@ -0,0 +1,17 @@ +from authlib.common.urls import urlparse + + +def get_well_known_url(issuer, external=False): + """Get well-known URI with issuer via Section 4.1. 
+ + :param issuer: URL of the issuer + :param external: return full external url or not + :return: URL + """ + # https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationRequest + if external: + return issuer.rstrip('/') + '/.well-known/openid-configuration' + + parsed = urlparse.urlparse(issuer) + path = parsed.path + return path.rstrip('/') + '/.well-known/openid-configuration' diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/INSTALLER b/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/INSTALLER new file mode 100644 index 000000000..ce3133dc1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/INSTALLER @@ -0,0 +1 @@ +Poetry 2.2.1 \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/METADATA b/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/METADATA new file mode 100644 index 000000000..d36820ce0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/METADATA @@ -0,0 +1,2083 @@ +Metadata-Version: 2.3 +Name: black +Version: 24.4.2 +Summary: The uncompromising code formatter. +Project-URL: Changelog, https://github.com/psf/black/blob/main/CHANGES.md +Project-URL: Homepage, https://github.com/psf/black +Author-email: Łukasz Langa +License: MIT +License-File: AUTHORS.md +License-File: LICENSE +Keywords: automation,autopep8,formatter,gofmt,pyfmt,rustfmt,yapf +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Quality Assurance +Requires-Python: >=3.8 +Requires-Dist: click>=8.0.0 +Requires-Dist: mypy-extensions>=0.4.3 +Requires-Dist: packaging>=22.0 +Requires-Dist: pathspec>=0.9.0 +Requires-Dist: platformdirs>=2 +Requires-Dist: tomli>=1.1.0; python_version < '3.11' +Requires-Dist: typing-extensions>=4.0.1; python_version < '3.11' +Provides-Extra: colorama +Requires-Dist: colorama>=0.4.3; extra == 'colorama' +Provides-Extra: d +Requires-Dist: aiohttp!=3.9.0,>=3.7.4; (sys_platform == 'win32' and implementation_name == 'pypy') and extra == 'd' +Requires-Dist: aiohttp>=3.7.4; (sys_platform != 'win32' or implementation_name != 'pypy') and extra == 'd' +Provides-Extra: jupyter +Requires-Dist: ipython>=7.8.0; extra == 'jupyter' +Requires-Dist: tokenize-rt>=3.2.0; extra == 'jupyter' +Provides-Extra: uvloop +Requires-Dist: uvloop>=0.15.2; extra == 'uvloop' +Description-Content-Type: text/markdown + +[![Black Logo](https://raw.githubusercontent.com/psf/black/main/docs/_static/logo2-readme.png)](https://black.readthedocs.io/en/stable/) + +
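Not part of the diff itself, but as orientation for reviewers of the vendored Authlib files above: a minimal sketch of how the two names exported by `authlib.oidc.discovery`, `get_well_known_url` and `OpenIDProviderMetadata`, might be used together. The issuer and endpoint URLs are made-up placeholders, and the example assumes the metadata object can be built from a plain dict (its validators above already read values via `self.get(...)`); only `validate_*` methods defined in the diff are called.

```python
# Illustrative only: exercises the vendored authlib.oidc.discovery helpers.
from authlib.oidc.discovery import OpenIDProviderMetadata, get_well_known_url

issuer = "https://op.example.com"  # placeholder issuer, not a real provider

# Section 4.1 of OpenID Connect Discovery 1.0: the provider configuration
# document lives under /.well-known/openid-configuration.
# external=True returns the absolute URL, external=False only the path.
print(get_well_known_url(issuer, external=True))
# -> https://op.example.com/.well-known/openid-configuration
print(get_well_known_url(issuer))
# -> /.well-known/openid-configuration

# The validate_* methods defined above enforce the Discovery rules, e.g.
# jwks_uri and subject_types_supported are required, and
# id_token_signing_alg_values_supported must include RS256.
metadata = OpenIDProviderMetadata({
    "issuer": issuer,
    "jwks_uri": issuer + "/.well-known/jwks.json",  # placeholder URL
    "subject_types_supported": ["public"],
    "id_token_signing_alg_values_supported": ["RS256"],
})
metadata.validate_jwks_uri()
metadata.validate_subject_types_supported()
metadata.validate_id_token_signing_alg_values_supported()
```

If the inherited `validate()` helper from `AuthorizationServerMetadata` is available, it would additionally run the RFC 8414 checks (for example on `issuer` and `response_types_supported`) across all registered keys.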

+The Uncompromising Code Formatter
+
+[badge images stripped: Actions Status, Documentation Status, Coverage Status, License: MIT, PyPI, Downloads, conda-forge, Code style: black]

    + +> “Any color you like.” + +_Black_ is the uncompromising Python code formatter. By using it, you agree to cede +control over minutiae of hand-formatting. In return, _Black_ gives you speed, +determinism, and freedom from `pycodestyle` nagging about formatting. You will save time +and mental energy for more important matters. + +Blackened code looks the same regardless of the project you're reading. Formatting +becomes transparent after a while and you can focus on the content instead. + +_Black_ makes code review faster by producing the smallest diffs possible. + +Try it out now using the [Black Playground](https://black.vercel.app). Watch the +[PyCon 2019 talk](https://youtu.be/esZLCuWs_2Y) to learn more. + +--- + +**[Read the documentation on ReadTheDocs!](https://black.readthedocs.io/en/stable)** + +--- + +## Installation and usage + +### Installation + +_Black_ can be installed by running `pip install black`. It requires Python 3.8+ to run. +If you want to format Jupyter Notebooks, install with `pip install "black[jupyter]"`. + +If you can't wait for the latest _hotness_ and want to install from GitHub, use: + +`pip install git+https://github.com/psf/black` + +### Usage + +To get started right away with sensible defaults: + +```sh +black {source_file_or_directory} +``` + +You can run _Black_ as a package if running it as a script doesn't work: + +```sh +python -m black {source_file_or_directory} +``` + +Further information can be found in our docs: + +- [Usage and Configuration](https://black.readthedocs.io/en/stable/usage_and_configuration/index.html) + +_Black_ is already [successfully used](https://github.com/psf/black#used-by) by many +projects, small and big. _Black_ has a comprehensive test suite, with efficient parallel +tests, and our own auto formatting and parallel Continuous Integration runner. Now that +we have become stable, you should not expect large formatting changes in the future. +Stylistic changes will mostly be responses to bug reports and support for new Python +syntax. For more information please refer to +[The Black Code Style](https://black.readthedocs.io/en/stable/the_black_code_style/index.html). + +Also, as a safety measure which slows down processing, _Black_ will check that the +reformatted code still produces a valid AST that is effectively equivalent to the +original (see the +[Pragmatism](https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#ast-before-and-after-formatting) +section for details). If you're feeling confident, use `--fast`. + +## The _Black_ code style + +_Black_ is a PEP 8 compliant opinionated formatter. _Black_ reformats entire files in +place. Style configuration options are deliberately limited and rarely added. It doesn't +take previous formatting into account (see +[Pragmatism](https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#pragmatism) +for exceptions). + +Our documentation covers the current _Black_ code style, but planned changes to it are +also documented. 
They're both worth taking a look at: + +- [The _Black_ Code Style: Current style](https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html) +- [The _Black_ Code Style: Future style](https://black.readthedocs.io/en/stable/the_black_code_style/future_style.html) + +Changes to the _Black_ code style are bound by the Stability Policy: + +- [The _Black_ Code Style: Stability Policy](https://black.readthedocs.io/en/stable/the_black_code_style/index.html#stability-policy) + +Please refer to this document before submitting an issue. What seems like a bug might be +intended behaviour. + +### Pragmatism + +Early versions of _Black_ used to be absolutist in some respects. They took after its +initial author. This was fine at the time as it made the implementation simpler and +there were not many users anyway. Not many edge cases were reported. As a mature tool, +_Black_ does make some exceptions to rules it otherwise holds. + +- [The _Black_ code style: Pragmatism](https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#pragmatism) + +Please refer to this document before submitting an issue just like with the document +above. What seems like a bug might be intended behaviour. + +## Configuration + +_Black_ is able to read project-specific default values for its command line options +from a `pyproject.toml` file. This is especially useful for specifying custom +`--include` and `--exclude`/`--force-exclude`/`--extend-exclude` patterns for your +project. + +You can find more details in our documentation: + +- [The basics: Configuration via a file](https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#configuration-via-a-file) + +And if you're looking for more general configuration documentation: + +- [Usage and Configuration](https://black.readthedocs.io/en/stable/usage_and_configuration/index.html) + +**Pro-tip**: If you're asking yourself "Do I need to configure anything?" the answer is +"No". _Black_ is all about sensible defaults. Applying those defaults will have your +code in compliance with many other _Black_ formatted projects. + +## Used by + +The following notable open-source projects trust _Black_ with enforcing a consistent +code style: pytest, tox, Pyramid, Django, Django Channels, Hypothesis, attrs, +SQLAlchemy, Poetry, PyPA applications (Warehouse, Bandersnatch, Pipenv, virtualenv), +pandas, Pillow, Twisted, LocalStack, every Datadog Agent Integration, Home Assistant, +Zulip, Kedro, OpenOA, FLORIS, ORBIT, WOMBAT, and many more. + +The following organizations use _Black_: Facebook, Dropbox, KeepTruckin, Lyft, Mozilla, +Quora, Duolingo, QuantumBlack, Tesla, Archer Aviation. + +Are we missing anyone? Let us know. + +## Testimonials + +**Mike Bayer**, [author of `SQLAlchemy`](https://www.sqlalchemy.org/): + +> I can't think of any single tool in my entire programming career that has given me a +> bigger productivity increase by its introduction. I can now do refactorings in about +> 1% of the keystrokes that it would have taken me previously when we had no way for +> code to format itself. + +**Dusty Phillips**, +[writer](https://smile.amazon.com/s/ref=nb_sb_noss?url=search-alias%3Daps&field-keywords=dusty+phillips): + +> _Black_ is opinionated so you don't have to be. + +**Hynek Schlawack**, [creator of `attrs`](https://www.attrs.org/), core developer of +Twisted and CPython: + +> An auto-formatter that doesn't suck is all I want for Xmas! 
+ +**Carl Meyer**, [Django](https://www.djangoproject.com/) core developer: + +> At least the name is good. + +**Kenneth Reitz**, creator of [`requests`](https://requests.readthedocs.io/en/latest/) +and [`pipenv`](https://readthedocs.org/projects/pipenv/): + +> This vastly improves the formatting of our code. Thanks a ton! + +## Show your style + +Use the badge in your project's README.md: + +```md +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) +``` + +Using the badge in README.rst: + +``` +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black +``` + +Looks like this: +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) + +## License + +MIT + +## Contributing + +Welcome! Happy to see you willing to make the project better. You can get started by +reading this: + +- [Contributing: The basics](https://black.readthedocs.io/en/latest/contributing/the_basics.html) + +You can also take a look at the rest of the contributing docs or talk with the +developers: + +- [Contributing documentation](https://black.readthedocs.io/en/latest/contributing/index.html) +- [Chat on Discord](https://discord.gg/RtVdv86PrH) + +## Change log + +The log has become rather long. It moved to its own file. + +See [CHANGES](https://black.readthedocs.io/en/latest/change_log.html). + +## Authors + +The author list is quite long nowadays, so it lives in its own file. + +See [AUTHORS.md](./AUTHORS.md) + +## Code of Conduct + +Everyone participating in the _Black_ project, and in particular in the issue tracker, +pull requests, and social media activity, is expected to treat other people with respect +and more generally to follow the guidelines articulated in the +[Python Community Code of Conduct](https://www.python.org/psf/codeofconduct/). + +At the same time, humor is encouraged. In fact, basic familiarity with Monty Python's +Flying Circus is expected. We are not savages. + +And if you _really_ need to slap somebody, do it with a fish while dancing. +# Change Log + +## 24.4.2 + +This is a bugfix release to fix two regressions in the new f-string parser introduced in +24.4.1. + +### Parser + +- Fix regression where certain complex f-strings failed to parse (#4332) + +### Performance + +- Fix bad performance on certain complex string literals (#4331) + +## 24.4.1 + +### Highlights + +- Add support for the new Python 3.12 f-string syntax introduced by PEP 701 (#3822) + +### Stable style + +- Fix crash involving indented dummy functions containing newlines (#4318) + +### Parser + +- Add support for type parameter defaults, a new syntactic feature added to Python 3.13 + by PEP 696 (#4327) + +### Integrations + +- Github Action now works even when `git archive` is skipped (#4313) + +## 24.4.0 + +### Stable style + +- Fix unwanted crashes caused by AST equivalency check (#4290) + +### Preview style + +- `if` guards in `case` blocks are now wrapped in parentheses when the line is too long. + (#4269) +- Stop moving multiline strings to a new line unless inside brackets (#4289) + +### Integrations + +- Add a new option `use_pyproject` to the GitHub Action `psf/black`. This will read the + Black version from `pyproject.toml`. (#4294) + +## 24.3.0 + +### Highlights + +This release is a milestone: it fixes Black's first CVE security vulnerability. 
If you +run Black on untrusted input, or if you habitually put thousands of leading tab +characters in your docstrings, you are strongly encouraged to upgrade immediately to fix +[CVE-2024-21503](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-21503). + +This release also fixes a bug in Black's AST safety check that allowed Black to make +incorrect changes to certain f-strings that are valid in Python 3.12 and higher. + +### Stable style + +- Don't move comments along with delimiters, which could cause crashes (#4248) +- Strengthen AST safety check to catch more unsafe changes to strings. Previous versions + of Black would incorrectly format the contents of certain unusual f-strings containing + nested strings with the same quote type. Now, Black will crash on such strings until + support for the new f-string syntax is implemented. (#4270) +- Fix a bug where line-ranges exceeding the last code line would not work as expected + (#4273) + +### Performance + +- Fix catastrophic performance on docstrings that contain large numbers of leading tab + characters. This fixes + [CVE-2024-21503](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-21503). + (#4278) + +### Documentation + +- Note what happens when `--check` is used with `--quiet` (#4236) + +## 24.2.0 + +### Stable style + +- Fixed a bug where comments were mistakenly removed along with redundant parentheses + (#4218) + +### Preview style + +- Move the `hug_parens_with_braces_and_square_brackets` feature to the unstable style + due to an outstanding crash and proposed formatting tweaks (#4198) +- Fixed a bug where base expressions caused inconsistent formatting of \*\* in ternary + expressions (#4154) +- Check for a newline before adding one on a docstring that is almost at the line limit + (#4185) +- Remove redundant parentheses in `case` statement `if` guards (#4214). + +### Configuration + +- Fix issue where _Black_ would ignore input files in the presence of symlinks (#4222) +- _Black_ now ignores `pyproject.toml` that is missing a `tool.black` section when + discovering project root and configuration. Since _Black_ continues to use version + control as an indicator of project root, this is expected to primarily change behavior + for users in a monorepo setup (desirably). If you wish to preserve previous behavior, + simply add an empty `[tool.black]` to the previously discovered `pyproject.toml` + (#4204) + +### Output + +- Black will swallow any `SyntaxWarning`s or `DeprecationWarning`s produced by the `ast` + module when performing equivalence checks (#4189) + +### Integrations + +- Add a JSONSchema and provide a validate-pyproject entry-point (#4181) + +## 24.1.1 + +Bugfix release to fix a bug that made Black unusable on certain file systems with strict +limits on path length.
+ +### Preview style + +- Consistently add trailing comma on typed parameters (#4164) + +### Configuration + +- Shorten the length of the name of the cache file to fix crashes on file systems that + do not support long paths (#4176) + +## 24.1.0 + +### Highlights + +This release introduces the new 2024 stable style (#4106), stabilizing the following +changes: + +- Add parentheses around `if`-`else` expressions (#2278) +- Dummy class and function implementations consisting only of `...` are formatted more + compactly (#3796) +- If an assignment statement is too long, we now prefer splitting on the right-hand side + (#3368) +- Hex codes in Unicode escape sequences are now standardized to lowercase (#2916) +- Allow empty first lines at the beginning of most blocks (#3967, #4061) +- Add parentheses around long type annotations (#3899) +- Enforce newline after module docstrings (#3932, #4028) +- Fix incorrect magic trailing comma handling in return types (#3916) +- Remove blank lines before class docstrings (#3692) +- Wrap multiple context managers in parentheses if combined in a single `with` statement + (#3489) +- Fix bug in line length calculations for power operations (#3942) +- Add trailing commas to collection literals even if there's a comment after the last + entry (#3393) +- When using `--skip-magic-trailing-comma` or `-C`, trailing commas are stripped from + subscript expressions with more than 1 element (#3209) +- Add extra blank lines in stubs in a few cases (#3564, #3862) +- Accept raw strings as docstrings (#3947) +- Split long lines in case blocks (#4024) +- Stop removing spaces from walrus operators within subscripts (#3823) +- Fix incorrect formatting of certain async statements (#3609) +- Allow combining `# fmt: skip` with other comments (#3959) + +There are already a few improvements in the `--preview` style, which are slated for the +2025 stable style. Try them out and +[share your feedback](https://github.com/psf/black/issues). In the past, the preview +style has included some features that we were not able to stabilize. This year, we're +adding a separate `--unstable` style for features with known problems. Now, the +`--preview` style only includes features that we actually expect to make it into next +year's stable style. + +### Stable style + +Several bug fixes were made in features that are moved to the stable style in this +release: + +- Fix comment handling when parenthesising conditional expressions (#4134) +- Fix bug where spaces were not added around parenthesized walruses in subscripts, + unlike other binary operators (#4109) +- Remove empty lines before docstrings in async functions (#4132) +- Address a missing case in the change to allow empty lines at the beginning of all + blocks, except immediately before a docstring (#4130) +- For stubs, fix logic to enforce empty line after nested classes with bodies (#4141) + +### Preview style + +- Add `--unstable` style, covering preview features that have known problems that would + block them from going into the stable style. 
Also add the `--enable-unstable-feature` + flag; for example, use + `--enable-unstable-feature hug_parens_with_braces_and_square_brackets` to apply this + preview feature throughout 2024, even if a later Black release downgrades the feature + to unstable (#4096) +- Format module docstrings the same as class and function docstrings (#4095) +- Fix crash when using a walrus in a dictionary (#4155) +- Fix unnecessary parentheses when wrapping long dicts (#4135) +- Stop normalizing spaces before `# fmt: skip` comments (#4146) + +### Configuration + +- Print warning when configuration in `pyproject.toml` contains an invalid key (#4165) +- Fix symlink handling, properly ignoring symlinks that point outside of root (#4161) +- Fix cache mtime logic that resulted in false positive cache hits (#4128) +- Remove the long-deprecated `--experimental-string-processing` flag. This feature can + currently be enabled with `--preview --enable-unstable-feature string_processing`. + (#4096) + +### Integrations + +- Revert the change to run Black's pre-commit integration only on specific git hooks + (#3940) for better compatibility with older versions of pre-commit (#4137) + +## 23.12.1 + +### Packaging + +- Fixed a bug that included dependencies from the `d` extra by default (#4108) + +## 23.12.0 + +### Highlights + +It's almost 2024, which means it's time for a new edition of _Black_'s stable style! +Together with this release, we'll put out an alpha release 24.1a1 showcasing the draft +2024 stable style, which we'll finalize in the January release. Please try it out and +[share your feedback](https://github.com/psf/black/issues/4042). + +This release (23.12.0) will still produce the 2023 style. Most but not all of the +changes in `--preview` mode will be in the 2024 stable style. + +### Stable style + +- Fix bug where `# fmt: off` automatically dedents when used with the `--line-ranges` + option, even when it is not within the specified line range. (#4084) +- Fix feature detection for parenthesized context managers (#4104) + +### Preview style + +- Prefer more equal signs before a break when splitting chained assignments (#4010) +- Standalone form feed characters at the module level are no longer removed (#4021) +- Additional cases of immediately nested tuples, lists, and dictionaries are now + indented less (#4012) +- Allow empty lines at the beginning of all blocks, except immediately before a + docstring (#4060) +- Fix crash in preview mode when using a short `--line-length` (#4086) +- Keep suites consisting of only an ellipsis on their own lines if they are not + functions or class definitions (#4066) (#4103) + +### Configuration + +- `--line-ranges` now skips _Black_'s internal stability check in `--safe` mode. This + avoids a crash on rare inputs that have many unformatted same-content lines. 
(#4034) + +### Packaging + +- Upgrade to mypy 1.7.1 (#4049) (#4069) +- Faster compiled wheels are now available for CPython 3.12 (#4070) + +### Integrations + +- Enable 3.12 CI (#4035) +- Build docker images in parallel (#4054) +- Build docker images with 3.12 (#4055) + +## 23.11.0 + +### Highlights + +- Support formatting ranges of lines with the new `--line-ranges` command-line option + (#4020) + +### Stable style + +- Fix crash on formatting bytes strings that look like docstrings (#4003) +- Fix crash when whitespace followed a backslash before newline in a docstring (#4008) +- Fix standalone comments inside complex blocks crashing Black (#4016) +- Fix crash on formatting code like `await (a ** b)` (#3994) +- No longer treat leading f-strings as docstrings. This matches Python's behaviour and + fixes a crash (#4019) + +### Preview style + +- Multiline dicts and lists that are the sole argument to a function are now indented + less (#3964) +- Multiline unpacked dicts and lists as the sole argument to a function are now also + indented less (#3992) +- In f-string debug expressions, quote types that are visible in the final string are + now preserved (#4005) +- Fix a bug where long `case` blocks were not split into multiple lines. Also enable + general trailing comma rules on `case` blocks (#4024) +- Keep requiring two empty lines between module-level docstring and first function or + class definition (#4028) +- Add support for single-line format skip with other comments on the same line (#3959) + +### Configuration + +- Consistently apply force exclusion logic before resolving symlinks (#4015) +- Fix a bug in the matching of absolute path names in `--include` (#3976) + +### Performance + +- Fix mypyc builds on arm64 on macOS (#4017) + +### Integrations + +- Black's pre-commit integration will now run only on git hooks appropriate for a code + formatter (#3940) + +## 23.10.1 + +### Highlights + +- Maintenance release to get a fix out for GitHub Action edge case (#3957) + +### Preview style + +- Fix merging implicit multiline strings that have inline comments (#3956) +- Allow empty first line after block open before a comment or compound statement (#3967) + +### Packaging + +- Change Dockerfile to hatch + compile black (#3965) + +### Integrations + +- The summary output for GitHub workflows is now suppressible using the `summary` + parameter. (#3958) +- Fix the action failing when Black check doesn't pass (#3957) + +### Documentation + +- It is known Windows documentation CI is broken + https://github.com/psf/black/issues/3968 + +## 23.10.0 + +### Stable style + +- Fix comments getting removed from inside parenthesized strings (#3909) + +### Preview style + +- Fix long lines with power operators getting split before the line length (#3942) +- Long type hints are now wrapped in parentheses and properly indented when split across + multiple lines (#3899) +- Magic trailing commas are now respected in return types. (#3916) +- Require one empty line after module-level docstrings. 
(#3932) +- Treat raw triple-quoted strings as docstrings (#3947) + +### Configuration + +- Fix cache versioning logic when `BLACK_CACHE_DIR` is set (#3937) + +### Parser + +- Fix bug where attributes named `type` were not accepted inside `match` statements + (#3950) +- Add support for PEP 695 type aliases containing lambdas and other unusual expressions + (#3949) + +### Output + +- Black no longer attempts to provide special errors for attempting to format Python 2 + code (#3933) +- Black will more consistently print stacktraces on internal errors in verbose mode + (#3938) + +### Integrations + +- The action output displayed in the job summary is now wrapped in Markdown (#3914) + +## 23.9.1 + +Due to various issues, the previous release (23.9.0) did not include compiled mypyc +wheels, which make Black significantly faster. These issues have now been fixed, and +this release should come with compiled wheels once again. + +There will be no wheels for Python 3.12 due to a bug in mypyc. We will provide 3.12 +wheels in a future release as soon as the mypyc bug is fixed. + +### Packaging + +- Upgrade to mypy 1.5.1 (#3864) + +### Performance + +- Store raw tuples instead of NamedTuples in Black's cache, improving performance and + decreasing the size of the cache (#3877) + +## 23.9.0 + +### Preview style + +- More concise formatting for dummy implementations (#3796) +- In stub files, add a blank line between a statement with a body (e.g an + `if sys.version_info > (3, x):`) and a function definition on the same level (#3862) +- Fix a bug whereby spaces were removed from walrus operators within subscript(#3823) + +### Configuration + +- Black now applies exclusion and ignore logic before resolving symlinks (#3846) + +### Performance + +- Avoid importing `IPython` if notebook cells do not contain magics (#3782) +- Improve caching by comparing file hashes as fallback for mtime and size (#3821) + +### _Blackd_ + +- Fix an issue in `blackd` with single character input (#3558) + +### Integrations + +- Black now has an + [official pre-commit mirror](https://github.com/psf/black-pre-commit-mirror). Swapping + `https://github.com/psf/black` to `https://github.com/psf/black-pre-commit-mirror` in + your `.pre-commit-config.yaml` will make Black about 2x faster (#3828) +- The `.black.env` folder specified by `ENV_PATH` will now be removed on the completion + of the GitHub Action (#3759) + +## 23.7.0 + +### Highlights + +- Runtime support for Python 3.7 has been removed. 
Formatting 3.7 code will still be + supported until further notice (#3765) + +### Stable style + +- Fix a bug where an illegal trailing comma was added to return type annotations using + PEP 604 unions (#3735) +- Fix several bugs and crashes where comments in stub files were removed or mishandled + under some circumstances (#3745) +- Fix a crash with multi-line magic comments like `type: ignore` within parentheses + (#3740) +- Fix error in AST validation when _Black_ removes trailing whitespace in a type comment + (#3773) + +### Preview style + +- Implicitly concatenated strings used as function args are no longer wrapped inside + parentheses (#3640) +- Remove blank lines between a class definition and its docstring (#3692) + +### Configuration + +- The `--workers` argument to _Black_ can now be specified via the `BLACK_NUM_WORKERS` + environment variable (#3743) +- `.pytest_cache`, `.ruff_cache` and `.vscode` are now excluded by default (#3691) +- Fix _Black_ not honouring `pyproject.toml` settings when running `--stdin-filename` + and the `pyproject.toml` found isn't in the current working directory (#3719) +- _Black_ will now error if `exclude` and `extend-exclude` have invalid data types in + `pyproject.toml`, instead of silently doing the wrong thing (#3764) + +### Packaging + +- Upgrade mypyc from 0.991 to 1.3 (#3697) +- Remove patching of Click that mitigated errors on Python 3.6 with `LANG=C` (#3768) + +### Parser + +- Add support for the new PEP 695 syntax in Python 3.12 (#3703) + +### Performance + +- Speed up _Black_ significantly when the cache is full (#3751) +- Avoid importing `IPython` in a case where we wouldn't need it (#3748) + +### Output + +- Use aware UTC datetimes internally, avoids deprecation warning on Python 3.12 (#3728) +- Change verbose logging to exactly mirror _Black_'s logic for source discovery (#3749) + +### _Blackd_ + +- The `blackd` argument parser now shows the default values for options in their help + text (#3712) + +### Integrations + +- Black is now tested with + [`PYTHONWARNDEFAULTENCODING = 1`](https://docs.python.org/3/library/io.html#io-encoding-warning) + (#3763) +- Update GitHub Action to display black output in the job summary (#3688) + +### Documentation + +- Add a CITATION.cff file to the root of the repository, containing metadata on how to + cite this software (#3723) +- Update the _classes_ and _exceptions_ documentation in Developer reference to match + the latest code base (#3755) + +## 23.3.0 + +### Highlights + +This release fixes a longstanding confusing behavior in Black's GitHub action, where the +version of the action did not determine the version of Black being run (issue #3382). In +addition, there is a small bug fix around imports and a number of improvements to the +preview style. + +Please try out the +[preview style](https://black.readthedocs.io/en/stable/the_black_code_style/future_style.html#preview-style) +with `black --preview` and tell us your feedback. All changes in the preview style are +expected to become part of Black's stable style in January 2024. + +### Stable style + +- Import lines with `# fmt: skip` and `# fmt: off` no longer have an extra blank line + added when they are right after another import line (#3610) + +### Preview style + +- Add trailing commas to collection literals even if there's a comment after the last + entry (#3393) +- `async def`, `async for`, and `async with` statements are now formatted consistently + compared to their non-async version. 
(#3609) +- `with` statements that contain two context managers will be consistently wrapped in + parentheses (#3589) +- Let string splitters respect [East Asian Width](https://www.unicode.org/reports/tr11/) + (#3445) +- Now long string literals can be split after East Asian commas and periods (`、` U+3001 + IDEOGRAPHIC COMMA, `。` U+3002 IDEOGRAPHIC FULL STOP, & `,` U+FF0C FULLWIDTH COMMA) + besides before spaces (#3445) +- For stubs, enforce one blank line after a nested class with a body other than just + `...` (#3564) +- Improve handling of multiline strings by changing line split behavior (#1879) + +### Parser + +- Added support for formatting files with invalid type comments (#3594) + +### Integrations + +- Update GitHub Action to use the version of Black equivalent to action's version if + version input is not specified (#3543) +- Fix missing Python binary path in autoload script for vim (#3508) + +### Documentation + +- Document that only the most recent release is supported for security issues; + vulnerabilities should be reported through Tidelift (#3612) + +## 23.1.0 + +### Highlights + +This is the first release of 2023, and following our +[stability policy](https://black.readthedocs.io/en/stable/the_black_code_style/index.html#stability-policy), +it comes with a number of improvements to our stable style, including improvements to +empty line handling, removal of redundant parentheses in several contexts, and output +that highlights implicitly concatenated strings better. + +There are also many changes to the preview style; try out `black --preview` and give us +feedback to help us set the stable style for next year. + +In addition to style changes, Black now automatically infers the supported Python +versions from your `pyproject.toml` file, removing the need to set Black's target +versions separately. + +### Stable style + +- Introduce the 2023 stable style, which incorporates most aspects of last year's + preview style (#3418). 
Specific changes: + - Enforce empty lines before classes and functions with sticky leading comments + (#3302) (22.12.0) + - Reformat empty and whitespace-only files as either an empty file (if no newline is + present) or as a single newline character (if a newline is present) (#3348) + (22.12.0) + - Implicitly concatenated strings used as function args are now wrapped inside + parentheses (#3307) (22.12.0) + - Correctly handle trailing commas that are inside a line's leading non-nested parens + (#3370) (22.12.0) + - `--skip-string-normalization` / `-S` now prevents docstring prefixes from being + normalized as expected (#3168) (since 22.8.0) + - When using `--skip-magic-trailing-comma` or `-C`, trailing commas are stripped from + subscript expressions with more than 1 element (#3209) (22.8.0) + - Implicitly concatenated strings inside a list, set, or tuple are now wrapped inside + parentheses (#3162) (22.8.0) + - Fix a string merging/split issue when a comment is present in the middle of + implicitly concatenated strings on its own line (#3227) (22.8.0) + - Docstring quotes are no longer moved if it would violate the line length limit + (#3044, #3430) (22.6.0) + - Parentheses around return annotations are now managed (#2990) (22.6.0) + - Remove unnecessary parentheses around awaited objects (#2991) (22.6.0) + - Remove unnecessary parentheses in `with` statements (#2926) (22.6.0) + - Remove trailing newlines after code block open (#3035) (22.6.0) + - Code cell separators `#%%` are now standardised to `# %%` (#2919) (22.3.0) + - Remove unnecessary parentheses from `except` statements (#2939) (22.3.0) + - Remove unnecessary parentheses from tuple unpacking in `for` loops (#2945) (22.3.0) + - Avoid magic-trailing-comma in single-element subscripts (#2942) (22.3.0) +- Fix a crash when a colon line is marked between `# fmt: off` and `# fmt: on` (#3439) + +### Preview style + +- Format hex codes in unicode escape sequences in string literals (#2916) +- Add parentheses around `if`-`else` expressions (#2278) +- Improve performance on large expressions that contain many strings (#3467) +- Fix a crash in preview style with assert + parenthesized string (#3415) +- Fix crashes in preview style with walrus operators used in function return annotations + and except clauses (#3423) +- Fix a crash in preview advanced string processing where mixed implicitly concatenated + regular and f-strings start with an empty span (#3463) +- Fix a crash in preview advanced string processing where a standalone comment is placed + before a dict's value (#3469) +- Fix an issue where extra empty lines are added when a decorator has `# fmt: skip` + applied or there is a standalone comment between decorators (#3470) +- Do not put the closing quotes in a docstring on a separate line, even if the line is + too long (#3430) +- Long values in dict literals are now wrapped in parentheses; correspondingly + unnecessary parentheses around short values in dict literals are now removed; long + string lambda values are now wrapped in parentheses (#3440) +- Fix two crashes in preview style involving edge cases with docstrings (#3451) +- Exclude string type annotations from improved string processing; fix crash when the + return type annotation is stringified and spans across multiple lines (#3462) +- Wrap multiple context managers in parentheses when targeting Python 3.9+ (#3489) +- Fix several crashes in preview style with walrus operators used in `with` statements + or tuples (#3473) +- Fix an invalid quote escaping bug in f-string 
expressions where it produced invalid + code. Implicitly concatenated f-strings with different quotes can now be merged or + quote-normalized by changing the quotes used in expressions. (#3509) +- Fix crash on `await (yield)` when Black is compiled with mypyc (#3533) + +### Configuration + +- Black now tries to infer its `--target-version` from the project metadata specified in + `pyproject.toml` (#3219) + +### Packaging + +- Upgrade mypyc from `0.971` to `0.991` so mypycified _Black_ can be built on armv7 + (#3380) + - This also fixes some crashes while using compiled Black with a debug build of + CPython +- Drop specific support for the `tomli` requirement on 3.11 alpha releases, working + around a bug that would cause the requirement not to be installed on any non-final + Python releases (#3448) +- Black now depends on `packaging` version `22.0` or later. This is required for new + functionality that needs to parse part of the project metadata (#3219) + +### Output + +- Calling `black --help` multiple times will return the same help contents each time + (#3516) +- Verbose logging now shows the values of `pyproject.toml` configuration variables + (#3392) +- Fix false symlink detection messages in verbose output due to using an incorrect + relative path to the project root (#3385) + +### Integrations + +- Move 3.11 CI to normal flow now that all dependencies support 3.11 (#3446) +- Docker: Add new `latest_prerelease` tag automation to follow latest black alpha + release on docker images (#3465) + +### Documentation + +- Expand `vim-plug` installation instructions to offer more explicit options (#3468) + +## 22.12.0 + +### Preview style + +- Enforce empty lines before classes and functions with sticky leading comments (#3302) +- Reformat empty and whitespace-only files as either an empty file (if no newline is + present) or as a single newline character (if a newline is present) (#3348) +- Implicitly concatenated strings used as function args are now wrapped inside + parentheses (#3307) +- For assignment statements, prefer splitting the right hand side if the left hand side + fits on a single line (#3368) +- Correctly handle trailing commas that are inside a line's leading non-nested parens + (#3370) + +### Configuration + +- Fix incorrectly applied `.gitignore` rules by considering the `.gitignore` location + and the relative path to the target file (#3338) +- Fix incorrectly ignoring `.gitignore` presence when more than one source directory is + specified (#3336) + +### Parser + +- Parsing support has been added for walruses inside generator expression that are + passed as function args (for example, + `any(match := my_re.match(text) for text in texts)`) (#3327). + +### Integrations + +- Vim plugin: Optionally allow using the system installation of Black via + `let g:black_use_virtualenv = 0`(#3309) + +## 22.10.0 + +### Highlights + +- Runtime support for Python 3.6 has been removed. Formatting 3.6 code will still be + supported until further notice. 
+ +### Stable style + +- Fix a crash when `# fmt: on` is used on a different block level than `# fmt: off` + (#3281) + +### Preview style + +- Fix a crash when formatting some dicts with parenthesis-wrapped long string keys + (#3262) + +### Configuration + +- `.ipynb_checkpoints` directories are now excluded by default (#3293) +- Add `--skip-source-first-line` / `-x` option to ignore the first line of source code + while formatting (#3299) + +### Packaging + +- Executables made with PyInstaller will no longer crash when formatting several files + at once on macOS. Native x86-64 executables for macOS are available once again. + (#3275) +- Hatchling is now used as the build backend. This will not have any effect for users + who install Black with its wheels from PyPI. (#3233) +- Faster compiled wheels are now available for CPython 3.11 (#3276) + +### _Blackd_ + +- Windows style (CRLF) newlines will be preserved (#3257). + +### Integrations + +- Vim plugin: add flag (`g:black_preview`) to enable/disable the preview style (#3246) +- Update GitHub Action to support formatting of Jupyter Notebook files via a `jupyter` + option (#3282) +- Update GitHub Action to support use of version specifiers (e.g. `<23`) for Black + version (#3265) + +## 22.8.0 + +### Highlights + +- Python 3.11 is now supported, except for _blackd_ as aiohttp does not support 3.11 as + of publishing (#3234) +- This is the last release that supports running _Black_ on Python 3.6 (formatting 3.6 + code will continue to be supported until further notice) +- Reword the stability policy to say that we may, in rare cases, make changes that + affect code that was not previously formatted by _Black_ (#3155) + +### Stable style + +- Fix an infinite loop when using `# fmt: on/off` in the middle of an expression or code + block (#3158) +- Fix incorrect handling of `# fmt: skip` on colon (`:`) lines (#3148) +- Comments are no longer deleted when a line had spaces removed around power operators + (#2874) + +### Preview style + +- Single-character closing docstring quotes are no longer moved to their own line as + this is invalid. This was a bug introduced in version 22.6.0. 
(#3166) +- `--skip-string-normalization` / `-S` now prevents docstring prefixes from being + normalized as expected (#3168) +- When using `--skip-magic-trailing-comma` or `-C`, trailing commas are stripped from + subscript expressions with more than 1 element (#3209) +- Implicitly concatenated strings inside a list, set, or tuple are now wrapped inside + parentheses (#3162) +- Fix a string merging/split issue when a comment is present in the middle of implicitly + concatenated strings on its own line (#3227) + +### _Blackd_ + +- `blackd` now supports enabling the preview style via the `X-Preview` header (#3217) + +### Configuration + +- Black now uses the presence of debug f-strings to detect target version (#3215) +- Fix misdetection of project root and verbose logging of sources in cases involving + `--stdin-filename` (#3216) +- Immediate `.gitignore` files in source directories given on the command line are now + also respected, previously only `.gitignore` files in the project root and + automatically discovered directories were respected (#3237) + +### Documentation + +- Recommend using BlackConnect in IntelliJ IDEs (#3150) + +### Integrations + +- Vim plugin: prefix messages with `Black: ` so it's clear they come from Black (#3194) +- Docker: changed to a /opt/venv installation + added to PATH to be available to + non-root users (#3202) + +### Output + +- Change from deprecated `asyncio.get_event_loop()` to create our event loop which + removes DeprecationWarning (#3164) +- Remove logging from internal `blib2to3` library since it regularly emits error logs + about failed caching that can and should be ignored (#3193) + +### Parser + +- Type comments are now included in the AST equivalence check consistently so accidental + deletion raises an error. Though type comments can't be tracked when running on PyPy + 3.7 due to standard library limitations. (#2874) + +### Performance + +- Reduce Black's startup time when formatting a single file by 15-30% (#3211) + +## 22.6.0 + +### Style + +- Fix unstable formatting involving `#fmt: skip` and `# fmt:skip` comments (notice the + lack of spaces) (#2970) + +### Preview style + +- Docstring quotes are no longer moved if it would violate the line length limit (#3044) +- Parentheses around return annotations are now managed (#2990) +- Remove unnecessary parentheses around awaited objects (#2991) +- Remove unnecessary parentheses in `with` statements (#2926) +- Remove trailing newlines after code block open (#3035) + +### Integrations + +- Add `scripts/migrate-black.py` script to ease introduction of Black to a Git project + (#3038) + +### Output + +- Output Python version and implementation as part of `--version` flag (#2997) + +### Packaging + +- Use `tomli` instead of `tomllib` on Python 3.11 builds where `tomllib` is not + available (#2987) + +### Parser + +- [PEP 654](https://peps.python.org/pep-0654/#except) syntax (for example, + `except *ExceptionGroup:`) is now supported (#3016) +- [PEP 646](https://peps.python.org/pep-0646) syntax (for example, + `Array[Batch, *Shape]` or `def fn(*args: *T) -> None`) is now supported (#3071) + +### Vim Plugin + +- Fix `strtobool` function. It didn't parse true/on/false/off. 
(#3025) + +## 22.3.0 + +### Preview style + +- Code cell separators `#%%` are now standardised to `# %%` (#2919) +- Remove unnecessary parentheses from `except` statements (#2939) +- Remove unnecessary parentheses from tuple unpacking in `for` loops (#2945) +- Avoid magic-trailing-comma in single-element subscripts (#2942) + +### Configuration + +- Do not format `__pypackages__` directories by default (#2836) +- Add support for specifying stable version with `--required-version` (#2832). +- Avoid crashing when the user has no homedir (#2814) +- Avoid crashing when md5 is not available (#2905) +- Fix handling of directory junctions on Windows (#2904) + +### Documentation + +- Update pylint config documentation (#2931) + +### Integrations + +- Move test to disable plugin in Vim/Neovim, which speeds up loading (#2896) + +### Output + +- In verbose mode, log when _Black_ is using user-level config (#2861) + +### Packaging + +- Fix Black to work with Click 8.1.0 (#2966) +- On Python 3.11 and newer, use the standard library's `tomllib` instead of `tomli` + (#2903) +- `black-primer`, the deprecated internal devtool, has been removed and copied to a + [separate repository](https://github.com/cooperlees/black-primer) (#2924) + +### Parser + +- Black can now parse starred expressions in the target of `for` and `async for` + statements, e.g `for item in *items_1, *items_2: pass` (#2879). + +## 22.1.0 + +At long last, _Black_ is no longer a beta product! This is the first non-beta release +and the first release covered by our new +[stability policy](https://black.readthedocs.io/en/stable/the_black_code_style/index.html#stability-policy). + +### Highlights + +- **Remove Python 2 support** (#2740) +- Introduce the `--preview` flag (#2752) + +### Style + +- Deprecate `--experimental-string-processing` and move the functionality under + `--preview` (#2789) +- For stubs, one blank line between class attributes and methods is now kept if there's + at least one pre-existing blank line (#2736) +- Black now normalizes string prefix order (#2297) +- Remove spaces around power operators if both operands are simple (#2726) +- Work around bug that causes unstable formatting in some cases in the presence of the + magic trailing comma (#2807) +- Use parentheses for attribute access on decimal float and int literals (#2799) +- Don't add whitespace for attribute access on hexadecimal, binary, octal, and complex + literals (#2799) +- Treat blank lines in stubs the same inside top-level `if` statements (#2820) +- Fix unstable formatting with semicolons and arithmetic expressions (#2817) +- Fix unstable formatting around magic trailing comma (#2572) + +### Parser + +- Fix mapping cases that contain as-expressions, like `case {"key": 1 | 2 as password}` + (#2686) +- Fix cases that contain multiple top-level as-expressions, like `case 1 as a, 2 as b` + (#2716) +- Fix call patterns that contain as-expressions with keyword arguments, like + `case Foo(bar=baz as quux)` (#2749) +- Tuple unpacking on `return` and `yield` constructs now implies 3.8+ (#2700) +- Unparenthesized tuples on annotated assignments (e.g + `values: Tuple[int, ...] = 1, 2, 3`) now implies 3.8+ (#2708) +- Fix handling of standalone `match()` or `case()` when there is a trailing newline or a + comment inside of the parentheses. (#2760) +- `from __future__ import annotations` statement now implies Python 3.7+ (#2690) + +### Performance + +- Speed-up the new backtracking parser about 4X in general (enabled when + `--target-version` is set to 3.10 and higher). 
(#2728) +- _Black_ is now compiled with [mypyc](https://github.com/mypyc/mypyc) for an overall 2x + speed-up. 64-bit Windows, MacOS, and Linux (not including musl) are supported. (#1009, + #2431) + +### Configuration + +- Do not accept bare carriage return line endings in pyproject.toml (#2408) +- Add configuration option (`python-cell-magics`) to format cells with custom magics in + Jupyter Notebooks (#2744) +- Allow setting custom cache directory on all platforms with environment variable + `BLACK_CACHE_DIR` (#2739). +- Enable Python 3.10+ by default, without any extra need to specify + `--target-version=py310`. (#2758) +- Make passing `SRC` or `--code` mandatory and mutually exclusive (#2804) + +### Output + +- Improve error message for invalid regular expression (#2678) +- Improve error message when parsing fails during AST safety check by embedding the + underlying SyntaxError (#2693) +- No longer color diff headers white as it's unreadable in light themed terminals + (#2691) +- Text coloring added in the final statistics (#2712) +- Verbose mode also now describes how a project root was discovered and which paths will + be formatted. (#2526) + +### Packaging + +- All upper version bounds on dependencies have been removed (#2718) +- `typing-extensions` is no longer a required dependency in Python 3.10+ (#2772) +- Set `click` lower bound to `8.0.0` (#2791) + +### Integrations + +- Update GitHub action to support containerized runs (#2748) + +### Documentation + +- Change protocol in pip installation instructions to `https://` (#2761) +- Change HTML theme to Furo primarily for its responsive design and mobile support + (#2793) +- Deprecate the `black-primer` tool (#2809) +- Document Python support policy (#2819) + +## 21.12b0 + +### _Black_ + +- Fix determination of f-string expression spans (#2654) +- Fix bad formatting of error messages about EOF in multi-line statements (#2343) +- Functions and classes in blocks now have more consistent surrounding spacing (#2472) + +#### Jupyter Notebook support + +- Cell magics are now only processed if they are known Python cell magics. Earlier, all + cell magics were tokenized, leading to possible indentation errors e.g. with + `%%writefile`. (#2630) +- Fix assignment to environment variables in Jupyter Notebooks (#2642) + +#### Python 3.10 support + +- Point users to using `--target-version py310` if we detect 3.10-only syntax (#2668) +- Fix `match` statements with open sequence subjects, like `match a, b:` or + `match a, *b:` (#2639) (#2659) +- Fix `match`/`case` statements that contain `match`/`case` soft keywords multiple + times, like `match re.match()` (#2661) +- Fix `case` statements with an inline body (#2665) +- Fix styling of starred expressions inside `match` subject (#2667) +- Fix parser error location on invalid syntax in a `match` statement (#2649) +- Fix Python 3.10 support on platforms without ProcessPoolExecutor (#2631) +- Improve parsing performance on code that uses `match` under `--target-version py310` + up to ~50% (#2670) + +### Packaging + +- Remove dependency on `regex` (#2644) (#2663) + +## 21.11b1 + +### _Black_ + +- Bumped regex version minimum to 2021.4.4 to fix Pattern class usage (#2621) + +## 21.11b0 + +### _Black_ + +- Warn about Python 2 deprecation in more cases by improving Python 2 only syntax + detection (#2592) +- Add experimental PyPy support (#2559) +- Add partial support for the match statement. 
As it's experimental, it's only enabled + when `--target-version py310` is explicitly specified (#2586) +- Add support for parenthesized with (#2586) +- Declare support for Python 3.10 for running Black (#2562) + +### Integrations + +- Fixed vim plugin with Python 3.10 by removing deprecated distutils import (#2610) +- The vim plugin now parses `skip_magic_trailing_comma` from pyproject.toml (#2613) + +## 21.10b0 + +### _Black_ + +- Document stability policy, that will apply for non-beta releases (#2529) +- Add new `--workers` parameter (#2514) +- Fixed feature detection for positional-only arguments in lambdas (#2532) +- Bumped typed-ast version minimum to 1.4.3 for 3.10 compatibility (#2519) +- Fixed a Python 3.10 compatibility issue where the loop argument was still being passed + even though it has been removed (#2580) +- Deprecate Python 2 formatting support (#2523) + +### _Blackd_ + +- Remove dependency on aiohttp-cors (#2500) +- Bump required aiohttp version to 3.7.4 (#2509) + +### _Black-Primer_ + +- Add primer support for --projects (#2555) +- Print primer summary after individual failures (#2570) + +### Integrations + +- Allow to pass `target_version` in the vim plugin (#1319) +- Install build tools in docker file and use multi-stage build to keep the image size + down (#2582) + +## 21.9b0 + +### Packaging + +- Fix missing modules in self-contained binaries (#2466) +- Fix missing toml extra used during installation (#2475) + +## 21.8b0 + +### _Black_ + +- Add support for formatting Jupyter Notebook files (#2357) +- Move from `appdirs` dependency to `platformdirs` (#2375) +- Present a more user-friendly error if .gitignore is invalid (#2414) +- The failsafe for accidentally added backslashes in f-string expressions has been + hardened to handle more edge cases during quote normalization (#2437) +- Avoid changing a function return type annotation's type to a tuple by adding a + trailing comma (#2384) +- Parsing support has been added for unparenthesized walruses in set literals, set + comprehensions, and indices (#2447). +- Pin `setuptools-scm` build-time dependency version (#2457) +- Exclude typing-extensions version 3.10.0.1 due to it being broken on Python 3.10 + (#2460) + +### _Blackd_ + +- Replace sys.exit(-1) with raise ImportError as it plays more nicely with tools that + scan installed packages (#2440) + +### Integrations + +- The provided pre-commit hooks no longer specify `language_version` to avoid overriding + `default_language_version` (#2430) + +## 21.7b0 + +### _Black_ + +- Configuration files using TOML features higher than spec v0.5.0 are now supported + (#2301) +- Add primer support and test for code piped into black via STDIN (#2315) +- Fix internal error when `FORCE_OPTIONAL_PARENTHESES` feature is enabled (#2332) +- Accept empty stdin (#2346) +- Provide a more useful error when parsing fails during AST safety checks (#2304) + +### Docker + +- Add new `latest_release` tag automation to follow latest black release on docker + images (#2374) + +### Integrations + +- The vim plugin now searches upwards from the directory containing the current buffer + instead of the current working directory for pyproject.toml. 
(#1871) +- The vim plugin now reads the correct string normalization option in pyproject.toml + (#1869) +- The vim plugin no longer crashes Black when there's boolean values in pyproject.toml + (#1869) + +## 21.6b0 + +### _Black_ + +- Fix failure caused by `fmt: skip` and indentation (#2281) +- Account for += assignment when deciding whether to split string (#2312) +- Correct max string length calculation when there are string operators (#2292) +- Fixed option usage when using the `--code` flag (#2259) +- Do not call `uvloop.install()` when _Black_ is used as a library (#2303) +- Added `--required-version` option to require a specific version to be running (#2300) +- Fix incorrect custom breakpoint indices when string group contains fake f-strings + (#2311) +- Fix regression where `R` prefixes would be lowercased for docstrings (#2285) +- Fix handling of named escapes (`\N{...}`) when `--experimental-string-processing` is + used (#2319) + +### Integrations + +- The official Black action now supports choosing what version to use, and supports the + major 3 OSes. (#1940) + +## 21.5b2 + +### _Black_ + +- A space is no longer inserted into empty docstrings (#2249) +- Fix handling of .gitignore files containing non-ASCII characters on Windows (#2229) +- Respect `.gitignore` files in all levels, not only `root/.gitignore` file (apply + `.gitignore` rules like `git` does) (#2225) +- Restored compatibility with Click 8.0 on Python 3.6 when LANG=C used (#2227) +- Add extra uvloop install + import support if in python env (#2258) +- Fix --experimental-string-processing crash when matching parens are not found (#2283) +- Make sure to split lines that start with a string operator (#2286) +- Fix regular expression that black uses to identify f-expressions (#2287) + +### _Blackd_ + +- Add a lower bound for the `aiohttp-cors` dependency. Only 0.4.0 or higher is + supported. (#2231) + +### Packaging + +- Release self-contained x86_64 MacOS binaries as part of the GitHub release pipeline + (#2198) +- Always build binaries with the latest available Python (#2260) + +### Documentation + +- Add discussion of magic comments to FAQ page (#2272) +- `--experimental-string-processing` will be enabled by default in the future (#2273) +- Fix typos discovered by codespell (#2228) +- Fix Vim plugin installation instructions. (#2235) +- Add new Frequently Asked Questions page (#2247) +- Fix encoding + symlink issues preventing proper build on Windows (#2262) + +## 21.5b1 + +### _Black_ + +- Refactor `src/black/__init__.py` into many files (#2206) + +### Documentation + +- Replaced all remaining references to the + [`master`](https://github.com/psf/black/tree/main) branch with the + [`main`](https://github.com/psf/black/tree/main) branch. Some additional changes in + the source code were also made. (#2210) +- Significantly reorganized the documentation to make much more sense. Check them out by + heading over to [the stable docs on RTD](https://black.readthedocs.io/en/stable/). + (#2174) + +## 21.5b0 + +### _Black_ + +- Set `--pyi` mode if `--stdin-filename` ends in `.pyi` (#2169) +- Stop detecting target version as Python 3.9+ with pre-PEP-614 decorators that are + being called but with no arguments (#2182) + +### _Black-Primer_ + +- Add `--no-diff` to black-primer to suppress formatting changes (#2187) + +## 21.4b2 + +### _Black_ + +- Fix crash if the user configuration directory is inaccessible. 
(#2158) + +- Clarify + [circumstances](https://github.com/psf/black/blob/master/docs/the_black_code_style.md#pragmatism) + in which _Black_ may change the AST (#2159) + +- Allow `.gitignore` rules to be overridden by specifying `exclude` in `pyproject.toml` + or on the command line. (#2170) + +### _Packaging_ + +- Install `primer.json` (used by `black-primer` by default) with black. (#2154) + +## 21.4b1 + +### _Black_ + +- Fix crash on docstrings ending with "\\ ". (#2142) + +- Fix crash when atypical whitespace is cleaned out of docstrings (#2120) + +- Reflect the `--skip-magic-trailing-comma` and `--experimental-string-processing` flags + in the name of the cache file. Without this fix, changes in these flags would not take + effect if the cache had already been populated. (#2131) + +- Don't remove necessary parentheses from assignment expression containing assert / + return statements. (#2143) + +### _Packaging_ + +- Bump pathspec to >= 0.8.1 to solve invalid .gitignore exclusion handling + +## 21.4b0 + +### _Black_ + +- Fixed a rare but annoying formatting instability created by the combination of + optional trailing commas inserted by `Black` and optional parentheses looking at + pre-existing "magic" trailing commas. This fixes issue #1629 and all of its many many + duplicates. (#2126) + +- `Black` now processes one-line docstrings by stripping leading and trailing spaces, + and adding a padding space when needed to break up """". (#1740) + +- `Black` now cleans up leading non-breaking spaces in comments (#2092) + +- `Black` now respects `--skip-string-normalization` when normalizing multiline + docstring quotes (#1637) + +- `Black` no longer removes all empty lines between non-function code and decorators + when formatting typing stubs. Now `Black` enforces a single empty line. (#1646) + +- `Black` no longer adds an incorrect space after a parenthesized assignment expression + in if/while statements (#1655) + +- Added `--skip-magic-trailing-comma` / `-C` to avoid using trailing commas as a reason + to split lines (#1824) + +- fixed a crash when PWD=/ on POSIX (#1631) + +- fixed "I/O operation on closed file" when using --diff (#1664) + +- Prevent coloured diff output being interleaved with multiple files (#1673) + +- Added support for PEP 614 relaxed decorator syntax on python 3.9 (#1711) + +- Added parsing support for unparenthesized tuples and yield expressions in annotated + assignments (#1835) + +- added `--extend-exclude` argument (PR #2005) + +- speed up caching by avoiding pathlib (#1950) + +- `--diff` correctly indicates when a file doesn't end in a newline (#1662) + +- Added `--stdin-filename` argument to allow stdin to respect `--force-exclude` rules + (#1780) + +- Lines ending with `fmt: skip` will no longer be formatted (#1800) + +- PR #2053: Black no longer relies on typed-ast for Python 3.8 and higher + +- PR #2053: Python 2 support is now optional, install with + `python3 -m pip install black[python2]` to maintain support. 
+ +- Exclude `venv` directory by default (#1683) + +- Fixed "Black produced code that is not equivalent to the source" when formatting + Python 2 docstrings (#2037) + +### _Packaging_ + +- Self-contained native _Black_ binaries are now provided for releases via GitHub + Releases (#1743) + +## 20.8b1 + +### _Packaging_ + +- explicitly depend on Click 7.1.2 or newer as `Black` no longer works with versions + older than 7.0 + +## 20.8b0 + +### _Black_ + +- re-implemented support for explicit trailing commas: now it works consistently within + any bracket pair, including nested structures (#1288 and duplicates) + +- `Black` now reindents docstrings when reindenting code around it (#1053) + +- `Black` now shows colored diffs (#1266) + +- `Black` is now packaged using 'py3' tagged wheels (#1388) + +- `Black` now supports Python 3.8 code, e.g. star expressions in return statements + (#1121) + +- `Black` no longer normalizes capital R-string prefixes as those have a + community-accepted meaning (#1244) + +- `Black` now uses exit code 2 when the specified configuration file doesn't exist (#1361) + +- `Black` now works on AWS Lambda (#1141) + +- added `--force-exclude` argument (#1032) + +- removed deprecated `--py36` option (#1236) + +- fixed `--diff` output when EOF is encountered (#526) + +- fixed `# fmt: off` handling around decorators (#560) + +- fixed unstable formatting with some `# type: ignore` comments (#1113) + +- fixed invalid removal on organizing brackets followed by indexing (#1575) + +- introduced `black-primer`, a CI tool that allows us to run regression tests against + existing open source users of Black (#1402) + +- introduced property-based fuzzing to our test suite based on Hypothesis and + Hypothesmith (#1566) + +- implemented experimental and disabled by default long string rewrapping (#1132), + hidden under a `--experimental-string-processing` flag while it's being worked on; + this is an undocumented and unsupported feature, you lose Internet points for + depending on it (#1609) + +### Vim plugin + +- prefer virtualenv packages over global packages (#1383) + +## 19.10b0 + +- added support for PEP 572 assignment expressions (#711) + +- added support for PEP 570 positional-only arguments (#943) + +- added support for async generators (#593) + +- added support for pre-splitting collections by putting an explicit trailing comma + inside (#826) + +- added `black -c` as a way to format code passed from the command line (#761) + +- --safe now works with Python 2 code (#840) + +- fixed grammar selection for Python 2-specific code (#765) + +- fixed feature detection for trailing commas in function definitions and call sites + (#763) + +- `# fmt: off`/`# fmt: on` comment pairs placed multiple times within the same block of + code now behave correctly (#1005) + +- _Black_ no longer crashes on Windows machines with more than 61 cores (#838) + +- _Black_ no longer crashes on standalone comments prepended with a backslash (#767) + +- _Black_ no longer crashes on `from` ... 
`import` blocks with comments (#829) + +- _Black_ no longer crashes on Python 3.7 on some platform configurations (#494) + +- _Black_ no longer fails on comments in from-imports (#671) + +- _Black_ no longer fails when the file starts with a backslash (#922) + +- _Black_ no longer merges regular comments with type comments (#1027) + +- _Black_ no longer splits long lines that contain type comments (#997) + +- removed unnecessary parentheses around `yield` expressions (#834) + +- added parentheses around long tuples in unpacking assignments (#832) + +- added parentheses around complex powers when they are prefixed by a unary operator + (#646) + +- fixed bug that led _Black_ to format some code with a line length target of 1 (#762) + +- _Black_ no longer introduces quotes in f-string subexpressions on string boundaries + (#863) + +- if _Black_ puts parentheses around a single expression, it moves comments to the + wrapped expression instead of after the brackets (#872) + +- `blackd` now returns the version of _Black_ in the response headers (#1013) + +- `blackd` can now output the diff of formats on source code when the `X-Diff` header is + provided (#969) + +## 19.3b0 + +- new option `--target-version` to control which Python versions _Black_-formatted code + should target (#618) + +- deprecated `--py36` (use `--target-version=py36` instead) (#724) + +- _Black_ no longer normalizes numeric literals to include `_` separators (#696) + +- long `del` statements are now split into multiple lines (#698) + +- type comments are no longer mangled in function signatures + +- improved performance of formatting deeply nested data structures (#509) + +- _Black_ now properly formats multiple files in parallel on Windows (#632) + +- _Black_ now creates cache files atomically which allows it to be used in parallel + pipelines (like `xargs -P8`) (#673) + +- _Black_ now correctly indents comments in files that were previously formatted with + tabs (#262) + +- `blackd` now supports CORS (#622) + +## 18.9b0 + +- numeric literals are now formatted by _Black_ (#452, #461, #464, #469): + + - numeric literals are normalized to include `_` separators on Python 3.6+ code + + - added `--skip-numeric-underscore-normalization` to disable the above behavior and + leave numeric underscores as they were in the input + + - code with `_` in numeric literals is recognized as Python 3.6+ + + - most letters in numeric literals are lowercased (e.g., in `1e10`, `0x01`) + + - hexadecimal digits are always uppercased (e.g. 
`0xBADC0DE`) + +- added `blackd`, see + [its documentation](https://github.com/psf/black/blob/18.9b0/README.md#blackd) for + more info (#349) + +- adjacent string literals are now correctly split into multiple lines (#463) + +- trailing comma is now added to single imports that don't fit on a line (#250) + +- cache is now populated when `--check` is successful for a file which speeds up + consecutive checks of properly formatted unmodified files (#448) + +- whitespace at the beginning of the file is now removed (#399) + +- fixed mangling [pweave](http://mpastell.com/pweave/) and + [Spyder IDE](https://www.spyder-ide.org/) special comments (#532) + +- fixed unstable formatting when unpacking big tuples (#267) + +- fixed parsing of `__future__` imports with renames (#389) + +- fixed scope of `# fmt: off` when directly preceding `yield` and other nodes (#385) + +- fixed formatting of lambda expressions with default arguments (#468) + +- fixed `async for` statements: _Black_ no longer breaks them into separate lines (#372) + +- note: the Vim plugin stopped registering `,=` as a default chord as it turned out to + be a bad idea (#415) + +## 18.6b4 + +- hotfix: don't freeze when multiple comments directly precede `# fmt: off` (#371) + +## 18.6b3 + +- typing stub files (`.pyi`) now have blank lines added after constants (#340) + +- `# fmt: off` and `# fmt: on` are now much more dependable: + + - they now work also within bracket pairs (#329) + + - they now correctly work across function/class boundaries (#335) + + - they now work when an indentation block starts with empty lines or misaligned + comments (#334) + +- made Click not fail on invalid environments; note that Click is right but the + likelihood we'll need to access non-ASCII file paths when dealing with Python source + code is low (#277) + +- fixed improper formatting of f-strings with quotes inside interpolated expressions + (#322) + +- fixed unnecessary slowdown when long list literals were found in a file + +- fixed unnecessary slowdown on AST nodes with very many siblings + +- fixed cannibalizing backslashes during string normalization + +- fixed a crash due to symbolic links pointing outside of the project directory (#338) + +## 18.6b2 + +- added `--config` (#65) + +- added `-h` equivalent to `--help` (#316) + +- fixed improper unmodified file caching when `-S` was used + +- fixed extra space in string unpacking (#305) + +- fixed formatting of empty triple quoted strings (#313) + +- fixed unnecessary slowdown in comment placement calculation on lines without comments + +## 18.6b1 + +- hotfix: don't output human-facing information on stdout (#299) + +- hotfix: don't output cake emoji on non-zero return code (#300) + +## 18.6b0 + +- added `--include` and `--exclude` (#270) + +- added `--skip-string-normalization` (#118) + +- added `--verbose` (#283) + +- the header output in `--diff` now actually conforms to the unified diff spec + +- fixed long trivial assignments being wrapped in unnecessary parentheses (#273) + +- fixed unnecessary parentheses when a line contained multiline strings (#232) + +- fixed stdin handling not working correctly if an old version of Click was used (#276) + +- _Black_ now preserves line endings when formatting a file in place (#258) + +## 18.5b1 + +- added `--pyi` (#249) + +- added `--py36` (#249) + +- Python grammar pickle caches are stored with the formatting caches, making _Black_ + work in environments where site-packages is not user-writable (#192) + +- _Black_ now enforces a PEP 257 empty line after 
a class-level docstring (and/or + fields) and the first method + +- fixed invalid code produced when standalone comments were present in a trailer that + was omitted from line splitting on a large expression (#237) + +- fixed optional parentheses being removed within `# fmt: off` sections (#224) + +- fixed invalid code produced when stars in very long imports were incorrectly wrapped + in optional parentheses (#234) + +- fixed unstable formatting when inline comments were moved around in a trailer that was + omitted from line splitting on a large expression (#238) + +- fixed extra empty line between a class declaration and the first method if no class + docstring or fields are present (#219) + +- fixed extra empty line between a function signature and an inner function or inner + class (#196) + +## 18.5b0 + +- call chains are now formatted according to the + [fluent interfaces](https://en.wikipedia.org/wiki/Fluent_interface) style (#67) + +- data structure literals (tuples, lists, dictionaries, and sets) are now also always + exploded like imports when they don't fit in a single line (#152) + +- slices are now formatted according to PEP 8 (#178) + +- parentheses are now also managed automatically on the right-hand side of assignments + and return statements (#140) + +- math operators now use their respective priorities for delimiting multiline + expressions (#148) + +- optional parentheses are now omitted on expressions that start or end with a bracket + and only contain a single operator (#177) + +- empty parentheses in a class definition are now removed (#145, #180) + +- string prefixes are now standardized to lowercase and `u` is removed on Python 3.6+ + only code and Python 2.7+ code with the `unicode_literals` future import (#188, #198, + #199) + +- typing stub files (`.pyi`) are now formatted in a style that is consistent with PEP + 484 (#207, #210) + +- progress when reformatting many files is now reported incrementally + +- fixed trailers (content with brackets) being unnecessarily exploded into their own + lines after a dedented closing bracket (#119) + +- fixed an invalid trailing comma sometimes left in imports (#185) + +- fixed non-deterministic formatting when multiple pairs of removable parentheses were + used (#183) + +- fixed multiline strings being unnecessarily wrapped in optional parentheses in long + assignments (#215) + +- fixed not splitting long from-imports with only a single name + +- fixed Python 3.6+ file discovery by also looking at function calls with unpacking. + This fixed non-deterministic formatting if trailing commas were used both in function + signatures with stars and function calls with stars but the former would be + reformatted to a single line. 
+ +- fixed crash on dealing with optional parentheses (#193) + +- fixed "is", "is not", "in", and "not in" not considered operators for splitting + purposes + +- fixed crash when dead symlinks were encountered + +## 18.4a4 + +- don't populate the cache on `--check` (#175) + +## 18.4a3 + +- added a "cache"; files already reformatted that haven't changed on disk won't be + reformatted again (#109) + +- `--check` and `--diff` are no longer mutually exclusive (#149) + +- generalized star expression handling, including double stars; this fixes + multiplication making expressions "unsafe" for trailing commas (#132) + +- _Black_ no longer enforces putting empty lines behind control flow statements (#90) + +- _Black_ now splits imports like "Mode 3 + trailing comma" of isort (#127) + +- fixed comment indentation when a standalone comment closes a block (#16, #32) + +- fixed standalone comments receiving extra empty lines if immediately preceding a + class, def, or decorator (#56, #154) + +- fixed `--diff` not showing entire path (#130) + +- fixed parsing of complex expressions after star and double stars in function calls + (#2) + +- fixed invalid splitting on comma in lambda arguments (#133) + +- fixed missing splits of ternary expressions (#141) + +## 18.4a2 + +- fixed parsing of unaligned standalone comments (#99, #112) + +- fixed placement of dictionary unpacking inside dictionary literals (#111) + +- Vim plugin now works on Windows, too + +- fixed unstable formatting when encountering unnecessarily escaped quotes in a string + (#120) + +## 18.4a1 + +- added `--quiet` (#78) + +- added automatic parentheses management (#4) + +- added [pre-commit](https://pre-commit.com) integration (#103, #104) + +- fixed reporting on `--check` with multiple files (#101, #102) + +- fixed removing backslash escapes from raw strings (#100, #105) + +## 18.4a0 + +- added `--diff` (#87) + +- add line breaks before all delimiters, except in cases like commas, to better comply + with PEP 8 (#73) + +- standardize string literals to use double quotes (almost) everywhere (#75) + +- fixed handling of standalone comments within nested bracketed expressions; _Black_ + will no longer produce super long lines or put all standalone comments at the end of + the expression (#22) + +- fixed 18.3a4 regression: don't crash and burn on empty lines with trailing whitespace + (#80) + +- fixed 18.3a4 regression: `# yapf: disable` usage as trailing comment would cause + _Black_ to not emit the rest of the file (#95) + +- when CTRL+C is pressed while formatting many files, _Black_ no longer freaks out with + a flurry of asyncio-related exceptions + +- only allow up to two empty lines on module level and only single empty lines within + functions (#74) + +## 18.3a4 + +- `# fmt: off` and `# fmt: on` are implemented (#5) + +- automatic detection of deprecated Python 2 forms of print statements and exec + statements in the formatted file (#49) + +- use proper spaces for complex expressions in default values of typed function + arguments (#60) + +- only return exit code 1 when --check is used (#50) + +- don't remove single trailing commas from square bracket indexing (#59) + +- don't omit whitespace if the previous factor leaf wasn't a math operator (#55) + +- omit extra space in kwarg unpacking if it's the first argument (#46) + +- omit extra space in + [Sphinx auto-attribute comments](http://www.sphinx-doc.org/en/stable/ext/autodoc.html#directive-autoattribute) + (#68) + +## 18.3a3 + +- don't remove single empty lines outside of bracketed 
expressions (#19) + +- added ability to pipe formatting from stdin to stdin (#25) + +- restored ability to format code with legacy usage of `async` as a name (#20, #42) + +- even better handling of numpy-style array indexing (#33, again) + +## 18.3a2 + +- changed positioning of binary operators to occur at beginning of lines instead of at + the end, following + [a recent change to PEP 8](https://github.com/python/peps/commit/c59c4376ad233a62ca4b3a6060c81368bd21e85b) + (#21) + +- ignore empty bracket pairs while splitting. This avoids very weirdly looking + formattings (#34, #35) + +- remove a trailing comma if there is a single argument to a call + +- if top level functions were separated by a comment, don't put four empty lines after + the upper function + +- fixed unstable formatting of newlines with imports + +- fixed unintentional folding of post scriptum standalone comments into last statement + if it was a simple statement (#18, #28) + +- fixed missing space in numpy-style array indexing (#33) + +- fixed spurious space after star-based unary expressions (#31) + +## 18.3a1 + +- added `--check` + +- only put trailing commas in function signatures and calls if it's safe to do so. If + the file is Python 3.6+ it's always safe, otherwise only safe if there are no `*args` + or `**kwargs` used in the signature or call. (#8) + +- fixed invalid spacing of dots in relative imports (#6, #13) + +- fixed invalid splitting after comma on unpacked variables in for-loops (#23) + +- fixed spurious space in parenthesized set expressions (#7) + +- fixed spurious space after opening parentheses and in default arguments (#14, #17) + +- fixed spurious space after unary operators when the operand was a complex expression + (#15) + +## 18.3a0 + +- first published version, Happy 🍰 Day 2018! 
+ +- alpha quality + +- date-versioned (see: ) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/RECORD b/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/RECORD new file mode 100644 index 000000000..ccf186559 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/RECORD @@ -0,0 +1,86 @@ +../../../bin/black,sha256=ctkOQKSqa2EjRjTldK3XEni6_3IM6HdoMNjSyyX21xM,280 +../../../bin/blackd,sha256=X_O26ckxHxuXfiagr5_-Vuc_K6T5Lr1W-4IvjfuI8rI,281 +_black_version.py,sha256=sVEDQEx6fAtLwbQSaty477mqXtjh32UVgbeV3rPRIvo,19 +30fcd23745efe32ce681__mypyc.cpython-38-x86_64-linux-gnu.so,sha256=4o90jeP6htJA9J46jSNR1plR4ISrK2yIdJPvV5w6quc,4579488 +black-24.4.2.dist-info/METADATA,sha256=oQjtxFmuSYdm-wvhdVesYICa3TdzPHG2HwKvqnc41l8,77124 +black-24.4.2.dist-info/entry_points.txt,sha256=XTCA4X2yVA0tMiV7l96Gv9TyxhVhoCaznLN2XThqYSA,144 +black-24.4.2.dist-info/WHEEL,sha256=_Q_gFGhs7mmF9xXv5VpPM4buASqaj9BBu_zXDicWedU,144 +black-24.4.2.dist-info/licenses/LICENSE,sha256=nAQo8MO0d5hQz1vZbhGqqK_HLUqG1KNiI9erouWNbgA,1080 +black-24.4.2.dist-info/licenses/AUTHORS.md,sha256=8drxTtCp41j9z9NFJ9U37R1m9qL0zwTMELvgHFFkwao,8092 +black/comments.py,sha256=9CBGkhZ2s-T1A-Y0CnHoRocQK1R1T4RlllCRxyxp_-U,15983 +black/debug.py,sha256=HmpJna5KhwLpgQkqEGR2FX6GRmJ2tFh4Jl6X2aMR3ak,1906 +black/strings.py,sha256=9S1jnAXm8iDlN-gSlBFN8A7QWu8fxtysX6DBhWcwEek,13172 +black/schema.py,sha256=PiSQ5HSrqFmduqC_2_-T_-5GQ8Fr-IDYG6kbyiC_OyY,617 +black/brackets.cpython-38-x86_64-linux-gnu.so,sha256=569esNae5WNFv_jb39VPajg3Au9CcZqmW6KeHAj9OjA,8256 +black/handle_ipynb_magics.py,sha256=eAULCQCmQRbgluSRu5jFQNtILMpXnjZWNoOVQLiaEJ8,13364 +black/brackets.py,sha256=vSDU10V3cCzAqYO-XE9pztRUx49AlafLRCECApzGzsw,12426 +black/report.py,sha256=igkNi8iR5FqSa1sXddS-HnoqW7Cq7PveCmFCfd-pN6w,3452 +black/linegen.py,sha256=CLXgK9ck7b06MAcgH3egSiEcWurzNWn5Hx0SkBzgoGo,68966 +black/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +black/output.py,sha256=hhk6746lZBMbGuJ1VLgjl_Jrn7Yvb24ZBFaCfpO_ijM,3939 +black/cache.py,sha256=5wXB2noRzTIfp4Fm_XFu2GyX3kxcHDzwVtwKv4UY2BM,4835 +black/_width_table.cpython-38-x86_64-linux-gnu.so,sha256=x2msiRlQlKXnjXo6PZW1rFs4QZpneTZfQGKDG6S09Yw,8264 +black/schema.cpython-38-x86_64-linux-gnu.so,sha256=OI-ealI0FkvpX9cxzrndpDJncyVBD2CElY384hjG7N0,8256 +black/parsing.cpython-38-x86_64-linux-gnu.so,sha256=0DTBt0C9J5ZqBkvG0HF_tjzOXE2qVB5NkpD2nb6XtTo,8256 +black/ranges.py,sha256=EHKl3_aswNmDw4AMq9GZ3PVvBo42nC0RU5ZVMt4RdzU,19695 +black/numerics.cpython-38-x86_64-linux-gnu.so,sha256=0DOzRTMTp3eq2a-oi9cUbbO2flLM423m3ih6PW3gxnw,8256 +black/comments.cpython-38-x86_64-linux-gnu.so,sha256=e5GM16yzBbEwkSzKUMzVmJ2x6u-Qf-H7M7lnwib7te8,8256 +black/handle_ipynb_magics.cpython-38-x86_64-linux-gnu.so,sha256=62-SxxNXRKye5RTzEL6jDJ8_37wjTOuP1rRP4eu6_E8,8280 +black/lines.cpython-38-x86_64-linux-gnu.so,sha256=3lo_jaUyfCVAFS5gyVL8EmS6l95mAEnlTc_9FQ0Rvto,8256 +black/nodes.py,sha256=yKfYbhW6vxU6HUebZP5v9xSTEx2WWZ4HRenpvhlo9lA,29742 +black/parsing.py,sha256=R1ActEaqZWI-UrwNTSn3FVuBDQWl674somlnIqC_hSQ,8428 +black/concurrency.py,sha256=oyFRSg5wisTi2dNLKig6Cm7R2uHWVwkfLbI6pCZRNCo,6410 +black/const.cpython-38-x86_64-linux-gnu.so,sha256=V06fj9l8jNTKDQ7EBdg658PiCDujFX8uykTHhrcmnFI,8256 +black/trans.py,sha256=soTQm8RpSKR9nNePsK5a4iocTaE0Dwciw70iRKqz8a8,95476 +black/nodes.cpython-38-x86_64-linux-gnu.so,sha256=49Zdv0iUlL6U5tgMh5KMYU5dugHeku5mOGidMvkaM74,8256 +black/__init__.py,sha256=XV5ft9pudPRwoZDq9dH2Ytz4X-Ggi8_AzPu-tWMrp7U,51931 
+black/mode.py,sha256=jcyx0eH4_sGCpAffl-hEEQSlo6miJRdaIdhWG7Jao8s,9313 +black/__init__.cpython-38-x86_64-linux-gnu.so,sha256=NG-VK9eGHD4F-TB6xOOCeuY7PKPl0798eT9mJDwuG-0,8248 +black/rusty.py,sha256=4LKo3KTUWYZ2cN6QKmwwZVbsCNt2fpu5lzue2V-uxIA,557 +black/ranges.cpython-38-x86_64-linux-gnu.so,sha256=aKbuGsIGHgxNMoexaMlfvtud457BVPMJK51CPidCRbI,8256 +black/trans.cpython-38-x86_64-linux-gnu.so,sha256=JXIAxuDvvDPc2uvIwVXvBUBzrlVUnZHK8Ftqn4eMPOE,8256 +black/numerics.py,sha256=xRGnTSdMVbTaA9IGechc8JM-cIuJGCc326s71hkXJIw,1655 +black/__main__.py,sha256=mogeA4o9zt4w-ufKvaQjSEhtSgQkcMVLK9ChvdB5wH8,47 +black/files.py,sha256=xZzXfmDf2geWZnjoOW-KyY6BZieUw9jLYm86pzdYW0Y,14665 +black/_width_table.py,sha256=2lSnE4s_nVXXfIj9hP2qWASqX8I003WxBM5xPnelDrQ,10761 +black/mode.cpython-38-x86_64-linux-gnu.so,sha256=iN8xRiU-grZHqWXQJOTE_a_pjibyQAj2Gup5lY-YAXg,8248 +black/lines.py,sha256=ncrM1J5V_soPNe3ZKC8EsProWBtglAM3i-5iwkWu_l8,39492 +black/linegen.cpython-38-x86_64-linux-gnu.so,sha256=rGm3hlreiEyrFsnSjQYFDM0xywjE3fCi5zpQT0A1xr4,8256 +black/cache.cpython-38-x86_64-linux-gnu.so,sha256=B3GGX9v-F8xAm-gLiLBqt2I1ezW5x8o2YEJldluK8wM,8256 +black/rusty.cpython-38-x86_64-linux-gnu.so,sha256=e9T0Sci_rgJQXPrJ86yfLNWJNKeHX7SzzcgFkYStk2A,8256 +black/strings.cpython-38-x86_64-linux-gnu.so,sha256=so6_7YkVWwMV8XzTCoVV5VkeOFBnAqqMV6MFhx0-x3g,8256 +black/const.py,sha256=U7cDnhWljmrieOtPBUdO2Vcz69J_VXB6-Br94wuCVuo,321 +black/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +black/resources/__init__.cpython-38-x86_64-linux-gnu.so,sha256=GDSRnqaFDk3V7Nsuh4JMyvwkQ5eRIi67t8dmxlsSbEQ,8264 +black/resources/black.schema.json,sha256=Y6N2HBCLAsfchkg52rQsBBRJhEeQ4BVFp_tQGLc7Jik,7452 +blackd/middlewares.py,sha256=QS7cs86Ojuaqh64dGneimhJ-f30rDI646c27ts4Dwh0,1585 +blackd/__init__.py,sha256=Xqqesi8iCAtLvZSQ-DDahAi6xZAxUthexvFKC-8zFvg,8867 +blackd/__main__.py,sha256=L4xAcDh1K5zb6SsJB102AewW2G13P9-w2RiEwuFj8WA,37 +blib2to3/LICENSE,sha256=V4mIG4rrnJH1g19bt8q-hKD-zUuyvi9UyeaVenjseZ0,12762 +blib2to3/PatternGrammar.txt,sha256=7lul2ztnIqDi--JWDrwciD5yMo75w7TaHHxdHMZJvOM,793 +blib2to3/pytree.py,sha256=mYKS_Py2zDhT815ZvlDy9kkRQ6PanRJIhR6R6AiBBIk,32666 +blib2to3/pygram.py,sha256=l2qw7mw8I533KGWAXUFCXPGCN5F66hvYg86g-EA9GEg,4915 +blib2to3/Grammar.txt,sha256=zjM1rSC9GJjnboYyRDZyKx2IPWDkscVdodwQpDCs4So,11700 +blib2to3/pygram.cpython-38-x86_64-linux-gnu.so,sha256=zWCEzvJDVfsSonrI4ADuCy9EmfVwXgQ7Gt0l6Ah3g6U,8256 +blib2to3/__init__.py,sha256=9_8wL9Scv8_Cs8HJyJHGvx1vwXErsuvlsAqNZLcJQR0,8 +blib2to3/README,sha256=QYZYIfb1NXTTYqDV4kn8oRcNG_qlTFYH1sr3V1h65ko,1074 +blib2to3/pytree.cpython-38-x86_64-linux-gnu.so,sha256=FCIltgVp5CoMXK3cYrg-EWdWHEkm4pRsH8m3M3JDMEs,8256 +blib2to3/pgen2/token.py,sha256=Xubq2lWE3gQ7PdUikcTL6_AWDk1OWhyw_Eb9xue4XJU,1899 +blib2to3/pgen2/conv.cpython-38-x86_64-linux-gnu.so,sha256=_9XoyjyAS2XriXxCKmuNGAh8LcrNz5j9qLHGm5-oM0k,8256 +blib2to3/pgen2/literals.py,sha256=_LyRryELzqarFkW3OAEZzZ-yppCTm9g0mjqqQ2XygKE,1614 +blib2to3/pgen2/pgen.py,sha256=iQH8W999TKUT5AhuOpW38ZynwSACkVNV-I6z8kyQozY,15428 +blib2to3/pgen2/tokenize.cpython-38-x86_64-linux-gnu.so,sha256=Q26M36qIGvvWZybUCxMRdFwbe2c1AEOv6p3xbyC0XXQ,8264 +blib2to3/pgen2/tokenize.py,sha256=gnep0kccRh-kMUj13jWahGyuDm1DAizpJ89YFkTzyKQ,41353 +blib2to3/pgen2/driver.cpython-38-x86_64-linux-gnu.so,sha256=21pNH9tOz_ObfjgCwfZD8FSVNp278Nct2TnEMC50eVc,8264 +blib2to3/pgen2/conv.py,sha256=vH8a_gkalWRNxuNPRxkoigw8_UobdHHSw-PyUcUuH8I,9587 +blib2to3/pgen2/driver.py,sha256=hvycxhuy9IwXGs21qVKigEo74QGV_M1ZIPB9mFXjN58,10832 
+blib2to3/pgen2/parse.py,sha256=mIGZ9BxV_G7jAjcwWefKomNGB8CfUPXqjZKQxH_fuuc,15658 +blib2to3/pgen2/parse.cpython-38-x86_64-linux-gnu.so,sha256=qghi-CW7hZJITIk4jh5Du442U4ZBzw3A9V0zwq2kiIA,8264 +blib2to3/pgen2/__init__.py,sha256=hY6w9QUzvTvRb-MoFfd_q_7ZLt6IUHC2yxWCfsZupQA,143 +blib2to3/pgen2/grammar.py,sha256=6-wFBNM1I2rsnzfDfWeRNjfNYh4Raj_fb8cZX_FByUU,6865 +blib2to3/pgen2/token.cpython-38-x86_64-linux-gnu.so,sha256=PCU4ttGHn9IxC5dNuQA6vMEf9O9IKrfMfSUr_pkCSbc,8264 +blib2to3/pgen2/literals.cpython-38-x86_64-linux-gnu.so,sha256=F1bPEHijdLUJS6YFzYpXI4CnsceYQObxeSbDtWEWusc,8264 +blib2to3/pgen2/pgen.cpython-38-x86_64-linux-gnu.so,sha256=rY5cth8x1SMRESTGsodUECp1bkkRoKzRJCVNbmPJnhw,8256 +blib2to3/pgen2/grammar.cpython-38-x86_64-linux-gnu.so,sha256=G1wIBmOtDaQLhf0c-M92Ekd6NL9B95erOTNPOTyF2Gk,8264 +black-24.4.2.dist-info/INSTALLER,sha256=sz0Tnp99msIbbLB51q7U9nA6AvRKcsOeUKhrHH0Ulg4,12 +black-24.4.2.dist-info/RECORD,, diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/WHEEL b/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/WHEEL new file mode 100644 index 000000000..c86a47347 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.24.2 +Root-Is-Purelib: false +Tag: cp38-cp38-manylinux_2_17_x86_64 +Tag: cp38-cp38-manylinux2014_x86_64 + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/entry_points.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/entry_points.txt new file mode 100644 index 000000000..864da49be --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/entry_points.txt @@ -0,0 +1,6 @@ +[console_scripts] +black = black:patched_main +blackd = blackd:patched_main [d] + +[validate_pyproject.tool_schema] +black = black.schema:get_schema diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/licenses/AUTHORS.md b/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/licenses/AUTHORS.md new file mode 100644 index 000000000..e0511bb9b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/licenses/AUTHORS.md @@ -0,0 +1,196 @@ +# Authors + +Glued together by [Łukasz Langa](mailto:lukasz@langa.pl). 
+ +Maintained with: + +- [Carol Willing](mailto:carolcode@willingconsulting.com) +- [Carl Meyer](mailto:carl@oddbird.net) +- [Jelle Zijlstra](mailto:jelle.zijlstra@gmail.com) +- [Mika Naylor](mailto:mail@autophagy.io) +- [Zsolt Dollenstein](mailto:zsol.zsol@gmail.com) +- [Cooper Lees](mailto:me@cooperlees.com) +- [Richard Si](mailto:sichard26@gmail.com) +- [Felix Hildén](mailto:felix.hilden@gmail.com) +- [Batuhan Taskaya](mailto:batuhan@python.org) +- [Shantanu Jain](mailto:hauntsaninja@gmail.com) + +Multiple contributions by: + +- [Abdur-Rahmaan Janhangeer](mailto:arj.python@gmail.com) +- [Adam Johnson](mailto:me@adamj.eu) +- [Adam Williamson](mailto:adamw@happyassassin.net) +- [Alexander Huynh](mailto:ahrex-gh-psf-black@e.sc) +- [Alexandr Artemyev](mailto:mogost@gmail.com) +- [Alex Vandiver](mailto:github@chmrr.net) +- [Allan Simon](mailto:allan.simon@supinfo.com) +- Anders-Petter Ljungquist +- [Amethyst Reese](mailto:amy@n7.gg) +- [Andrew Thorp](mailto:andrew.thorp.dev@gmail.com) +- [Andrew Zhou](mailto:andrewfzhou@gmail.com) +- [Andrey](mailto:dyuuus@yandex.ru) +- [Andy Freeland](mailto:andy@andyfreeland.net) +- [Anthony Sottile](mailto:asottile@umich.edu) +- [Antonio Ossa Guerra](mailto:aaossa+black@uc.cl) +- [Arjaan Buijk](mailto:arjaan.buijk@gmail.com) +- [Arnav Borbornah](mailto:arnavborborah11@gmail.com) +- [Artem Malyshev](mailto:proofit404@gmail.com) +- [Asger Hautop Drewsen](mailto:asgerdrewsen@gmail.com) +- [Augie Fackler](mailto:raf@durin42.com) +- [Aviskar KC](mailto:aviskarkc10@gmail.com) +- Batuhan Taşkaya +- [Benjamin Wohlwend](mailto:bw@piquadrat.ch) +- [Benjamin Woodruff](mailto:github@benjam.info) +- [Bharat Raghunathan](mailto:bharatraghunthan9767@gmail.com) +- [Brandt Bucher](mailto:brandtbucher@gmail.com) +- [Brett Cannon](mailto:brett@python.org) +- [Bryan Bugyi](mailto:bryan.bugyi@rutgers.edu) +- [Bryan Forbes](mailto:bryan@reigndropsfall.net) +- [Calum Lind](mailto:calumlind@gmail.com) +- [Charles](mailto:peacech@gmail.com) +- Charles Reid +- [Christian Clauss](mailto:cclauss@bluewin.ch) +- [Christian Heimes](mailto:christian@python.org) +- [Chuck Wooters](mailto:chuck.wooters@microsoft.com) +- [Chris Rose](mailto:offline@offby1.net) +- Codey Oxley +- [Cong](mailto:congusbongus@gmail.com) +- [Cooper Ry Lees](mailto:me@cooperlees.com) +- [Dan Davison](mailto:dandavison7@gmail.com) +- [Daniel Hahler](mailto:github@thequod.de) +- [Daniel M. Capella](mailto:polycitizen@gmail.com) +- Daniele Esposti +- [David Hotham](mailto:david.hotham@metaswitch.com) +- [David Lukes](mailto:dafydd.lukes@gmail.com) +- [David Szotten](mailto:davidszotten@gmail.com) +- [Denis Laxalde](mailto:denis@laxalde.org) +- [Douglas Thor](mailto:dthor@transphormusa.com) +- dylanjblack +- [Eli Treuherz](mailto:eli@treuherz.com) +- [Emil Hessman](mailto:emil@hessman.se) +- [Felix Kohlgrüber](mailto:felix.kohlgrueber@gmail.com) +- [Florent Thiery](mailto:fthiery@gmail.com) +- Francisco +- [Giacomo Tagliabue](mailto:giacomo.tag@gmail.com) +- [Greg Gandenberger](mailto:ggandenberger@shoprunner.com) +- [Gregory P. 
Smith](mailto:greg@krypto.org) +- Gustavo Camargo +- hauntsaninja +- [Hadi Alqattan](mailto:alqattanhadizaki@gmail.com) +- [Hassan Abouelela](mailto:hassan@hassanamr.com) +- [Heaford](mailto:dan@heaford.com) +- [Hugo Barrera](mailto::hugo@barrera.io) +- Hugo van Kemenade +- [Hynek Schlawack](mailto:hs@ox.cx) +- [Ionite](mailto:dev@ionite.io) +- [Ivan Katanić](mailto:ivan.katanic@gmail.com) +- [Jakub Kadlubiec](mailto:jakub.kadlubiec@skyscanner.net) +- [Jakub Warczarek](mailto:jakub.warczarek@gmail.com) +- [Jan Hnátek](mailto:jan.hnatek@gmail.com) +- [Jason Fried](mailto:me@jasonfried.info) +- [Jason Friedland](mailto:jason@friedland.id.au) +- [jgirardet](mailto:ijkl@netc.fr) +- Jim Brännlund +- [Jimmy Jia](mailto:tesrin@gmail.com) +- [Joe Antonakakis](mailto:jma353@cornell.edu) +- [Jon Dufresne](mailto:jon.dufresne@gmail.com) +- [Jonas Obrist](mailto:ojiidotch@gmail.com) +- [Jonty Wareing](mailto:jonty@jonty.co.uk) +- [Jose Nazario](mailto:jose.monkey.org@gmail.com) +- [Joseph Larson](mailto:larson.joseph@gmail.com) +- [Josh Bode](mailto:joshbode@fastmail.com) +- [Josh Holland](mailto:anowlcalledjosh@gmail.com) +- [Joshua Cannon](mailto:joshdcannon@gmail.com) +- [José Padilla](mailto:jpadilla@webapplicate.com) +- [Juan Luis Cano Rodríguez](mailto:hello@juanlu.space) +- [kaiix](mailto:kvn.hou@gmail.com) +- [Katie McLaughlin](mailto:katie@glasnt.com) +- Katrin Leinweber +- [Keith Smiley](mailto:keithbsmiley@gmail.com) +- [Kenyon Ralph](mailto:kenyon@kenyonralph.com) +- [Kevin Kirsche](mailto:Kev.Kirsche+GitHub@gmail.com) +- [Kyle Hausmann](mailto:kyle.hausmann@gmail.com) +- [Kyle Sunden](mailto:sunden@wisc.edu) +- Lawrence Chan +- [Linus Groh](mailto:mail@linusgroh.de) +- [Loren Carvalho](mailto:comradeloren@gmail.com) +- [Luka Sterbic](mailto:luka.sterbic@gmail.com) +- [LukasDrude](mailto:mail@lukas-drude.de) +- Mahmoud Hossam +- Mariatta +- [Matt VanEseltine](mailto:vaneseltine@gmail.com) +- [Matthew Clapp](mailto:itsayellow+dev@gmail.com) +- [Matthew Walster](mailto:matthew@walster.org) +- Max Smolens +- [Michael Aquilina](mailto:michaelaquilina@gmail.com) +- [Michael Flaxman](mailto:michael.flaxman@gmail.com) +- [Michael J. 
Sullivan](mailto:sully@msully.net) +- [Michael McClimon](mailto:michael@mcclimon.org) +- [Miguel Gaiowski](mailto:miggaiowski@gmail.com) +- [Mike](mailto:roshi@fedoraproject.org) +- [mikehoyio](mailto:mikehoy@gmail.com) +- [Min ho Kim](mailto:minho42@gmail.com) +- [Miroslav Shubernetskiy](mailto:miroslav@miki725.com) +- MomIsBestFriend +- [Nathan Goldbaum](mailto:ngoldbau@illinois.edu) +- [Nathan Hunt](mailto:neighthan.hunt@gmail.com) +- [Neraste](mailto:neraste.herr10@gmail.com) +- [Nikolaus Waxweiler](mailto:madigens@gmail.com) +- [Ofek Lev](mailto:ofekmeister@gmail.com) +- [Osaetin Daniel](mailto:osaetindaniel@gmail.com) +- [otstrel](mailto:otstrel@gmail.com) +- [Pablo Galindo](mailto:Pablogsal@gmail.com) +- [Paul Ganssle](mailto:p.ganssle@gmail.com) +- [Paul Meinhardt](mailto:mnhrdt@gmail.com) +- [Peter Bengtsson](mailto:mail@peterbe.com) +- [Peter Grayson](mailto:pete@jpgrayson.net) +- [Peter Stensmyr](mailto:peter.stensmyr@gmail.com) +- pmacosta +- [Quentin Pradet](mailto:quentin@pradet.me) +- [Ralf Schmitt](mailto:ralf@systemexit.de) +- [Ramón Valles](mailto:mroutis@protonmail.com) +- [Richard Fearn](mailto:richardfearn@gmail.com) +- [Rishikesh Jha](mailto:rishijha424@gmail.com) +- [Rupert Bedford](mailto:rupert@rupertb.com) +- Russell Davis +- [Sagi Shadur](mailto:saroad2@gmail.com) +- [Rémi Verschelde](mailto:rverschelde@gmail.com) +- [Sami Salonen](mailto:sakki@iki.fi) +- [Samuel Cormier-Iijima](mailto:samuel@cormier-iijima.com) +- [Sanket Dasgupta](mailto:sanketdasgupta@gmail.com) +- Sergi +- [Scott Stevenson](mailto:scott@stevenson.io) +- Shantanu +- [shaoran](mailto:shaoran@sakuranohana.org) +- [Shinya Fujino](mailto:shf0811@gmail.com) +- springstan +- [Stavros Korokithakis](mailto:hi@stavros.io) +- [Stephen Rosen](mailto:sirosen@globus.org) +- [Steven M. 
Vascellaro](mailto:S.Vascellaro@gmail.com) +- [Sunil Kapil](mailto:snlkapil@gmail.com) +- [Sébastien Eustace](mailto:sebastien.eustace@gmail.com) +- [Tal Amuyal](mailto:TalAmuyal@gmail.com) +- [Terrance](mailto:git@terrance.allofti.me) +- [Thom Lu](mailto:thomas.c.lu@gmail.com) +- [Thomas Grainger](mailto:tagrain@gmail.com) +- [Tim Gates](mailto:tim.gates@iress.com) +- [Tim Swast](mailto:swast@google.com) +- [Timo](mailto:timo_tk@hotmail.com) +- Toby Fleming +- [Tom Christie](mailto:tom@tomchristie.com) +- [Tony Narlock](mailto:tony@git-pull.com) +- [Tsuyoshi Hombashi](mailto:tsuyoshi.hombashi@gmail.com) +- [Tushar Chandra](mailto:tusharchandra2018@u.northwestern.edu) +- [Tzu-ping Chung](mailto:uranusjr@gmail.com) +- [Utsav Shah](mailto:ukshah2@illinois.edu) +- utsav-dbx +- vezeli +- [Ville Skyttä](mailto:ville.skytta@iki.fi) +- [Vishwas B Sharma](mailto:sharma.vishwas88@gmail.com) +- [Vlad Emelianov](mailto:volshebnyi@gmail.com) +- [williamfzc](mailto:178894043@qq.com) +- [wouter bolsterlee](mailto:wouter@bolsterl.ee) +- Yazdan +- [Yngve Høiseth](mailto:yngve@hoiseth.net) +- [Yurii Karabas](mailto:1998uriyyo@gmail.com) +- [Zac Hatfield-Dodds](mailto:zac@zhd.dev) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/licenses/LICENSE b/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/licenses/LICENSE new file mode 100644 index 000000000..7a9b891f7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black-24.4.2.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2018 Łukasz Langa + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/__init__.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/__init__.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..2c4e73ef0 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/__init__.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/__init__.py new file mode 100644 index 000000000..942f31607 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/__init__.py @@ -0,0 +1,1606 @@ +import io +import json +import platform +import re +import sys +import tokenize +import traceback +from contextlib import contextmanager +from dataclasses import replace +from datetime import datetime, timezone +from enum import Enum +from json.decoder import JSONDecodeError +from pathlib import Path +from typing import ( + Any, + Collection, + Dict, + Generator, + Iterator, + List, + MutableMapping, + Optional, + Pattern, + Sequence, + Set, + Sized, + Tuple, + Union, +) + +import click +from click.core import ParameterSource +from mypy_extensions import mypyc_attr +from pathspec import PathSpec +from pathspec.patterns.gitwildmatch import GitWildMatchPatternError + +from _black_version import version as __version__ +from black.cache import Cache +from black.comments import normalize_fmt_off +from black.const import ( + DEFAULT_EXCLUDES, + DEFAULT_INCLUDES, + DEFAULT_LINE_LENGTH, + STDIN_PLACEHOLDER, +) +from black.files import ( + best_effort_relative_path, + find_project_root, + find_pyproject_toml, + find_user_pyproject_toml, + gen_python_files, + get_gitignore, + parse_pyproject_toml, + path_is_excluded, + resolves_outside_root_or_cannot_stat, + wrap_stream_for_windows, +) +from black.handle_ipynb_magics import ( + PYTHON_CELL_MAGICS, + TRANSFORMED_MAGICS, + jupyter_dependencies_are_installed, + mask_cell, + put_trailing_semicolon_back, + remove_trailing_semicolon, + unmask_cell, +) +from black.linegen import LN, LineGenerator, transform_line +from black.lines import EmptyLineTracker, LinesBlock +from black.mode import FUTURE_FLAG_TO_FEATURE, VERSION_TO_FEATURES, Feature +from black.mode import Mode as Mode # re-exported +from black.mode import Preview, TargetVersion, supports_feature +from black.nodes import STARS, is_number_token, is_simple_decorator_expression, syms +from black.output import color_diff, diff, dump_to_file, err, ipynb_diff, out +from black.parsing import ( # noqa F401 + ASTSafetyError, + InvalidInput, + lib2to3_parse, + parse_ast, + stringify_ast, +) +from black.ranges import ( + adjusted_lines, + convert_unchanged_lines, + parse_line_ranges, + sanitized_lines, +) +from black.report import Changed, NothingChanged, Report +from blib2to3.pgen2 import token +from blib2to3.pytree import Leaf, Node + +COMPILED = Path(__file__).suffix in (".pyd", ".so") + +# types +FileContent = str +Encoding = str +NewLine = str + + +class WriteBack(Enum): + NO = 0 + YES = 1 + DIFF = 2 + CHECK = 3 + COLOR_DIFF = 4 + + @classmethod + def from_configuration( + cls, *, check: bool, diff: bool, color: bool = False + ) -> "WriteBack": + if check and not diff: + return cls.CHECK + + if diff and color: + return cls.COLOR_DIFF + + return cls.DIFF if diff else cls.YES + + +# Legacy name, left for integrations. 
+FileMode = Mode + + +def read_pyproject_toml( + ctx: click.Context, param: click.Parameter, value: Optional[str] +) -> Optional[str]: + """Inject Black configuration from "pyproject.toml" into defaults in `ctx`. + + Returns the path to a successfully found and read configuration file, None + otherwise. + """ + if not value: + value = find_pyproject_toml( + ctx.params.get("src", ()), ctx.params.get("stdin_filename", None) + ) + if value is None: + return None + + try: + config = parse_pyproject_toml(value) + except (OSError, ValueError) as e: + raise click.FileError( + filename=value, hint=f"Error reading configuration file: {e}" + ) from None + + if not config: + return None + else: + spellcheck_pyproject_toml_keys(ctx, list(config), value) + # Sanitize the values to be Click friendly. For more information please see: + # https://github.com/psf/black/issues/1458 + # https://github.com/pallets/click/issues/1567 + config = { + k: str(v) if not isinstance(v, (list, dict)) else v + for k, v in config.items() + } + + target_version = config.get("target_version") + if target_version is not None and not isinstance(target_version, list): + raise click.BadOptionUsage( + "target-version", "Config key target-version must be a list" + ) + + exclude = config.get("exclude") + if exclude is not None and not isinstance(exclude, str): + raise click.BadOptionUsage("exclude", "Config key exclude must be a string") + + extend_exclude = config.get("extend_exclude") + if extend_exclude is not None and not isinstance(extend_exclude, str): + raise click.BadOptionUsage( + "extend-exclude", "Config key extend-exclude must be a string" + ) + + line_ranges = config.get("line_ranges") + if line_ranges is not None: + raise click.BadOptionUsage( + "line-ranges", "Cannot use line-ranges in the pyproject.toml file." + ) + + default_map: Dict[str, Any] = {} + if ctx.default_map: + default_map.update(ctx.default_map) + default_map.update(config) + + ctx.default_map = default_map + return value + + +def spellcheck_pyproject_toml_keys( + ctx: click.Context, config_keys: List[str], config_file_path: str +) -> None: + invalid_keys: List[str] = [] + available_config_options = {param.name for param in ctx.command.params} + for key in config_keys: + if key not in available_config_options: + invalid_keys.append(key) + if invalid_keys: + keys_str = ", ".join(map(repr, invalid_keys)) + out( + f"Invalid config keys detected: {keys_str} (in {config_file_path})", + fg="red", + ) + + +def target_version_option_callback( + c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...] +) -> List[TargetVersion]: + """Compute the target versions from a --target-version flag. + + This is its own function because mypy couldn't infer the type correctly + when it was a lambda, causing mypyc trouble. + """ + return [TargetVersion[val.upper()] for val in v] + + +def enable_unstable_feature_callback( + c: click.Context, p: Union[click.Option, click.Parameter], v: Tuple[str, ...] +) -> List[Preview]: + """Compute the features from an --enable-unstable-feature flag.""" + return [Preview[val] for val in v] + + +def re_compile_maybe_verbose(regex: str) -> Pattern[str]: + """Compile a regular expression string in `regex`. + + If it contains newlines, use verbose mode. 
+ """ + if "\n" in regex: + regex = "(?x)" + regex + compiled: Pattern[str] = re.compile(regex) + return compiled + + +def validate_regex( + ctx: click.Context, + param: click.Parameter, + value: Optional[str], +) -> Optional[Pattern[str]]: + try: + return re_compile_maybe_verbose(value) if value is not None else None + except re.error as e: + raise click.BadParameter(f"Not a valid regular expression: {e}") from None + + +@click.command( + context_settings={"help_option_names": ["-h", "--help"]}, + # While Click does set this field automatically using the docstring, mypyc + # (annoyingly) strips 'em so we need to set it here too. + help="The uncompromising code formatter.", +) +@click.option("-c", "--code", type=str, help="Format the code passed in as a string.") +@click.option( + "-l", + "--line-length", + type=int, + default=DEFAULT_LINE_LENGTH, + help="How many characters per line to allow.", + show_default=True, +) +@click.option( + "-t", + "--target-version", + type=click.Choice([v.name.lower() for v in TargetVersion]), + callback=target_version_option_callback, + multiple=True, + help=( + "Python versions that should be supported by Black's output. You should" + " include all versions that your code supports. By default, Black will infer" + " target versions from the project metadata in pyproject.toml. If this does" + " not yield conclusive results, Black will use per-file auto-detection." + ), +) +@click.option( + "--pyi", + is_flag=True, + help=( + "Format all input files like typing stubs regardless of file extension. This" + " is useful when piping source on standard input." + ), +) +@click.option( + "--ipynb", + is_flag=True, + help=( + "Format all input files like Jupyter Notebooks regardless of file extension." + " This is useful when piping source on standard input." + ), +) +@click.option( + "--python-cell-magics", + multiple=True, + help=( + "When processing Jupyter Notebooks, add the given magic to the list" + f" of known python-magics ({', '.join(sorted(PYTHON_CELL_MAGICS))})." + " Useful for formatting cells with custom python magics." + ), + default=[], +) +@click.option( + "-x", + "--skip-source-first-line", + is_flag=True, + help="Skip the first line of the source code.", +) +@click.option( + "-S", + "--skip-string-normalization", + is_flag=True, + help="Don't normalize string quotes or prefixes.", +) +@click.option( + "-C", + "--skip-magic-trailing-comma", + is_flag=True, + help="Don't use trailing commas as a reason to split lines.", +) +@click.option( + "--preview", + is_flag=True, + help=( + "Enable potentially disruptive style changes that may be added to Black's main" + " functionality in the next major release." + ), +) +@click.option( + "--unstable", + is_flag=True, + help=( + "Enable potentially disruptive style changes that have known bugs or are not" + " currently expected to make it into the stable style Black's next major" + " release. Implies --preview." + ), +) +@click.option( + "--enable-unstable-feature", + type=click.Choice([v.name for v in Preview]), + callback=enable_unstable_feature_callback, + multiple=True, + help=( + "Enable specific features included in the `--unstable` style. Requires" + " `--preview`. No compatibility guarantees are provided on the behavior" + " or existence of any unstable features." + ), +) +@click.option( + "--check", + is_flag=True, + help=( + "Don't write the files back, just return the status. Return code 0 means" + " nothing would change. Return code 1 means some files would be reformatted." 
+ " Return code 123 means there was an internal error." + ), +) +@click.option( + "--diff", + is_flag=True, + help=( + "Don't write the files back, just output a diff to indicate what changes" + " Black would've made. They are printed to stdout so capturing them is simple." + ), +) +@click.option( + "--color/--no-color", + is_flag=True, + help="Show (or do not show) colored diff. Only applies when --diff is given.", +) +@click.option( + "--line-ranges", + multiple=True, + metavar="START-END", + help=( + "When specified, Black will try its best to only format these lines. This" + " option can be specified multiple times, and a union of the lines will be" + " formatted. Each range must be specified as two integers connected by a `-`:" + " `-`. The `` and `` integer indices are 1-based and" + " inclusive on both ends." + ), + default=(), +) +@click.option( + "--fast/--safe", + is_flag=True, + help=( + "By default, Black performs an AST safety check after formatting your code." + " The --fast flag turns off this check and the --safe flag explicitly enables" + " it. [default: --safe]" + ), +) +@click.option( + "--required-version", + type=str, + help=( + "Require a specific version of Black to be running. This is useful for" + " ensuring that all contributors to your project are using the same" + " version, because different versions of Black may format code a little" + " differently. This option can be set in a configuration file for consistent" + " results across environments." + ), +) +@click.option( + "--exclude", + type=str, + callback=validate_regex, + help=( + "A regular expression that matches files and directories that should be" + " excluded on recursive searches. An empty value means no paths are excluded." + " Use forward slashes for directories on all platforms (Windows, too)." + " By default, Black also ignores all paths listed in .gitignore. Changing this" + f" value will override all default exclusions. [default: {DEFAULT_EXCLUDES}]" + ), + show_default=False, +) +@click.option( + "--extend-exclude", + type=str, + callback=validate_regex, + help=( + "Like --exclude, but adds additional files and directories on top of the" + " default values instead of overriding them." + ), +) +@click.option( + "--force-exclude", + type=str, + callback=validate_regex, + help=( + "Like --exclude, but files and directories matching this regex will be excluded" + " even when they are passed explicitly as arguments. This is useful when" + " invoking Black programmatically on changed files, such as in a pre-commit" + " hook or editor plugin." + ), +) +@click.option( + "--stdin-filename", + type=str, + is_eager=True, + help=( + "The name of the file when passing it through stdin. Useful to make sure Black" + " will respect the --force-exclude option on some editors that rely on using" + " stdin." + ), +) +@click.option( + "--include", + type=str, + default=DEFAULT_INCLUDES, + callback=validate_regex, + help=( + "A regular expression that matches files and directories that should be" + " included on recursive searches. An empty value means all files are included" + " regardless of the name. Use forward slashes for directories on all platforms" + " (Windows, too). Overrides all exclusions, including from .gitignore and" + " command line options." + ), + show_default=True, +) +@click.option( + "-W", + "--workers", + type=click.IntRange(min=1), + default=None, + help=( + "When Black formats multiple files, it may use a process pool to speed up" + " formatting. 
This option controls the number of parallel workers. This can" + " also be specified via the BLACK_NUM_WORKERS environment variable. Defaults" + " to the number of CPUs in the system." + ), +) +@click.option( + "-q", + "--quiet", + is_flag=True, + help=( + "Stop emitting all non-critical output. Error messages will still be emitted" + " (which can silenced by 2>/dev/null)." + ), +) +@click.option( + "-v", + "--verbose", + is_flag=True, + help=( + "Emit messages about files that were not changed or were ignored due to" + " exclusion patterns. If Black is using a configuration file, a message" + " detailing which one it is using will be emitted." + ), +) +@click.version_option( + version=__version__, + message=( + f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n" + f"Python ({platform.python_implementation()}) {platform.python_version()}" + ), +) +@click.argument( + "src", + nargs=-1, + type=click.Path( + exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True + ), + is_eager=True, + metavar="SRC ...", +) +@click.option( + "--config", + type=click.Path( + exists=True, + file_okay=True, + dir_okay=False, + readable=True, + allow_dash=False, + path_type=str, + ), + is_eager=True, + callback=read_pyproject_toml, + help="Read configuration options from a configuration file.", +) +@click.pass_context +def main( # noqa: C901 + ctx: click.Context, + code: Optional[str], + line_length: int, + target_version: List[TargetVersion], + check: bool, + diff: bool, + line_ranges: Sequence[str], + color: bool, + fast: bool, + pyi: bool, + ipynb: bool, + python_cell_magics: Sequence[str], + skip_source_first_line: bool, + skip_string_normalization: bool, + skip_magic_trailing_comma: bool, + preview: bool, + unstable: bool, + enable_unstable_feature: List[Preview], + quiet: bool, + verbose: bool, + required_version: Optional[str], + include: Pattern[str], + exclude: Optional[Pattern[str]], + extend_exclude: Optional[Pattern[str]], + force_exclude: Optional[Pattern[str]], + stdin_filename: Optional[str], + workers: Optional[int], + src: Tuple[str, ...], + config: Optional[str], +) -> None: + """The uncompromising code formatter.""" + ctx.ensure_object(dict) + + if src and code is not None: + out( + main.get_usage(ctx) + + "\n\n'SRC' and 'code' cannot be passed simultaneously." + ) + ctx.exit(1) + if not src and code is None: + out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.") + ctx.exit(1) + + # It doesn't do anything if --unstable is also passed, so just allow it. + if enable_unstable_feature and not (preview or unstable): + out( + main.get_usage(ctx) + + "\n\n'--enable-unstable-feature' requires '--preview'." + ) + ctx.exit(1) + + root, method = ( + find_project_root(src, stdin_filename) if code is None else (None, None) + ) + ctx.obj["root"] = root + + if verbose: + if root: + out( + f"Identified `{root}` as project root containing a {method}.", + fg="blue", + ) + + if config: + config_source = ctx.get_parameter_source("config") + user_level_config = str(find_user_pyproject_toml()) + if config == user_level_config: + out( + "Using configuration from user-level config at " + f"'{user_level_config}'.", + fg="blue", + ) + elif config_source in ( + ParameterSource.DEFAULT, + ParameterSource.DEFAULT_MAP, + ): + out("Using configuration from project root.", fg="blue") + else: + out(f"Using configuration in '{config}'.", fg="blue") + if ctx.default_map: + for param, value in ctx.default_map.items(): + out(f"{param}: {value}") + + error_msg = "Oh no! 
💥 💔 💥" + if ( + required_version + and required_version != __version__ + and required_version != __version__.split(".")[0] + ): + err( + f"{error_msg} The required version `{required_version}` does not match" + f" the running version `{__version__}`!" + ) + ctx.exit(1) + if ipynb and pyi: + err("Cannot pass both `pyi` and `ipynb` flags!") + ctx.exit(1) + + write_back = WriteBack.from_configuration(check=check, diff=diff, color=color) + if target_version: + versions = set(target_version) + else: + # We'll autodetect later. + versions = set() + mode = Mode( + target_versions=versions, + line_length=line_length, + is_pyi=pyi, + is_ipynb=ipynb, + skip_source_first_line=skip_source_first_line, + string_normalization=not skip_string_normalization, + magic_trailing_comma=not skip_magic_trailing_comma, + preview=preview, + unstable=unstable, + python_cell_magics=set(python_cell_magics), + enabled_features=set(enable_unstable_feature), + ) + + lines: List[Tuple[int, int]] = [] + if line_ranges: + if ipynb: + err("Cannot use --line-ranges with ipynb files.") + ctx.exit(1) + + try: + lines = parse_line_ranges(line_ranges) + except ValueError as e: + err(str(e)) + ctx.exit(1) + + if code is not None: + # Run in quiet mode by default with -c; the extra output isn't useful. + # You can still pass -v to get verbose output. + quiet = True + + report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose) + + if code is not None: + reformat_code( + content=code, + fast=fast, + write_back=write_back, + mode=mode, + report=report, + lines=lines, + ) + else: + assert root is not None # root is only None if code is not None + try: + sources = get_sources( + root=root, + src=src, + quiet=quiet, + verbose=verbose, + include=include, + exclude=exclude, + extend_exclude=extend_exclude, + force_exclude=force_exclude, + report=report, + stdin_filename=stdin_filename, + ) + except GitWildMatchPatternError: + ctx.exit(1) + + path_empty( + sources, + "No Python files are present to be formatted. Nothing to do 😴", + quiet, + verbose, + ctx, + ) + + if len(sources) == 1: + reformat_one( + src=sources.pop(), + fast=fast, + write_back=write_back, + mode=mode, + report=report, + lines=lines, + ) + else: + from black.concurrency import reformat_many + + if lines: + err("Cannot use --line-ranges to format multiple files.") + ctx.exit(1) + reformat_many( + sources=sources, + fast=fast, + write_back=write_back, + mode=mode, + report=report, + workers=workers, + ) + + if verbose or not quiet: + if code is None and (verbose or report.change_count or report.failure_count): + out() + out(error_msg if report.return_code else "All done! 
✨ 🍰 ✨") + if code is None: + click.echo(str(report), err=True) + ctx.exit(report.return_code) + + +def get_sources( + *, + root: Path, + src: Tuple[str, ...], + quiet: bool, + verbose: bool, + include: Pattern[str], + exclude: Optional[Pattern[str]], + extend_exclude: Optional[Pattern[str]], + force_exclude: Optional[Pattern[str]], + report: "Report", + stdin_filename: Optional[str], +) -> Set[Path]: + """Compute the set of files to be formatted.""" + sources: Set[Path] = set() + + assert root.is_absolute(), f"INTERNAL ERROR: `root` must be absolute but is {root}" + using_default_exclude = exclude is None + exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES) if exclude is None else exclude + gitignore: Optional[Dict[Path, PathSpec]] = None + root_gitignore = get_gitignore(root) + + for s in src: + if s == "-" and stdin_filename: + path = Path(stdin_filename) + is_stdin = True + else: + path = Path(s) + is_stdin = False + + # Compare the logic here to the logic in `gen_python_files`. + if is_stdin or path.is_file(): + if resolves_outside_root_or_cannot_stat(path, root, report): + if verbose: + out(f'Skipping invalid source: "{path}"', fg="red") + continue + + root_relative_path = best_effort_relative_path(path, root).as_posix() + root_relative_path = "/" + root_relative_path + + # Hard-exclude any files that matches the `--force-exclude` regex. + if path_is_excluded(root_relative_path, force_exclude): + report.path_ignored( + path, "matches the --force-exclude regular expression" + ) + continue + + if is_stdin: + path = Path(f"{STDIN_PLACEHOLDER}{str(path)}") + + if path.suffix == ".ipynb" and not jupyter_dependencies_are_installed( + warn=verbose or not quiet + ): + continue + + if verbose: + out(f'Found input source: "{path}"', fg="blue") + sources.add(path) + elif path.is_dir(): + path = root / (path.resolve().relative_to(root)) + if verbose: + out(f'Found input source directory: "{path}"', fg="blue") + + if using_default_exclude: + gitignore = { + root: root_gitignore, + path: get_gitignore(path), + } + sources.update( + gen_python_files( + path.iterdir(), + root, + include, + exclude, + extend_exclude, + force_exclude, + report, + gitignore, + verbose=verbose, + quiet=quiet, + ) + ) + elif s == "-": + if verbose: + out("Found input source stdin", fg="blue") + sources.add(path) + else: + err(f"invalid path: {s}") + + return sources + + +def path_empty( + src: Sized, msg: str, quiet: bool, verbose: bool, ctx: click.Context +) -> None: + """ + Exit if there is no `src` provided for formatting + """ + if not src: + if verbose or not quiet: + out(msg) + ctx.exit(0) + + +def reformat_code( + content: str, + fast: bool, + write_back: WriteBack, + mode: Mode, + report: Report, + *, + lines: Collection[Tuple[int, int]] = (), +) -> None: + """ + Reformat and print out `content` without spawning child processes. + Similar to `reformat_one`, but for string content. + + `fast`, `write_back`, and `mode` options are passed to + :func:`format_file_in_place` or :func:`format_stdin_to_stdout`. + """ + path = Path("") + try: + changed = Changed.NO + if format_stdin_to_stdout( + content=content, fast=fast, write_back=write_back, mode=mode, lines=lines + ): + changed = Changed.YES + report.done(path, changed) + except Exception as exc: + if report.verbose: + traceback.print_exc() + report.failed(path, str(exc)) + + +# diff-shades depends on being to monkeypatch this function to operate. I know it's +# not ideal, but this shouldn't cause any issues ... hopefully. 
~ichard26 +@mypyc_attr(patchable=True) +def reformat_one( + src: Path, + fast: bool, + write_back: WriteBack, + mode: Mode, + report: "Report", + *, + lines: Collection[Tuple[int, int]] = (), +) -> None: + """Reformat a single file under `src` without spawning child processes. + + `fast`, `write_back`, and `mode` options are passed to + :func:`format_file_in_place` or :func:`format_stdin_to_stdout`. + """ + try: + changed = Changed.NO + + if str(src) == "-": + is_stdin = True + elif str(src).startswith(STDIN_PLACEHOLDER): + is_stdin = True + # Use the original name again in case we want to print something + # to the user + src = Path(str(src)[len(STDIN_PLACEHOLDER) :]) + else: + is_stdin = False + + if is_stdin: + if src.suffix == ".pyi": + mode = replace(mode, is_pyi=True) + elif src.suffix == ".ipynb": + mode = replace(mode, is_ipynb=True) + if format_stdin_to_stdout( + fast=fast, write_back=write_back, mode=mode, lines=lines + ): + changed = Changed.YES + else: + cache = Cache.read(mode) + if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF): + if not cache.is_changed(src): + changed = Changed.CACHED + if changed is not Changed.CACHED and format_file_in_place( + src, fast=fast, write_back=write_back, mode=mode, lines=lines + ): + changed = Changed.YES + if (write_back is WriteBack.YES and changed is not Changed.CACHED) or ( + write_back is WriteBack.CHECK and changed is Changed.NO + ): + cache.write([src]) + report.done(src, changed) + except Exception as exc: + if report.verbose: + traceback.print_exc() + report.failed(src, str(exc)) + + +def format_file_in_place( + src: Path, + fast: bool, + mode: Mode, + write_back: WriteBack = WriteBack.NO, + lock: Any = None, # multiprocessing.Manager().Lock() is some crazy proxy + *, + lines: Collection[Tuple[int, int]] = (), +) -> bool: + """Format file under `src` path. Return True if changed. + + If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted + code to the file. + `mode` and `fast` options are passed to :func:`format_file_contents`. + """ + if src.suffix == ".pyi": + mode = replace(mode, is_pyi=True) + elif src.suffix == ".ipynb": + mode = replace(mode, is_ipynb=True) + + then = datetime.fromtimestamp(src.stat().st_mtime, timezone.utc) + header = b"" + with open(src, "rb") as buf: + if mode.skip_source_first_line: + header = buf.readline() + src_contents, encoding, newline = decode_bytes(buf.read()) + try: + dst_contents = format_file_contents( + src_contents, fast=fast, mode=mode, lines=lines + ) + except NothingChanged: + return False + except JSONDecodeError: + raise ValueError( + f"File '{src}' cannot be parsed as valid Jupyter notebook." 
+ ) from None + src_contents = header.decode(encoding) + src_contents + dst_contents = header.decode(encoding) + dst_contents + + if write_back == WriteBack.YES: + with open(src, "w", encoding=encoding, newline=newline) as f: + f.write(dst_contents) + elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF): + now = datetime.now(timezone.utc) + src_name = f"{src}\t{then}" + dst_name = f"{src}\t{now}" + if mode.is_ipynb: + diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name) + else: + diff_contents = diff(src_contents, dst_contents, src_name, dst_name) + + if write_back == WriteBack.COLOR_DIFF: + diff_contents = color_diff(diff_contents) + + with lock or nullcontext(): + f = io.TextIOWrapper( + sys.stdout.buffer, + encoding=encoding, + newline=newline, + write_through=True, + ) + f = wrap_stream_for_windows(f) + f.write(diff_contents) + f.detach() + + return True + + +def format_stdin_to_stdout( + fast: bool, + *, + content: Optional[str] = None, + write_back: WriteBack = WriteBack.NO, + mode: Mode, + lines: Collection[Tuple[int, int]] = (), +) -> bool: + """Format file on stdin. Return True if changed. + + If content is None, it's read from sys.stdin. + + If `write_back` is YES, write reformatted code back to stdout. If it is DIFF, + write a diff to stdout. The `mode` argument is passed to + :func:`format_file_contents`. + """ + then = datetime.now(timezone.utc) + + if content is None: + src, encoding, newline = decode_bytes(sys.stdin.buffer.read()) + else: + src, encoding, newline = content, "utf-8", "" + + dst = src + try: + dst = format_file_contents(src, fast=fast, mode=mode, lines=lines) + return True + + except NothingChanged: + return False + + finally: + f = io.TextIOWrapper( + sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True + ) + if write_back == WriteBack.YES: + # Make sure there's a newline after the content + if dst and dst[-1] != "\n": + dst += "\n" + f.write(dst) + elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF): + now = datetime.now(timezone.utc) + src_name = f"STDIN\t{then}" + dst_name = f"STDOUT\t{now}" + d = diff(src, dst, src_name, dst_name) + if write_back == WriteBack.COLOR_DIFF: + d = color_diff(d) + f = wrap_stream_for_windows(f) + f.write(d) + f.detach() + + +def check_stability_and_equivalence( + src_contents: str, + dst_contents: str, + *, + mode: Mode, + lines: Collection[Tuple[int, int]] = (), +) -> None: + """Perform stability and equivalence checks. + + Raise AssertionError if source and destination contents are not + equivalent, or if a second pass of the formatter would format the + content differently. + """ + assert_equivalent(src_contents, dst_contents) + assert_stable(src_contents, dst_contents, mode=mode, lines=lines) + + +def format_file_contents( + src_contents: str, + *, + fast: bool, + mode: Mode, + lines: Collection[Tuple[int, int]] = (), +) -> FileContent: + """Reformat contents of a file and return new contents. + + If `fast` is False, additionally confirm that the reformatted code is + valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it. + `mode` is passed to :func:`format_str`. + """ + if mode.is_ipynb: + dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode) + else: + dst_contents = format_str(src_contents, mode=mode, lines=lines) + if src_contents == dst_contents: + raise NothingChanged + + if not fast and not mode.is_ipynb: + # Jupyter notebooks will already have been checked above. 
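A minimal usage sketch (not from the vendored source), assuming the vendored package is importable as `black`: `format_file_contents` signals an already-formatted input by raising `NothingChanged` rather than returning it, and with `fast=False` the result additionally goes through the equivalence and stability checks defined above.

    import black

    mode = black.Mode()
    try:
        # fast=False also runs the AST equivalence / stability assertions
        dst = black.format_file_contents("x=1\n", fast=False, mode=mode)
        print(dst, end="")  # prints the reformatted source: x = 1
    except black.NothingChanged:
        print("input was already formatted")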
+ check_stability_and_equivalence( + src_contents, dst_contents, mode=mode, lines=lines + ) + return dst_contents + + +def validate_cell(src: str, mode: Mode) -> None: + """Check that cell does not already contain TransformerManager transformations, + or non-Python cell magics, which might cause tokenizer_rt to break because of + indentations. + + If a cell contains ``!ls``, then it'll be transformed to + ``get_ipython().system('ls')``. However, if the cell originally contained + ``get_ipython().system('ls')``, then it would get transformed in the same way: + + >>> TransformerManager().transform_cell("get_ipython().system('ls')") + "get_ipython().system('ls')\n" + >>> TransformerManager().transform_cell("!ls") + "get_ipython().system('ls')\n" + + Due to the impossibility of safely roundtripping in such situations, cells + containing transformed magics will be ignored. + """ + if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS): + raise NothingChanged + if ( + src[:2] == "%%" + and src.split()[0][2:] not in PYTHON_CELL_MAGICS | mode.python_cell_magics + ): + raise NothingChanged + + +def format_cell(src: str, *, fast: bool, mode: Mode) -> str: + """Format code in given cell of Jupyter notebook. + + General idea is: + + - if cell has trailing semicolon, remove it; + - if cell has IPython magics, mask them; + - format cell; + - reinstate IPython magics; + - reinstate trailing semicolon (if originally present); + - strip trailing newlines. + + Cells with syntax errors will not be processed, as they + could potentially be automagics or multi-line magics, which + are currently not supported. + """ + validate_cell(src, mode) + src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon( + src + ) + try: + masked_src, replacements = mask_cell(src_without_trailing_semicolon) + except SyntaxError: + raise NothingChanged from None + masked_dst = format_str(masked_src, mode=mode) + if not fast: + check_stability_and_equivalence(masked_src, masked_dst, mode=mode) + dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements) + dst = put_trailing_semicolon_back( + dst_without_trailing_semicolon, has_trailing_semicolon + ) + dst = dst.rstrip("\n") + if dst == src: + raise NothingChanged from None + return dst + + +def validate_metadata(nb: MutableMapping[str, Any]) -> None: + """If notebook is marked as non-Python, don't format it. + + All notebook metadata fields are optional, see + https://nbformat.readthedocs.io/en/latest/format_description.html. So + if a notebook has empty metadata, we will try to parse it anyway. + """ + language = nb.get("metadata", {}).get("language_info", {}).get("name", None) + if language is not None and language != "python": + raise NothingChanged from None + + +def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent: + """Format Jupyter notebook. + + Operate cell-by-cell, only on code cells, only for Python notebooks. + If the ``.ipynb`` originally had a trailing newline, it'll be preserved. 
+ """ + if not src_contents: + raise NothingChanged + + trailing_newline = src_contents[-1] == "\n" + modified = False + nb = json.loads(src_contents) + validate_metadata(nb) + for cell in nb["cells"]: + if cell.get("cell_type", None) == "code": + try: + src = "".join(cell["source"]) + dst = format_cell(src, fast=fast, mode=mode) + except NothingChanged: + pass + else: + cell["source"] = dst.splitlines(keepends=True) + modified = True + if modified: + dst_contents = json.dumps(nb, indent=1, ensure_ascii=False) + if trailing_newline: + dst_contents = dst_contents + "\n" + return dst_contents + else: + raise NothingChanged + + +def format_str( + src_contents: str, *, mode: Mode, lines: Collection[Tuple[int, int]] = () +) -> str: + """Reformat a string and return new contents. + + `mode` determines formatting options, such as how many characters per line are + allowed. Example: + + >>> import black + >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode())) + def f(arg: str = "") -> None: + ... + + A more complex example: + + >>> print( + ... black.format_str( + ... "def f(arg:str='')->None: hey", + ... mode=black.Mode( + ... target_versions={black.TargetVersion.PY36}, + ... line_length=10, + ... string_normalization=False, + ... is_pyi=False, + ... ), + ... ), + ... ) + def f( + arg: str = '', + ) -> None: + hey + + """ + if lines: + lines = sanitized_lines(lines, src_contents) + if not lines: + return src_contents # Nothing to format + dst_contents = _format_str_once(src_contents, mode=mode, lines=lines) + # Forced second pass to work around optional trailing commas (becoming + # forced trailing commas on pass 2) interacting differently with optional + # parentheses. Admittedly ugly. + if src_contents != dst_contents: + if lines: + lines = adjusted_lines(lines, src_contents, dst_contents) + return _format_str_once(dst_contents, mode=mode, lines=lines) + return dst_contents + + +def _format_str_once( + src_contents: str, *, mode: Mode, lines: Collection[Tuple[int, int]] = () +) -> str: + src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions) + dst_blocks: List[LinesBlock] = [] + if mode.target_versions: + versions = mode.target_versions + else: + future_imports = get_future_imports(src_node) + versions = detect_target_versions(src_node, future_imports=future_imports) + + context_manager_features = { + feature + for feature in {Feature.PARENTHESIZED_CONTEXT_MANAGERS} + if supports_feature(versions, feature) + } + normalize_fmt_off(src_node, mode, lines) + if lines: + # This should be called after normalize_fmt_off. 
+ convert_unchanged_lines(src_node, lines) + + line_generator = LineGenerator(mode=mode, features=context_manager_features) + elt = EmptyLineTracker(mode=mode) + split_line_features = { + feature + for feature in { + Feature.TRAILING_COMMA_IN_CALL, + Feature.TRAILING_COMMA_IN_DEF, + } + if supports_feature(versions, feature) + } + block: Optional[LinesBlock] = None + for current_line in line_generator.visit(src_node): + block = elt.maybe_empty_lines(current_line) + dst_blocks.append(block) + for line in transform_line( + current_line, mode=mode, features=split_line_features + ): + block.content_lines.append(str(line)) + if dst_blocks: + dst_blocks[-1].after = 0 + dst_contents = [] + for block in dst_blocks: + dst_contents.extend(block.all_lines()) + if not dst_contents: + # Use decode_bytes to retrieve the correct source newline (CRLF or LF), + # and check if normalized_content has more than one line + normalized_content, _, newline = decode_bytes(src_contents.encode("utf-8")) + if "\n" in normalized_content: + return newline + return "" + return "".join(dst_contents) + + +def decode_bytes(src: bytes) -> Tuple[FileContent, Encoding, NewLine]: + """Return a tuple of (decoded_contents, encoding, newline). + + `newline` is either CRLF or LF but `decoded_contents` is decoded with + universal newlines (i.e. only contains LF). + """ + srcbuf = io.BytesIO(src) + encoding, lines = tokenize.detect_encoding(srcbuf.readline) + if not lines: + return "", encoding, "\n" + + newline = "\r\n" if b"\r\n" == lines[0][-2:] else "\n" + srcbuf.seek(0) + with io.TextIOWrapper(srcbuf, encoding) as tiow: + return tiow.read(), encoding, newline + + +def get_features_used( # noqa: C901 + node: Node, *, future_imports: Optional[Set[str]] = None +) -> Set[Feature]: + """Return a set of (relatively) new Python features used in this file. 
+ + Currently looking for: + - f-strings; + - self-documenting expressions in f-strings (f"{x=}"); + - underscores in numeric literals; + - trailing commas after * or ** in function signatures and calls; + - positional only arguments in function signatures and lambdas; + - assignment expression; + - relaxed decorator syntax; + - usage of __future__ flags (annotations); + - print / exec statements; + - parenthesized context managers; + - match statements; + - except* clause; + - variadic generics; + """ + features: Set[Feature] = set() + if future_imports: + features |= { + FUTURE_FLAG_TO_FEATURE[future_import] + for future_import in future_imports + if future_import in FUTURE_FLAG_TO_FEATURE + } + + for n in node.pre_order(): + if n.type == token.FSTRING_START: + features.add(Feature.F_STRINGS) + elif ( + n.type == token.RBRACE + and n.parent is not None + and any(child.type == token.EQUAL for child in n.parent.children) + ): + features.add(Feature.DEBUG_F_STRINGS) + + elif is_number_token(n): + if "_" in n.value: + features.add(Feature.NUMERIC_UNDERSCORES) + + elif n.type == token.SLASH: + if n.parent and n.parent.type in { + syms.typedargslist, + syms.arglist, + syms.varargslist, + }: + features.add(Feature.POS_ONLY_ARGUMENTS) + + elif n.type == token.COLONEQUAL: + features.add(Feature.ASSIGNMENT_EXPRESSIONS) + + elif n.type == syms.decorator: + if len(n.children) > 1 and not is_simple_decorator_expression( + n.children[1] + ): + features.add(Feature.RELAXED_DECORATORS) + + elif ( + n.type in {syms.typedargslist, syms.arglist} + and n.children + and n.children[-1].type == token.COMMA + ): + if n.type == syms.typedargslist: + feature = Feature.TRAILING_COMMA_IN_DEF + else: + feature = Feature.TRAILING_COMMA_IN_CALL + + for ch in n.children: + if ch.type in STARS: + features.add(feature) + + if ch.type == syms.argument: + for argch in ch.children: + if argch.type in STARS: + features.add(feature) + + elif ( + n.type in {syms.return_stmt, syms.yield_expr} + and len(n.children) >= 2 + and n.children[1].type == syms.testlist_star_expr + and any(child.type == syms.star_expr for child in n.children[1].children) + ): + features.add(Feature.UNPACKING_ON_FLOW) + + elif ( + n.type == syms.annassign + and len(n.children) >= 4 + and n.children[3].type == syms.testlist_star_expr + ): + features.add(Feature.ANN_ASSIGN_EXTENDED_RHS) + + elif ( + n.type == syms.with_stmt + and len(n.children) > 2 + and n.children[1].type == syms.atom + ): + atom_children = n.children[1].children + if ( + len(atom_children) == 3 + and atom_children[0].type == token.LPAR + and _contains_asexpr(atom_children[1]) + and atom_children[2].type == token.RPAR + ): + features.add(Feature.PARENTHESIZED_CONTEXT_MANAGERS) + + elif n.type == syms.match_stmt: + features.add(Feature.PATTERN_MATCHING) + + elif ( + n.type == syms.except_clause + and len(n.children) >= 2 + and n.children[1].type == token.STAR + ): + features.add(Feature.EXCEPT_STAR) + + elif n.type in {syms.subscriptlist, syms.trailer} and any( + child.type == syms.star_expr for child in n.children + ): + features.add(Feature.VARIADIC_GENERICS) + + elif ( + n.type == syms.tname_star + and len(n.children) == 3 + and n.children[2].type == syms.star_expr + ): + features.add(Feature.VARIADIC_GENERICS) + + elif n.type in (syms.type_stmt, syms.typeparams): + features.add(Feature.TYPE_PARAMS) + + elif ( + n.type in (syms.typevartuple, syms.paramspec, syms.typevar) + and n.children[-2].type == token.EQUAL + ): + features.add(Feature.TYPE_PARAM_DEFAULTS) + + return features + + 
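A hedged sketch (not from the vendored source) of how the detector above feeds per-file target-version inference via the `detect_target_versions` helper defined just below; it assumes the vendored package imports as `black`, and the exact `Feature` members printed depend on the Black version:

    import black

    # "/" (positional-only parameters) and ":=" are both on the detector's list,
    # so only 3.8+ targets should survive the per-file auto-detection.
    node = black.lib2to3_parse("def f(a, /, b):\n    return (x := a + b)\n")
    print(black.get_features_used(node))  # e.g. POS_ONLY_ARGUMENTS, ASSIGNMENT_EXPRESSIONS
    versions = black.detect_target_versions(node)
    print(black.TargetVersion.PY37 in versions)  # False: 3.7 supports neither feature
    print(black.TargetVersion.PY38 in versions)  # True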
+def _contains_asexpr(node: Union[Node, Leaf]) -> bool: + """Return True if `node` contains an as-pattern.""" + if node.type == syms.asexpr_test: + return True + elif node.type == syms.atom: + if ( + len(node.children) == 3 + and node.children[0].type == token.LPAR + and node.children[2].type == token.RPAR + ): + return _contains_asexpr(node.children[1]) + elif node.type == syms.testlist_gexp: + return any(_contains_asexpr(child) for child in node.children) + return False + + +def detect_target_versions( + node: Node, *, future_imports: Optional[Set[str]] = None +) -> Set[TargetVersion]: + """Detect the version to target based on the nodes used.""" + features = get_features_used(node, future_imports=future_imports) + return { + version for version in TargetVersion if features <= VERSION_TO_FEATURES[version] + } + + +def get_future_imports(node: Node) -> Set[str]: + """Return a set of __future__ imports in the file.""" + imports: Set[str] = set() + + def get_imports_from_children(children: List[LN]) -> Generator[str, None, None]: + for child in children: + if isinstance(child, Leaf): + if child.type == token.NAME: + yield child.value + + elif child.type == syms.import_as_name: + orig_name = child.children[0] + assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports" + assert orig_name.type == token.NAME, "Invalid syntax parsing imports" + yield orig_name.value + + elif child.type == syms.import_as_names: + yield from get_imports_from_children(child.children) + + else: + raise AssertionError("Invalid syntax parsing imports") + + for child in node.children: + if child.type != syms.simple_stmt: + break + + first_child = child.children[0] + if isinstance(first_child, Leaf): + # Continue looking if we see a docstring; otherwise stop. + if ( + len(child.children) == 2 + and first_child.type == token.STRING + and child.children[1].type == token.NEWLINE + ): + continue + + break + + elif first_child.type == syms.import_from: + module_name = first_child.children[1] + if not isinstance(module_name, Leaf) or module_name.value != "__future__": + break + + imports |= set(get_imports_from_children(first_child.children[3:])) + else: + break + + return imports + + +def assert_equivalent(src: str, dst: str) -> None: + """Raise AssertionError if `src` and `dst` aren't equivalent.""" + try: + src_ast = parse_ast(src) + except Exception as exc: + raise ASTSafetyError( + "cannot use --safe with this file; failed to parse source file AST: " + f"{exc}\n" + "This could be caused by running Black with an older Python version " + "that does not support new syntax used in your source file." + ) from exc + + try: + dst_ast = parse_ast(dst) + except Exception as exc: + log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst) + raise ASTSafetyError( + f"INTERNAL ERROR: Black produced invalid code: {exc}. " + "Please report a bug on https://github.com/psf/black/issues. " + f"This invalid output might be helpful: {log}" + ) from None + + src_ast_str = "\n".join(stringify_ast(src_ast)) + dst_ast_str = "\n".join(stringify_ast(dst_ast)) + if src_ast_str != dst_ast_str: + log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst")) + raise ASTSafetyError( + "INTERNAL ERROR: Black produced code that is not equivalent to the" + " source. Please report a bug on " + f"https://github.com/psf/black/issues. 
This diff might be helpful: {log}" + ) from None + + +def assert_stable( + src: str, dst: str, mode: Mode, *, lines: Collection[Tuple[int, int]] = () +) -> None: + """Raise AssertionError if `dst` reformats differently the second time.""" + if lines: + # Formatting specified lines requires `adjusted_lines` to map original lines + # to the formatted lines before re-formatting the previously formatted result. + # Due to less-ideal diff algorithm, some edge cases produce incorrect new line + # ranges. Hence for now, we skip the stable check. + # See https://github.com/psf/black/issues/4033 for context. + return + # We shouldn't call format_str() here, because that formats the string + # twice and may hide a bug where we bounce back and forth between two + # versions. + newdst = _format_str_once(dst, mode=mode, lines=lines) + if dst != newdst: + log = dump_to_file( + str(mode), + diff(src, dst, "source", "first pass"), + diff(dst, newdst, "first pass", "second pass"), + ) + raise AssertionError( + "INTERNAL ERROR: Black produced different code on the second pass of the" + " formatter. Please report a bug on https://github.com/psf/black/issues." + f" This diff might be helpful: {log}" + ) from None + + +@contextmanager +def nullcontext() -> Iterator[None]: + """Return an empty context manager. + + To be used like `nullcontext` in Python 3.7. + """ + yield + + +def patched_main() -> None: + # PyInstaller patches multiprocessing to need freeze_support() even in non-Windows + # environments so just assume we always need to call it if frozen. + if getattr(sys, "frozen", False): + from multiprocessing import freeze_support + + freeze_support() + + main() + + +if __name__ == "__main__": + patched_main() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/__main__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/__main__.py new file mode 100644 index 000000000..19b810b53 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/__main__.py @@ -0,0 +1,3 @@ +from black import patched_main + +patched_main() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/_width_table.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/_width_table.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..3e5430f6c Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/_width_table.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/_width_table.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/_width_table.py new file mode 100644 index 000000000..f3304e48e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/_width_table.py @@ -0,0 +1,478 @@ +# Generated by make_width_table.py +# wcwidth 0.2.6 +# Unicode 15.0.0 +from typing import Final, List, Tuple + +WIDTH_TABLE: Final[List[Tuple[int, int, int]]] = [ + (0, 0, 0), + (1, 31, -1), + (127, 159, -1), + (768, 879, 0), + (1155, 1161, 0), + (1425, 1469, 0), + (1471, 1471, 0), + (1473, 1474, 0), + (1476, 1477, 0), + (1479, 1479, 0), + (1552, 1562, 0), + (1611, 1631, 0), + (1648, 1648, 0), + (1750, 1756, 0), + (1759, 1764, 0), + (1767, 1768, 0), + (1770, 1773, 0), + (1809, 1809, 0), + (1840, 1866, 0), + (1958, 1968, 0), + (2027, 2035, 0), + (2045, 2045, 0), + (2070, 2073, 0), + (2075, 2083, 0), + (2085, 2087, 0), + (2089, 2093, 0), + (2137, 2139, 0), + (2200, 2207, 0), + (2250, 2273, 0), + (2275, 2306, 0), + (2362, 2362, 0), + (2364, 2364, 0), + 
(2369, 2376, 0), + (2381, 2381, 0), + (2385, 2391, 0), + (2402, 2403, 0), + (2433, 2433, 0), + (2492, 2492, 0), + (2497, 2500, 0), + (2509, 2509, 0), + (2530, 2531, 0), + (2558, 2558, 0), + (2561, 2562, 0), + (2620, 2620, 0), + (2625, 2626, 0), + (2631, 2632, 0), + (2635, 2637, 0), + (2641, 2641, 0), + (2672, 2673, 0), + (2677, 2677, 0), + (2689, 2690, 0), + (2748, 2748, 0), + (2753, 2757, 0), + (2759, 2760, 0), + (2765, 2765, 0), + (2786, 2787, 0), + (2810, 2815, 0), + (2817, 2817, 0), + (2876, 2876, 0), + (2879, 2879, 0), + (2881, 2884, 0), + (2893, 2893, 0), + (2901, 2902, 0), + (2914, 2915, 0), + (2946, 2946, 0), + (3008, 3008, 0), + (3021, 3021, 0), + (3072, 3072, 0), + (3076, 3076, 0), + (3132, 3132, 0), + (3134, 3136, 0), + (3142, 3144, 0), + (3146, 3149, 0), + (3157, 3158, 0), + (3170, 3171, 0), + (3201, 3201, 0), + (3260, 3260, 0), + (3263, 3263, 0), + (3270, 3270, 0), + (3276, 3277, 0), + (3298, 3299, 0), + (3328, 3329, 0), + (3387, 3388, 0), + (3393, 3396, 0), + (3405, 3405, 0), + (3426, 3427, 0), + (3457, 3457, 0), + (3530, 3530, 0), + (3538, 3540, 0), + (3542, 3542, 0), + (3633, 3633, 0), + (3636, 3642, 0), + (3655, 3662, 0), + (3761, 3761, 0), + (3764, 3772, 0), + (3784, 3790, 0), + (3864, 3865, 0), + (3893, 3893, 0), + (3895, 3895, 0), + (3897, 3897, 0), + (3953, 3966, 0), + (3968, 3972, 0), + (3974, 3975, 0), + (3981, 3991, 0), + (3993, 4028, 0), + (4038, 4038, 0), + (4141, 4144, 0), + (4146, 4151, 0), + (4153, 4154, 0), + (4157, 4158, 0), + (4184, 4185, 0), + (4190, 4192, 0), + (4209, 4212, 0), + (4226, 4226, 0), + (4229, 4230, 0), + (4237, 4237, 0), + (4253, 4253, 0), + (4352, 4447, 2), + (4957, 4959, 0), + (5906, 5908, 0), + (5938, 5939, 0), + (5970, 5971, 0), + (6002, 6003, 0), + (6068, 6069, 0), + (6071, 6077, 0), + (6086, 6086, 0), + (6089, 6099, 0), + (6109, 6109, 0), + (6155, 6157, 0), + (6159, 6159, 0), + (6277, 6278, 0), + (6313, 6313, 0), + (6432, 6434, 0), + (6439, 6440, 0), + (6450, 6450, 0), + (6457, 6459, 0), + (6679, 6680, 0), + (6683, 6683, 0), + (6742, 6742, 0), + (6744, 6750, 0), + (6752, 6752, 0), + (6754, 6754, 0), + (6757, 6764, 0), + (6771, 6780, 0), + (6783, 6783, 0), + (6832, 6862, 0), + (6912, 6915, 0), + (6964, 6964, 0), + (6966, 6970, 0), + (6972, 6972, 0), + (6978, 6978, 0), + (7019, 7027, 0), + (7040, 7041, 0), + (7074, 7077, 0), + (7080, 7081, 0), + (7083, 7085, 0), + (7142, 7142, 0), + (7144, 7145, 0), + (7149, 7149, 0), + (7151, 7153, 0), + (7212, 7219, 0), + (7222, 7223, 0), + (7376, 7378, 0), + (7380, 7392, 0), + (7394, 7400, 0), + (7405, 7405, 0), + (7412, 7412, 0), + (7416, 7417, 0), + (7616, 7679, 0), + (8203, 8207, 0), + (8232, 8238, 0), + (8288, 8291, 0), + (8400, 8432, 0), + (8986, 8987, 2), + (9001, 9002, 2), + (9193, 9196, 2), + (9200, 9200, 2), + (9203, 9203, 2), + (9725, 9726, 2), + (9748, 9749, 2), + (9800, 9811, 2), + (9855, 9855, 2), + (9875, 9875, 2), + (9889, 9889, 2), + (9898, 9899, 2), + (9917, 9918, 2), + (9924, 9925, 2), + (9934, 9934, 2), + (9940, 9940, 2), + (9962, 9962, 2), + (9970, 9971, 2), + (9973, 9973, 2), + (9978, 9978, 2), + (9981, 9981, 2), + (9989, 9989, 2), + (9994, 9995, 2), + (10024, 10024, 2), + (10060, 10060, 2), + (10062, 10062, 2), + (10067, 10069, 2), + (10071, 10071, 2), + (10133, 10135, 2), + (10160, 10160, 2), + (10175, 10175, 2), + (11035, 11036, 2), + (11088, 11088, 2), + (11093, 11093, 2), + (11503, 11505, 0), + (11647, 11647, 0), + (11744, 11775, 0), + (11904, 11929, 2), + (11931, 12019, 2), + (12032, 12245, 2), + (12272, 12283, 2), + (12288, 12329, 2), + (12330, 12333, 0), + (12334, 12350, 2), 
+ (12353, 12438, 2), + (12441, 12442, 0), + (12443, 12543, 2), + (12549, 12591, 2), + (12593, 12686, 2), + (12688, 12771, 2), + (12784, 12830, 2), + (12832, 12871, 2), + (12880, 19903, 2), + (19968, 42124, 2), + (42128, 42182, 2), + (42607, 42610, 0), + (42612, 42621, 0), + (42654, 42655, 0), + (42736, 42737, 0), + (43010, 43010, 0), + (43014, 43014, 0), + (43019, 43019, 0), + (43045, 43046, 0), + (43052, 43052, 0), + (43204, 43205, 0), + (43232, 43249, 0), + (43263, 43263, 0), + (43302, 43309, 0), + (43335, 43345, 0), + (43360, 43388, 2), + (43392, 43394, 0), + (43443, 43443, 0), + (43446, 43449, 0), + (43452, 43453, 0), + (43493, 43493, 0), + (43561, 43566, 0), + (43569, 43570, 0), + (43573, 43574, 0), + (43587, 43587, 0), + (43596, 43596, 0), + (43644, 43644, 0), + (43696, 43696, 0), + (43698, 43700, 0), + (43703, 43704, 0), + (43710, 43711, 0), + (43713, 43713, 0), + (43756, 43757, 0), + (43766, 43766, 0), + (44005, 44005, 0), + (44008, 44008, 0), + (44013, 44013, 0), + (44032, 55203, 2), + (63744, 64255, 2), + (64286, 64286, 0), + (65024, 65039, 0), + (65040, 65049, 2), + (65056, 65071, 0), + (65072, 65106, 2), + (65108, 65126, 2), + (65128, 65131, 2), + (65281, 65376, 2), + (65504, 65510, 2), + (66045, 66045, 0), + (66272, 66272, 0), + (66422, 66426, 0), + (68097, 68099, 0), + (68101, 68102, 0), + (68108, 68111, 0), + (68152, 68154, 0), + (68159, 68159, 0), + (68325, 68326, 0), + (68900, 68903, 0), + (69291, 69292, 0), + (69373, 69375, 0), + (69446, 69456, 0), + (69506, 69509, 0), + (69633, 69633, 0), + (69688, 69702, 0), + (69744, 69744, 0), + (69747, 69748, 0), + (69759, 69761, 0), + (69811, 69814, 0), + (69817, 69818, 0), + (69826, 69826, 0), + (69888, 69890, 0), + (69927, 69931, 0), + (69933, 69940, 0), + (70003, 70003, 0), + (70016, 70017, 0), + (70070, 70078, 0), + (70089, 70092, 0), + (70095, 70095, 0), + (70191, 70193, 0), + (70196, 70196, 0), + (70198, 70199, 0), + (70206, 70206, 0), + (70209, 70209, 0), + (70367, 70367, 0), + (70371, 70378, 0), + (70400, 70401, 0), + (70459, 70460, 0), + (70464, 70464, 0), + (70502, 70508, 0), + (70512, 70516, 0), + (70712, 70719, 0), + (70722, 70724, 0), + (70726, 70726, 0), + (70750, 70750, 0), + (70835, 70840, 0), + (70842, 70842, 0), + (70847, 70848, 0), + (70850, 70851, 0), + (71090, 71093, 0), + (71100, 71101, 0), + (71103, 71104, 0), + (71132, 71133, 0), + (71219, 71226, 0), + (71229, 71229, 0), + (71231, 71232, 0), + (71339, 71339, 0), + (71341, 71341, 0), + (71344, 71349, 0), + (71351, 71351, 0), + (71453, 71455, 0), + (71458, 71461, 0), + (71463, 71467, 0), + (71727, 71735, 0), + (71737, 71738, 0), + (71995, 71996, 0), + (71998, 71998, 0), + (72003, 72003, 0), + (72148, 72151, 0), + (72154, 72155, 0), + (72160, 72160, 0), + (72193, 72202, 0), + (72243, 72248, 0), + (72251, 72254, 0), + (72263, 72263, 0), + (72273, 72278, 0), + (72281, 72283, 0), + (72330, 72342, 0), + (72344, 72345, 0), + (72752, 72758, 0), + (72760, 72765, 0), + (72767, 72767, 0), + (72850, 72871, 0), + (72874, 72880, 0), + (72882, 72883, 0), + (72885, 72886, 0), + (73009, 73014, 0), + (73018, 73018, 0), + (73020, 73021, 0), + (73023, 73029, 0), + (73031, 73031, 0), + (73104, 73105, 0), + (73109, 73109, 0), + (73111, 73111, 0), + (73459, 73460, 0), + (73472, 73473, 0), + (73526, 73530, 0), + (73536, 73536, 0), + (73538, 73538, 0), + (78912, 78912, 0), + (78919, 78933, 0), + (92912, 92916, 0), + (92976, 92982, 0), + (94031, 94031, 0), + (94095, 94098, 0), + (94176, 94179, 2), + (94180, 94180, 0), + (94192, 94193, 2), + (94208, 100343, 2), + (100352, 101589, 2), + 
(101632, 101640, 2), + (110576, 110579, 2), + (110581, 110587, 2), + (110589, 110590, 2), + (110592, 110882, 2), + (110898, 110898, 2), + (110928, 110930, 2), + (110933, 110933, 2), + (110948, 110951, 2), + (110960, 111355, 2), + (113821, 113822, 0), + (118528, 118573, 0), + (118576, 118598, 0), + (119143, 119145, 0), + (119163, 119170, 0), + (119173, 119179, 0), + (119210, 119213, 0), + (119362, 119364, 0), + (121344, 121398, 0), + (121403, 121452, 0), + (121461, 121461, 0), + (121476, 121476, 0), + (121499, 121503, 0), + (121505, 121519, 0), + (122880, 122886, 0), + (122888, 122904, 0), + (122907, 122913, 0), + (122915, 122916, 0), + (122918, 122922, 0), + (123023, 123023, 0), + (123184, 123190, 0), + (123566, 123566, 0), + (123628, 123631, 0), + (124140, 124143, 0), + (125136, 125142, 0), + (125252, 125258, 0), + (126980, 126980, 2), + (127183, 127183, 2), + (127374, 127374, 2), + (127377, 127386, 2), + (127488, 127490, 2), + (127504, 127547, 2), + (127552, 127560, 2), + (127568, 127569, 2), + (127584, 127589, 2), + (127744, 127776, 2), + (127789, 127797, 2), + (127799, 127868, 2), + (127870, 127891, 2), + (127904, 127946, 2), + (127951, 127955, 2), + (127968, 127984, 2), + (127988, 127988, 2), + (127992, 128062, 2), + (128064, 128064, 2), + (128066, 128252, 2), + (128255, 128317, 2), + (128331, 128334, 2), + (128336, 128359, 2), + (128378, 128378, 2), + (128405, 128406, 2), + (128420, 128420, 2), + (128507, 128591, 2), + (128640, 128709, 2), + (128716, 128716, 2), + (128720, 128722, 2), + (128725, 128727, 2), + (128732, 128735, 2), + (128747, 128748, 2), + (128756, 128764, 2), + (128992, 129003, 2), + (129008, 129008, 2), + (129292, 129338, 2), + (129340, 129349, 2), + (129351, 129535, 2), + (129648, 129660, 2), + (129664, 129672, 2), + (129680, 129725, 2), + (129727, 129733, 2), + (129742, 129755, 2), + (129760, 129768, 2), + (129776, 129784, 2), + (131072, 196605, 2), + (196608, 262141, 2), + (917760, 917999, 0), +] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/brackets.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/brackets.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..5d9e53513 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/brackets.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/brackets.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/brackets.py new file mode 100644 index 000000000..37e6b2590 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/brackets.py @@ -0,0 +1,382 @@ +"""Builds on top of nodes.py to track brackets.""" + +from dataclasses import dataclass, field +from typing import Dict, Final, Iterable, List, Optional, Sequence, Set, Tuple, Union + +from black.nodes import ( + BRACKET, + CLOSING_BRACKETS, + COMPARATORS, + LOGIC_OPERATORS, + MATH_OPERATORS, + OPENING_BRACKETS, + UNPACKING_PARENTS, + VARARGS_PARENTS, + is_vararg, + syms, +) +from blib2to3.pgen2 import token +from blib2to3.pytree import Leaf, Node + +# types +LN = Union[Leaf, Node] +Depth = int +LeafID = int +NodeType = int +Priority = int + + +COMPREHENSION_PRIORITY: Final = 20 +COMMA_PRIORITY: Final = 18 +TERNARY_PRIORITY: Final = 16 +LOGIC_PRIORITY: Final = 14 +STRING_PRIORITY: Final = 12 +COMPARATOR_PRIORITY: Final = 10 +MATH_PRIORITIES: Final = { + token.VBAR: 9, + token.CIRCUMFLEX: 8, + token.AMPER: 7, + token.LEFTSHIFT: 6, + token.RIGHTSHIFT: 6, + token.PLUS: 5, + token.MINUS: 5, + 
token.STAR: 4, + token.SLASH: 4, + token.DOUBLESLASH: 4, + token.PERCENT: 4, + token.AT: 4, + token.TILDE: 3, + token.DOUBLESTAR: 2, +} +DOT_PRIORITY: Final = 1 + + +class BracketMatchError(Exception): + """Raised when an opening bracket is unable to be matched to a closing bracket.""" + + +@dataclass +class BracketTracker: + """Keeps track of brackets on a line.""" + + depth: int = 0 + bracket_match: Dict[Tuple[Depth, NodeType], Leaf] = field(default_factory=dict) + delimiters: Dict[LeafID, Priority] = field(default_factory=dict) + previous: Optional[Leaf] = None + _for_loop_depths: List[int] = field(default_factory=list) + _lambda_argument_depths: List[int] = field(default_factory=list) + invisible: List[Leaf] = field(default_factory=list) + + def mark(self, leaf: Leaf) -> None: + """Mark `leaf` with bracket-related metadata. Keep track of delimiters. + + All leaves receive an int `bracket_depth` field that stores how deep + within brackets a given leaf is. 0 means there are no enclosing brackets + that started on this line. + + If a leaf is itself a closing bracket and there is a matching opening + bracket earlier, it receives an `opening_bracket` field with which it forms a + pair. This is a one-directional link to avoid reference cycles. Closing + bracket without opening happens on lines continued from previous + breaks, e.g. `) -> "ReturnType":` as part of a funcdef where we place + the return type annotation on its own line of the previous closing RPAR. + + If a leaf is a delimiter (a token on which Black can split the line if + needed) and it's on depth 0, its `id()` is stored in the tracker's + `delimiters` field. + """ + if leaf.type == token.COMMENT: + return + + if ( + self.depth == 0 + and leaf.type in CLOSING_BRACKETS + and (self.depth, leaf.type) not in self.bracket_match + ): + return + + self.maybe_decrement_after_for_loop_variable(leaf) + self.maybe_decrement_after_lambda_arguments(leaf) + if leaf.type in CLOSING_BRACKETS: + self.depth -= 1 + try: + opening_bracket = self.bracket_match.pop((self.depth, leaf.type)) + except KeyError as e: + raise BracketMatchError( + "Unable to match a closing bracket to the following opening" + f" bracket: {leaf}" + ) from e + leaf.opening_bracket = opening_bracket + if not leaf.value: + self.invisible.append(leaf) + leaf.bracket_depth = self.depth + if self.depth == 0: + delim = is_split_before_delimiter(leaf, self.previous) + if delim and self.previous is not None: + self.delimiters[id(self.previous)] = delim + else: + delim = is_split_after_delimiter(leaf) + if delim: + self.delimiters[id(leaf)] = delim + if leaf.type in OPENING_BRACKETS: + self.bracket_match[self.depth, BRACKET[leaf.type]] = leaf + self.depth += 1 + if not leaf.value: + self.invisible.append(leaf) + self.previous = leaf + self.maybe_increment_lambda_arguments(leaf) + self.maybe_increment_for_loop_variable(leaf) + + def any_open_for_or_lambda(self) -> bool: + """Return True if there is an open for or lambda expression on the line. + + See maybe_increment_for_loop_variable and maybe_increment_lambda_arguments + for details.""" + return bool(self._for_loop_depths or self._lambda_argument_depths) + + def any_open_brackets(self) -> bool: + """Return True if there is an yet unmatched open bracket on the line.""" + return bool(self.bracket_match) + + def max_delimiter_priority(self, exclude: Iterable[LeafID] = ()) -> Priority: + """Return the highest priority of a delimiter found on the line. + + Values are consistent with what `is_split_*_delimiter()` return. 
+ Raises ValueError on no delimiters. + """ + return max(v for k, v in self.delimiters.items() if k not in exclude) + + def delimiter_count_with_priority(self, priority: Priority = 0) -> int: + """Return the number of delimiters with the given `priority`. + + If no `priority` is passed, defaults to max priority on the line. + """ + if not self.delimiters: + return 0 + + priority = priority or self.max_delimiter_priority() + return sum(1 for p in self.delimiters.values() if p == priority) + + def maybe_increment_for_loop_variable(self, leaf: Leaf) -> bool: + """In a for loop, or comprehension, the variables are often unpacks. + + To avoid splitting on the comma in this situation, increase the depth of + tokens between `for` and `in`. + """ + if leaf.type == token.NAME and leaf.value == "for": + self.depth += 1 + self._for_loop_depths.append(self.depth) + return True + + return False + + def maybe_decrement_after_for_loop_variable(self, leaf: Leaf) -> bool: + """See `maybe_increment_for_loop_variable` above for explanation.""" + if ( + self._for_loop_depths + and self._for_loop_depths[-1] == self.depth + and leaf.type == token.NAME + and leaf.value == "in" + ): + self.depth -= 1 + self._for_loop_depths.pop() + return True + + return False + + def maybe_increment_lambda_arguments(self, leaf: Leaf) -> bool: + """In a lambda expression, there might be more than one argument. + + To avoid splitting on the comma in this situation, increase the depth of + tokens between `lambda` and `:`. + """ + if leaf.type == token.NAME and leaf.value == "lambda": + self.depth += 1 + self._lambda_argument_depths.append(self.depth) + return True + + return False + + def maybe_decrement_after_lambda_arguments(self, leaf: Leaf) -> bool: + """See `maybe_increment_lambda_arguments` above for explanation.""" + if ( + self._lambda_argument_depths + and self._lambda_argument_depths[-1] == self.depth + and leaf.type == token.COLON + ): + self.depth -= 1 + self._lambda_argument_depths.pop() + return True + + return False + + def get_open_lsqb(self) -> Optional[Leaf]: + """Return the most recent opening square bracket (if any).""" + return self.bracket_match.get((self.depth - 1, token.RSQB)) + + +def is_split_after_delimiter(leaf: Leaf) -> Priority: + """Return the priority of the `leaf` delimiter, given a line break after it. + + The delimiter priorities returned here are from those delimiters that would + cause a line break after themselves. + + Higher numbers are higher priority. + """ + if leaf.type == token.COMMA: + return COMMA_PRIORITY + + return 0 + + +def is_split_before_delimiter(leaf: Leaf, previous: Optional[Leaf] = None) -> Priority: + """Return the priority of the `leaf` delimiter, given a line break before it. + + The delimiter priorities returned here are from those delimiters that would + cause a line break before themselves. + + Higher numbers are higher priority. + """ + if is_vararg(leaf, within=VARARGS_PARENTS | UNPACKING_PARENTS): + # * and ** might also be MATH_OPERATORS but in this case they are not. + # Don't treat them as a delimiter. 
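A tiny sketch (not from the vendored source) of how these priority helpers answer queries for bare leaves, assuming `black.brackets` and the vendored `blib2to3` are importable:

    from blib2to3.pgen2 import token
    from blib2to3.pytree import Leaf
    from black.brackets import COMMA_PRIORITY, is_split_after_delimiter, is_split_before_delimiter

    comma = Leaf(token.COMMA, ",")
    print(is_split_after_delimiter(comma) == COMMA_PRIORITY)  # True: lines may break after commas
    plus = Leaf(token.PLUS, "+")
    print(is_split_before_delimiter(plus))  # 0: a detached leaf has no parent context to qualify it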
+ return 0 + + if ( + leaf.type == token.DOT + and leaf.parent + and leaf.parent.type not in {syms.import_from, syms.dotted_name} + and (previous is None or previous.type in CLOSING_BRACKETS) + ): + return DOT_PRIORITY + + if ( + leaf.type in MATH_OPERATORS + and leaf.parent + and leaf.parent.type not in {syms.factor, syms.star_expr} + ): + return MATH_PRIORITIES[leaf.type] + + if leaf.type in COMPARATORS: + return COMPARATOR_PRIORITY + + if ( + leaf.type == token.STRING + and previous is not None + and previous.type == token.STRING + ): + return STRING_PRIORITY + + if leaf.type not in {token.NAME, token.ASYNC}: + return 0 + + if ( + leaf.value == "for" + and leaf.parent + and leaf.parent.type in {syms.comp_for, syms.old_comp_for} + or leaf.type == token.ASYNC + ): + if ( + not isinstance(leaf.prev_sibling, Leaf) + or leaf.prev_sibling.value != "async" + ): + return COMPREHENSION_PRIORITY + + if ( + leaf.value == "if" + and leaf.parent + and leaf.parent.type in {syms.comp_if, syms.old_comp_if} + ): + return COMPREHENSION_PRIORITY + + if leaf.value in {"if", "else"} and leaf.parent and leaf.parent.type == syms.test: + return TERNARY_PRIORITY + + if leaf.value == "is": + return COMPARATOR_PRIORITY + + if ( + leaf.value == "in" + and leaf.parent + and leaf.parent.type in {syms.comp_op, syms.comparison} + and not ( + previous is not None + and previous.type == token.NAME + and previous.value == "not" + ) + ): + return COMPARATOR_PRIORITY + + if ( + leaf.value == "not" + and leaf.parent + and leaf.parent.type == syms.comp_op + and not ( + previous is not None + and previous.type == token.NAME + and previous.value == "is" + ) + ): + return COMPARATOR_PRIORITY + + if leaf.value in LOGIC_OPERATORS and leaf.parent: + return LOGIC_PRIORITY + + return 0 + + +def max_delimiter_priority_in_atom(node: LN) -> Priority: + """Return maximum delimiter priority inside `node`. + + This is specific to atoms with contents contained in a pair of parentheses. + If `node` isn't an atom or there are no enclosing parentheses, returns 0. + """ + if node.type != syms.atom: + return 0 + + first = node.children[0] + last = node.children[-1] + if not (first.type == token.LPAR and last.type == token.RPAR): + return 0 + + bt = BracketTracker() + for c in node.children[1:-1]: + if isinstance(c, Leaf): + bt.mark(c) + else: + for leaf in c.leaves(): + bt.mark(leaf) + try: + return bt.max_delimiter_priority() + + except ValueError: + return 0 + + +def get_leaves_inside_matching_brackets(leaves: Sequence[Leaf]) -> Set[LeafID]: + """Return leaves that are inside matching brackets. + + The input `leaves` can have non-matching brackets at the head or tail parts. + Matching brackets are included. + """ + try: + # Start with the first opening bracket and ignore closing brackets before. 
+ start_index = next( + i for i, l in enumerate(leaves) if l.type in OPENING_BRACKETS + ) + except StopIteration: + return set() + bracket_stack = [] + ids = set() + for i in range(start_index, len(leaves)): + leaf = leaves[i] + if leaf.type in OPENING_BRACKETS: + bracket_stack.append((BRACKET[leaf.type], i)) + if leaf.type in CLOSING_BRACKETS: + if bracket_stack and leaf.type == bracket_stack[-1][0]: + _, start = bracket_stack.pop() + for j in range(start, i + 1): + ids.add(id(leaves[j])) + else: + break + return ids diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/cache.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/cache.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..0e8309c7b Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/cache.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/cache.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/cache.py new file mode 100644 index 000000000..35bddb573 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/cache.py @@ -0,0 +1,150 @@ +"""Caching of formatted files with feature-based invalidation.""" + +import hashlib +import os +import pickle +import sys +import tempfile +from dataclasses import dataclass, field +from pathlib import Path +from typing import Dict, Iterable, NamedTuple, Set, Tuple + +from platformdirs import user_cache_dir + +from _black_version import version as __version__ +from black.mode import Mode +from black.output import err + +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing_extensions import Self + + +class FileData(NamedTuple): + st_mtime: float + st_size: int + hash: str + + +def get_cache_dir() -> Path: + """Get the cache directory used by black. + + Users can customize this directory on all systems using `BLACK_CACHE_DIR` + environment variable. By default, the cache directory is the user cache directory + under the black application. + + This result is immediately set to a constant `black.cache.CACHE_DIR` as to avoid + repeated calls. + """ + # NOTE: Function mostly exists as a clean way to test getting the cache directory. + default_cache_dir = user_cache_dir("black") + cache_dir = Path(os.environ.get("BLACK_CACHE_DIR", default_cache_dir)) + cache_dir = cache_dir / __version__ + return cache_dir + + +CACHE_DIR = get_cache_dir() + + +def get_cache_file(mode: Mode) -> Path: + return CACHE_DIR / f"cache.{mode.get_cache_key()}.pickle" + + +@dataclass +class Cache: + mode: Mode + cache_file: Path + file_data: Dict[str, FileData] = field(default_factory=dict) + + @classmethod + def read(cls, mode: Mode) -> Self: + """Read the cache if it exists and is well-formed. + + If it is not well-formed, the call to write later should + resolve the issue. 
+ """ + cache_file = get_cache_file(mode) + try: + exists = cache_file.exists() + except OSError as e: + # Likely file too long; see #4172 and #4174 + err(f"Unable to read cache file {cache_file} due to {e}") + return cls(mode, cache_file) + if not exists: + return cls(mode, cache_file) + + with cache_file.open("rb") as fobj: + try: + data: Dict[str, Tuple[float, int, str]] = pickle.load(fobj) + file_data = {k: FileData(*v) for k, v in data.items()} + except (pickle.UnpicklingError, ValueError, IndexError): + return cls(mode, cache_file) + + return cls(mode, cache_file, file_data) + + @staticmethod + def hash_digest(path: Path) -> str: + """Return hash digest for path.""" + + data = path.read_bytes() + return hashlib.sha256(data).hexdigest() + + @staticmethod + def get_file_data(path: Path) -> FileData: + """Return file data for path.""" + + stat = path.stat() + hash = Cache.hash_digest(path) + return FileData(stat.st_mtime, stat.st_size, hash) + + def is_changed(self, source: Path) -> bool: + """Check if source has changed compared to cached version.""" + res_src = source.resolve() + old = self.file_data.get(str(res_src)) + if old is None: + return True + + st = res_src.stat() + if st.st_size != old.st_size: + return True + if st.st_mtime != old.st_mtime: + new_hash = Cache.hash_digest(res_src) + if new_hash != old.hash: + return True + return False + + def filtered_cached(self, sources: Iterable[Path]) -> Tuple[Set[Path], Set[Path]]: + """Split an iterable of paths in `sources` into two sets. + + The first contains paths of files that modified on disk or are not in the + cache. The other contains paths to non-modified files. + """ + changed: Set[Path] = set() + done: Set[Path] = set() + for src in sources: + if self.is_changed(src): + changed.add(src) + else: + done.add(src) + return changed, done + + def write(self, sources: Iterable[Path]) -> None: + """Update the cache file data and write a new cache file.""" + self.file_data.update( + **{str(src.resolve()): Cache.get_file_data(src) for src in sources} + ) + try: + CACHE_DIR.mkdir(parents=True, exist_ok=True) + with tempfile.NamedTemporaryFile( + dir=str(self.cache_file.parent), delete=False + ) as f: + # We store raw tuples in the cache because pickling NamedTuples + # doesn't work with mypyc on Python 3.8, and because it's faster. 
+ data: Dict[str, Tuple[float, int, str]] = { + k: (*v,) for k, v in self.file_data.items() + } + pickle.dump(data, f, protocol=4) + os.replace(f.name, self.cache_file) + except OSError: + pass diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/comments.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/comments.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..a4c029cd1 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/comments.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/comments.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/comments.py new file mode 100644 index 000000000..ea54e2468 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/comments.py @@ -0,0 +1,415 @@ +import re +from dataclasses import dataclass +from functools import lru_cache +from typing import Collection, Final, Iterator, List, Optional, Tuple, Union + +from black.mode import Mode, Preview +from black.nodes import ( + CLOSING_BRACKETS, + STANDALONE_COMMENT, + WHITESPACE, + container_of, + first_leaf_of, + make_simple_prefix, + preceding_leaf, + syms, +) +from blib2to3.pgen2 import token +from blib2to3.pytree import Leaf, Node + +# types +LN = Union[Leaf, Node] + +FMT_OFF: Final = {"# fmt: off", "# fmt:off", "# yapf: disable"} +FMT_SKIP: Final = {"# fmt: skip", "# fmt:skip"} +FMT_ON: Final = {"# fmt: on", "# fmt:on", "# yapf: enable"} + +COMMENT_EXCEPTIONS = " !:#'" +_COMMENT_PREFIX = "# " +_COMMENT_LIST_SEPARATOR = ";" + + +@dataclass +class ProtoComment: + """Describes a piece of syntax that is a comment. + + It's not a :class:`blib2to3.pytree.Leaf` so that: + + * it can be cached (`Leaf` objects should not be reused more than once as + they store their lineno, column, prefix, and parent information); + * `newlines` and `consumed` fields are kept separate from the `value`. This + simplifies handling of special marker comments like ``# fmt: off/on``. + """ + + type: int # token.COMMENT or STANDALONE_COMMENT + value: str # content of the comment + newlines: int # how many newlines before the comment + consumed: int # how many characters of the original leaf's prefix did we consume + form_feed: bool # is there a form feed before the comment + leading_whitespace: str # leading whitespace before the comment, if any + + +def generate_comments(leaf: LN) -> Iterator[Leaf]: + """Clean the prefix of the `leaf` and generate comments from it, if any. + + Comments in lib2to3 are shoved into the whitespace prefix. This happens + in `pgen2/driver.py:Driver.parse_tokens()`. This was a brilliant implementation + move because it does away with modifying the grammar to include all the + possible places in which comments can be placed. + + The sad consequence for us though is that comments don't "belong" anywhere. + This is why this function generates simple parentless Leaf objects for + comments. We simply don't know what the correct parent should be. + + No matter though, we can live without this. We really only need to + differentiate between inline and standalone comments. The latter don't + share the line with any code. + + Inline comments are emitted as regular token.COMMENT leaves. Standalone + are emitted with a fake STANDALONE_COMMENT token identifier. 
+ """ + total_consumed = 0 + for pc in list_comments(leaf.prefix, is_endmarker=leaf.type == token.ENDMARKER): + total_consumed = pc.consumed + prefix = make_simple_prefix(pc.newlines, pc.form_feed) + yield Leaf(pc.type, pc.value, prefix=prefix) + normalize_trailing_prefix(leaf, total_consumed) + + +@lru_cache(maxsize=4096) +def list_comments(prefix: str, *, is_endmarker: bool) -> List[ProtoComment]: + """Return a list of :class:`ProtoComment` objects parsed from the given `prefix`.""" + result: List[ProtoComment] = [] + if not prefix or "#" not in prefix: + return result + + consumed = 0 + nlines = 0 + ignored_lines = 0 + form_feed = False + for index, full_line in enumerate(re.split("\r?\n", prefix)): + consumed += len(full_line) + 1 # adding the length of the split '\n' + match = re.match(r"^(\s*)(\S.*|)$", full_line) + assert match + whitespace, line = match.groups() + if not line: + nlines += 1 + if "\f" in full_line: + form_feed = True + if not line.startswith("#"): + # Escaped newlines outside of a comment are not really newlines at + # all. We treat a single-line comment following an escaped newline + # as a simple trailing comment. + if line.endswith("\\"): + ignored_lines += 1 + continue + + if index == ignored_lines and not is_endmarker: + comment_type = token.COMMENT # simple trailing comment + else: + comment_type = STANDALONE_COMMENT + comment = make_comment(line) + result.append( + ProtoComment( + type=comment_type, + value=comment, + newlines=nlines, + consumed=consumed, + form_feed=form_feed, + leading_whitespace=whitespace, + ) + ) + form_feed = False + nlines = 0 + return result + + +def normalize_trailing_prefix(leaf: LN, total_consumed: int) -> None: + """Normalize the prefix that's left over after generating comments. + + Note: don't use backslashes for formatting or you'll lose your voting rights. + """ + remainder = leaf.prefix[total_consumed:] + if "\\" not in remainder: + nl_count = remainder.count("\n") + form_feed = "\f" in remainder and remainder.endswith("\n") + leaf.prefix = make_simple_prefix(nl_count, form_feed) + return + + leaf.prefix = "" + + +def make_comment(content: str) -> str: + """Return a consistently formatted comment from the given `content` string. + + All comments (except for "##", "#!", "#:", '#'") should have a single + space between the hash sign and the content. + + If `content` didn't start with a hash sign, one is provided. + """ + content = content.rstrip() + if not content: + return "#" + + if content[0] == "#": + content = content[1:] + NON_BREAKING_SPACE = " " + if ( + content + and content[0] == NON_BREAKING_SPACE + and not content.lstrip().startswith("type:") + ): + content = " " + content[1:] # Replace NBSP by a simple space + if content and content[0] not in COMMENT_EXCEPTIONS: + content = " " + content + return "#" + content + + +def normalize_fmt_off( + node: Node, mode: Mode, lines: Collection[Tuple[int, int]] +) -> None: + """Convert content between `# fmt: off`/`# fmt: on` into standalone comments.""" + try_again = True + while try_again: + try_again = convert_one_fmt_off_pair(node, mode, lines) + + +def convert_one_fmt_off_pair( + node: Node, mode: Mode, lines: Collection[Tuple[int, int]] +) -> bool: + """Convert content of a single `# fmt: off`/`# fmt: on` into a standalone comment. + + Returns True if a pair was converted. 
+ """ + for leaf in node.leaves(): + previous_consumed = 0 + for comment in list_comments(leaf.prefix, is_endmarker=False): + should_pass_fmt = comment.value in FMT_OFF or _contains_fmt_skip_comment( + comment.value, mode + ) + if not should_pass_fmt: + previous_consumed = comment.consumed + continue + # We only want standalone comments. If there's no previous leaf or + # the previous leaf is indentation, it's a standalone comment in + # disguise. + if should_pass_fmt and comment.type != STANDALONE_COMMENT: + prev = preceding_leaf(leaf) + if prev: + if comment.value in FMT_OFF and prev.type not in WHITESPACE: + continue + if ( + _contains_fmt_skip_comment(comment.value, mode) + and prev.type in WHITESPACE + ): + continue + + ignored_nodes = list(generate_ignored_nodes(leaf, comment, mode)) + if not ignored_nodes: + continue + + first = ignored_nodes[0] # Can be a container node with the `leaf`. + parent = first.parent + prefix = first.prefix + if comment.value in FMT_OFF: + first.prefix = prefix[comment.consumed :] + if _contains_fmt_skip_comment(comment.value, mode): + first.prefix = "" + standalone_comment_prefix = prefix + else: + standalone_comment_prefix = ( + prefix[:previous_consumed] + "\n" * comment.newlines + ) + hidden_value = "".join(str(n) for n in ignored_nodes) + comment_lineno = leaf.lineno - comment.newlines + if comment.value in FMT_OFF: + fmt_off_prefix = "" + if len(lines) > 0 and not any( + line[0] <= comment_lineno <= line[1] for line in lines + ): + # keeping indentation of comment by preserving original whitespaces. + fmt_off_prefix = prefix.split(comment.value)[0] + if "\n" in fmt_off_prefix: + fmt_off_prefix = fmt_off_prefix.split("\n")[-1] + standalone_comment_prefix += fmt_off_prefix + hidden_value = comment.value + "\n" + hidden_value + if _contains_fmt_skip_comment(comment.value, mode): + hidden_value += ( + comment.leading_whitespace + if Preview.no_normalize_fmt_skip_whitespace in mode + else " " + ) + comment.value + if hidden_value.endswith("\n"): + # That happens when one of the `ignored_nodes` ended with a NEWLINE + # leaf (possibly followed by a DEDENT). + hidden_value = hidden_value[:-1] + first_idx: Optional[int] = None + for ignored in ignored_nodes: + index = ignored.remove() + if first_idx is None: + first_idx = index + assert parent is not None, "INTERNAL ERROR: fmt: on/off handling (1)" + assert first_idx is not None, "INTERNAL ERROR: fmt: on/off handling (2)" + parent.insert_child( + first_idx, + Leaf( + STANDALONE_COMMENT, + hidden_value, + prefix=standalone_comment_prefix, + fmt_pass_converted_first_leaf=first_leaf_of(first), + ), + ) + return True + + return False + + +def generate_ignored_nodes( + leaf: Leaf, comment: ProtoComment, mode: Mode +) -> Iterator[LN]: + """Starting from the container of `leaf`, generate all leaves until `# fmt: on`. + + If comment is skip, returns leaf only. + Stops at the end of the block. + """ + if _contains_fmt_skip_comment(comment.value, mode): + yield from _generate_ignored_nodes_from_fmt_skip(leaf, comment) + return + container: Optional[LN] = container_of(leaf) + while container is not None and container.type != token.ENDMARKER: + if is_fmt_on(container): + return + + # fix for fmt: on in children + if children_contains_fmt_on(container): + for index, child in enumerate(container.children): + if isinstance(child, Leaf) and is_fmt_on(child): + if child.type in CLOSING_BRACKETS: + # This means `# fmt: on` is placed at a different bracket level + # than `# fmt: off`. 
This is an invalid use, but as a courtesy, + # we include this closing bracket in the ignored nodes. + # The alternative is to fail the formatting. + yield child + return + if ( + child.type == token.INDENT + and index < len(container.children) - 1 + and children_contains_fmt_on(container.children[index + 1]) + ): + # This means `# fmt: on` is placed right after an indentation + # level, and we shouldn't swallow the previous INDENT token. + return + if children_contains_fmt_on(child): + return + yield child + else: + if container.type == token.DEDENT and container.next_sibling is None: + # This can happen when there is no matching `# fmt: on` comment at the + # same level as `# fmt: on`. We need to keep this DEDENT. + return + yield container + container = container.next_sibling + + +def _generate_ignored_nodes_from_fmt_skip( + leaf: Leaf, comment: ProtoComment +) -> Iterator[LN]: + """Generate all leaves that should be ignored by the `# fmt: skip` from `leaf`.""" + prev_sibling = leaf.prev_sibling + parent = leaf.parent + # Need to properly format the leaf prefix to compare it to comment.value, + # which is also formatted + comments = list_comments(leaf.prefix, is_endmarker=False) + if not comments or comment.value != comments[0].value: + return + if prev_sibling is not None: + leaf.prefix = "" + siblings = [prev_sibling] + while "\n" not in prev_sibling.prefix and prev_sibling.prev_sibling is not None: + prev_sibling = prev_sibling.prev_sibling + siblings.insert(0, prev_sibling) + yield from siblings + elif ( + parent is not None and parent.type == syms.suite and leaf.type == token.NEWLINE + ): + # The `# fmt: skip` is on the colon line of the if/while/def/class/... + # statements. The ignored nodes should be previous siblings of the + # parent suite node. + leaf.prefix = "" + ignored_nodes: List[LN] = [] + parent_sibling = parent.prev_sibling + while parent_sibling is not None and parent_sibling.type != syms.suite: + ignored_nodes.insert(0, parent_sibling) + parent_sibling = parent_sibling.prev_sibling + # Special case for `async_stmt` where the ASYNC token is on the + # grandparent node. + grandparent = parent.parent + if ( + grandparent is not None + and grandparent.prev_sibling is not None + and grandparent.prev_sibling.type == token.ASYNC + ): + ignored_nodes.insert(0, grandparent.prev_sibling) + yield from iter(ignored_nodes) + + +def is_fmt_on(container: LN) -> bool: + """Determine whether formatting is switched on within a container. + Determined by whether the last `# fmt:` comment is `on` or `off`. + """ + fmt_on = False + for comment in list_comments(container.prefix, is_endmarker=False): + if comment.value in FMT_ON: + fmt_on = True + elif comment.value in FMT_OFF: + fmt_on = False + return fmt_on + + +def children_contains_fmt_on(container: LN) -> bool: + """Determine if children have formatting switched on.""" + for child in container.children: + leaf = first_leaf_of(child) + if leaf is not None and is_fmt_on(leaf): + return True + + return False + + +def contains_pragma_comment(comment_list: List[Leaf]) -> bool: + """ + Returns: + True iff one of the comments in @comment_list is a pragma used by one + of the more common static analysis tools for python (e.g. mypy, flake8, + pylint). 
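+
+ Illustrative matches, based on the prefixes checked below: "# type: ignore",
+ "# noqa: E501", "# pylint: disable=unused-import".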
+ """ + for comment in comment_list: + if comment.value.startswith(("# type:", "# noqa", "# pylint:")): + return True + + return False + + +def _contains_fmt_skip_comment(comment_line: str, mode: Mode) -> bool: + """ + Checks if the given comment contains FMT_SKIP alone or paired with other comments. + Matching styles: + # fmt:skip <-- single comment + # noqa:XXX # fmt:skip # a nice line <-- multiple comments (Preview) + # pylint:XXX; fmt:skip <-- list of comments (; separated, Preview) + """ + semantic_comment_blocks = [ + comment_line, + *[ + _COMMENT_PREFIX + comment.strip() + for comment in comment_line.split(_COMMENT_PREFIX)[1:] + ], + *[ + _COMMENT_PREFIX + comment.strip() + for comment in comment_line.strip(_COMMENT_PREFIX).split( + _COMMENT_LIST_SEPARATOR + ) + ], + ] + + return any(comment in FMT_SKIP for comment in semantic_comment_blocks) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/concurrency.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/concurrency.py new file mode 100644 index 000000000..ff0a8f5fd --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/concurrency.py @@ -0,0 +1,190 @@ +""" +Formatting many files at once via multiprocessing. Contains entrypoint and utilities. + +NOTE: this module is only imported if we need to format several files at once. +""" + +import asyncio +import logging +import os +import signal +import sys +import traceback +from concurrent.futures import Executor, ProcessPoolExecutor, ThreadPoolExecutor +from multiprocessing import Manager +from pathlib import Path +from typing import Any, Iterable, Optional, Set + +from mypy_extensions import mypyc_attr + +from black import WriteBack, format_file_in_place +from black.cache import Cache +from black.mode import Mode +from black.output import err +from black.report import Changed, Report + + +def maybe_install_uvloop() -> None: + """If our environment has uvloop installed we use it. + + This is called only from command-line entry points to avoid + interfering with the parent process if Black is used as a library. + """ + try: + import uvloop + + uvloop.install() + except ImportError: + pass + + +def cancel(tasks: Iterable["asyncio.Future[Any]"]) -> None: + """asyncio signal handler that cancels all `tasks` and reports to stderr.""" + err("Aborted!") + for task in tasks: + task.cancel() + + +def shutdown(loop: asyncio.AbstractEventLoop) -> None: + """Cancel all pending tasks on `loop`, wait for them, and close the loop.""" + try: + # This part is borrowed from asyncio/runners.py in Python 3.7b2. + to_cancel = [task for task in asyncio.all_tasks(loop) if not task.done()] + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True)) + finally: + # `concurrent.futures.Future` objects cannot be cancelled once they + # are already running. There might be some when the `shutdown()` happened. + # Silence their logger's spew about the event loop being closed. + cf_logger = logging.getLogger("concurrent.futures") + cf_logger.setLevel(logging.CRITICAL) + loop.close() + + +# diff-shades depends on being to monkeypatch this function to operate. I know it's +# not ideal, but this shouldn't cause any issues ... hopefully. 
~ichard26 +@mypyc_attr(patchable=True) +def reformat_many( + sources: Set[Path], + fast: bool, + write_back: WriteBack, + mode: Mode, + report: Report, + workers: Optional[int], +) -> None: + """Reformat multiple files using a ProcessPoolExecutor.""" + maybe_install_uvloop() + + executor: Executor + if workers is None: + workers = int(os.environ.get("BLACK_NUM_WORKERS", 0)) + workers = workers or os.cpu_count() or 1 + if sys.platform == "win32": + # Work around https://bugs.python.org/issue26903 + workers = min(workers, 60) + try: + executor = ProcessPoolExecutor(max_workers=workers) + except (ImportError, NotImplementedError, OSError): + # we arrive here if the underlying system does not support multi-processing + # like in AWS Lambda or Termux, in which case we gracefully fallback to + # a ThreadPoolExecutor with just a single worker (more workers would not do us + # any good due to the Global Interpreter Lock) + executor = ThreadPoolExecutor(max_workers=1) + + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + try: + loop.run_until_complete( + schedule_formatting( + sources=sources, + fast=fast, + write_back=write_back, + mode=mode, + report=report, + loop=loop, + executor=executor, + ) + ) + finally: + try: + shutdown(loop) + finally: + asyncio.set_event_loop(None) + if executor is not None: + executor.shutdown() + + +async def schedule_formatting( + sources: Set[Path], + fast: bool, + write_back: WriteBack, + mode: Mode, + report: "Report", + loop: asyncio.AbstractEventLoop, + executor: "Executor", +) -> None: + """Run formatting of `sources` in parallel using the provided `executor`. + + (Use ProcessPoolExecutors for actual parallelism.) + + `write_back`, `fast`, and `mode` options are passed to + :func:`format_file_in_place`. + """ + cache = Cache.read(mode) + if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF): + sources, cached = cache.filtered_cached(sources) + for src in sorted(cached): + report.done(src, Changed.CACHED) + if not sources: + return + + cancelled = [] + sources_to_cache = [] + lock = None + if write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF): + # For diff output, we need locks to ensure we don't interleave output + # from different processes. + manager = Manager() + lock = manager.Lock() + tasks = { + asyncio.ensure_future( + loop.run_in_executor( + executor, format_file_in_place, src, fast, mode, write_back, lock + ) + ): src + for src in sorted(sources) + } + pending = tasks.keys() + try: + loop.add_signal_handler(signal.SIGINT, cancel, pending) + loop.add_signal_handler(signal.SIGTERM, cancel, pending) + except NotImplementedError: + # There are no good alternatives for these on Windows. + pass + while pending: + done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED) + for task in done: + src = tasks.pop(task) + if task.cancelled(): + cancelled.append(task) + elif exc := task.exception(): + if report.verbose: + traceback.print_exception(type(exc), exc, exc.__traceback__) + report.failed(src, str(exc)) + else: + changed = Changed.YES if task.result() else Changed.NO + # If the file was written back or was successfully checked as + # well-formatted, store this information in the cache. 
+ if write_back is WriteBack.YES or ( + write_back is WriteBack.CHECK and changed is Changed.NO + ): + sources_to_cache.append(src) + report.done(src, changed) + if cancelled: + await asyncio.gather(*cancelled, return_exceptions=True) + if sources_to_cache: + cache.write(sources_to_cache) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/const.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/const.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..d09c392bf Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/const.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/const.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/const.py new file mode 100644 index 000000000..ee466679c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/const.py @@ -0,0 +1,4 @@ +DEFAULT_LINE_LENGTH = 88 +DEFAULT_EXCLUDES = r"/(\.direnv|\.eggs|\.git|\.hg|\.ipynb_checkpoints|\.mypy_cache|\.nox|\.pytest_cache|\.ruff_cache|\.tox|\.svn|\.venv|\.vscode|__pypackages__|_build|buck-out|build|dist|venv)/" # noqa: B950 +DEFAULT_INCLUDES = r"(\.pyi?|\.ipynb)$" +STDIN_PLACEHOLDER = "__BLACK_STDIN_FILENAME__" diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/debug.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/debug.py new file mode 100644 index 000000000..cebc48765 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/debug.py @@ -0,0 +1,54 @@ +from dataclasses import dataclass, field +from typing import Any, Iterator, List, TypeVar, Union + +from black.nodes import Visitor +from black.output import out +from black.parsing import lib2to3_parse +from blib2to3.pgen2 import token +from blib2to3.pytree import Leaf, Node, type_repr + +LN = Union[Leaf, Node] +T = TypeVar("T") + + +@dataclass +class DebugVisitor(Visitor[T]): + tree_depth: int = 0 + list_output: List[str] = field(default_factory=list) + print_output: bool = True + + def out(self, message: str, *args: Any, **kwargs: Any) -> None: + self.list_output.append(message) + if self.print_output: + out(message, *args, **kwargs) + + def visit_default(self, node: LN) -> Iterator[T]: + indent = " " * (2 * self.tree_depth) + if isinstance(node, Node): + _type = type_repr(node.type) + self.out(f"{indent}{_type}", fg="yellow") + self.tree_depth += 1 + for child in node.children: + yield from self.visit(child) + + self.tree_depth -= 1 + self.out(f"{indent}/{_type}", fg="yellow", bold=False) + else: + _type = token.tok_name.get(node.type, str(node.type)) + self.out(f"{indent}{_type}", fg="blue", nl=False) + if node.prefix: + # We don't have to handle prefixes for `Node` objects since + # that delegates to the first child anyway. + self.out(f" {node.prefix!r}", fg="green", bold=False, nl=False) + self.out(f" {node.value!r}", fg="blue", bold=False) + + @classmethod + def show(cls, code: Union[str, Leaf, Node]) -> None: + """Pretty-print the lib2to3 AST of a given string of `code`. + + Convenience method for debugging. 
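+
+ Illustrative use (any string that blib2to3 can parse works):
+
+ DebugVisitor.show("x = 1\n")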
+ """ + v: DebugVisitor[None] = DebugVisitor() + if isinstance(code, str): + code = lib2to3_parse(code) + list(v.visit(code)) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/files.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/files.py new file mode 100644 index 000000000..c0cadbfd8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/files.py @@ -0,0 +1,433 @@ +import io +import os +import sys +from functools import lru_cache +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterable, + Iterator, + List, + Optional, + Pattern, + Sequence, + Tuple, + Union, +) + +from mypy_extensions import mypyc_attr +from packaging.specifiers import InvalidSpecifier, Specifier, SpecifierSet +from packaging.version import InvalidVersion, Version +from pathspec import PathSpec +from pathspec.patterns.gitwildmatch import GitWildMatchPatternError + +if sys.version_info >= (3, 11): + try: + import tomllib + except ImportError: + # Help users on older alphas + if not TYPE_CHECKING: + import tomli as tomllib +else: + import tomli as tomllib + +from black.handle_ipynb_magics import jupyter_dependencies_are_installed +from black.mode import TargetVersion +from black.output import err +from black.report import Report + +if TYPE_CHECKING: + import colorama # noqa: F401 + + +@lru_cache +def _load_toml(path: Union[Path, str]) -> Dict[str, Any]: + with open(path, "rb") as f: + return tomllib.load(f) + + +@lru_cache +def _cached_resolve(path: Path) -> Path: + return path.resolve() + + +@lru_cache +def find_project_root( + srcs: Sequence[str], stdin_filename: Optional[str] = None +) -> Tuple[Path, str]: + """Return a directory containing .git, .hg, or pyproject.toml. + + That directory will be a common parent of all files and directories + passed in `srcs`. + + If no directory in the tree contains a marker that would specify it's the + project root, the root of the file system is returned. + + Returns a two-tuple with the first element as the project root path and + the second element as a string describing the method by which the + project root was discovered. + """ + if stdin_filename is not None: + srcs = tuple(stdin_filename if s == "-" else s for s in srcs) + if not srcs: + srcs = [str(_cached_resolve(Path.cwd()))] + + path_srcs = [_cached_resolve(Path(Path.cwd(), src)) for src in srcs] + + # A list of lists of parents for each 'src'. 
'src' is included as a + # "parent" of itself if it is a directory + src_parents = [ + list(path.parents) + ([path] if path.is_dir() else []) for path in path_srcs + ] + + common_base = max( + set.intersection(*(set(parents) for parents in src_parents)), + key=lambda path: path.parts, + ) + + for directory in (common_base, *common_base.parents): + if (directory / ".git").exists(): + return directory, ".git directory" + + if (directory / ".hg").is_dir(): + return directory, ".hg directory" + + if (directory / "pyproject.toml").is_file(): + pyproject_toml = _load_toml(directory / "pyproject.toml") + if "black" in pyproject_toml.get("tool", {}): + return directory, "pyproject.toml" + + return directory, "file system root" + + +def find_pyproject_toml( + path_search_start: Tuple[str, ...], stdin_filename: Optional[str] = None +) -> Optional[str]: + """Find the absolute filepath to a pyproject.toml if it exists""" + path_project_root, _ = find_project_root(path_search_start, stdin_filename) + path_pyproject_toml = path_project_root / "pyproject.toml" + if path_pyproject_toml.is_file(): + return str(path_pyproject_toml) + + try: + path_user_pyproject_toml = find_user_pyproject_toml() + return ( + str(path_user_pyproject_toml) + if path_user_pyproject_toml.is_file() + else None + ) + except (PermissionError, RuntimeError) as e: + # We do not have access to the user-level config directory, so ignore it. + err(f"Ignoring user configuration directory due to {e!r}") + return None + + +@mypyc_attr(patchable=True) +def parse_pyproject_toml(path_config: str) -> Dict[str, Any]: + """Parse a pyproject toml file, pulling out relevant parts for Black. + + If parsing fails, will raise a tomllib.TOMLDecodeError. + """ + pyproject_toml = _load_toml(path_config) + config: Dict[str, Any] = pyproject_toml.get("tool", {}).get("black", {}) + config = {k.replace("--", "").replace("-", "_"): v for k, v in config.items()} + + if "target_version" not in config: + inferred_target_version = infer_target_version(pyproject_toml) + if inferred_target_version is not None: + config["target_version"] = [v.name.lower() for v in inferred_target_version] + + return config + + +def infer_target_version( + pyproject_toml: Dict[str, Any], +) -> Optional[List[TargetVersion]]: + """Infer Black's target version from the project metadata in pyproject.toml. + + Supports the PyPA standard format (PEP 621): + https://packaging.python.org/en/latest/specifications/declaring-project-metadata/#requires-python + + If the target version cannot be inferred, returns None. + """ + project_metadata = pyproject_toml.get("project", {}) + requires_python = project_metadata.get("requires-python", None) + if requires_python is not None: + try: + return parse_req_python_version(requires_python) + except InvalidVersion: + pass + try: + return parse_req_python_specifier(requires_python) + except (InvalidSpecifier, InvalidVersion): + pass + + return None + + +def parse_req_python_version(requires_python: str) -> Optional[List[TargetVersion]]: + """Parse a version string (i.e. ``"3.7"``) to a list of TargetVersion. + + If parsing fails, will raise a packaging.version.InvalidVersion error. + If the parsed version cannot be mapped to a valid TargetVersion, returns None. 
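+
+ Illustrative (assuming TargetVersion members are named after 3.x minor
+ versions, as the mapping in `parse_req_python_specifier` below suggests):
+ "3.8" yields [TargetVersion.PY38], while "2.7" yields None because only
+ Python 3 releases are mapped.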
+ """ + version = Version(requires_python) + if version.release[0] != 3: + return None + try: + return [TargetVersion(version.release[1])] + except (IndexError, ValueError): + return None + + +def parse_req_python_specifier(requires_python: str) -> Optional[List[TargetVersion]]: + """Parse a specifier string (i.e. ``">=3.7,<3.10"``) to a list of TargetVersion. + + If parsing fails, will raise a packaging.specifiers.InvalidSpecifier error. + If the parsed specifier cannot be mapped to a valid TargetVersion, returns None. + """ + specifier_set = strip_specifier_set(SpecifierSet(requires_python)) + if not specifier_set: + return None + + target_version_map = {f"3.{v.value}": v for v in TargetVersion} + compatible_versions: List[str] = list(specifier_set.filter(target_version_map)) + if compatible_versions: + return [target_version_map[v] for v in compatible_versions] + return None + + +def strip_specifier_set(specifier_set: SpecifierSet) -> SpecifierSet: + """Strip minor versions for some specifiers in the specifier set. + + For background on version specifiers, see PEP 440: + https://peps.python.org/pep-0440/#version-specifiers + """ + specifiers = [] + for s in specifier_set: + if "*" in str(s): + specifiers.append(s) + elif s.operator in ["~=", "==", ">=", "==="]: + version = Version(s.version) + stripped = Specifier(f"{s.operator}{version.major}.{version.minor}") + specifiers.append(stripped) + elif s.operator == ">": + version = Version(s.version) + if len(version.release) > 2: + s = Specifier(f">={version.major}.{version.minor}") + specifiers.append(s) + else: + specifiers.append(s) + + return SpecifierSet(",".join(str(s) for s in specifiers)) + + +@lru_cache +def find_user_pyproject_toml() -> Path: + r"""Return the path to the top-level user configuration for black. + + This looks for ~\.black on Windows and ~/.config/black on Linux and other + Unix systems. + + May raise: + - RuntimeError: if the current user has no homedir + - PermissionError: if the current process cannot access the user's homedir + """ + if sys.platform == "win32": + # Windows + user_config_path = Path.home() / ".black" + else: + config_root = os.environ.get("XDG_CONFIG_HOME", "~/.config") + user_config_path = Path(config_root).expanduser() / "black" + return _cached_resolve(user_config_path) + + +@lru_cache +def get_gitignore(root: Path) -> PathSpec: + """Return a PathSpec matching gitignore content if present.""" + gitignore = root / ".gitignore" + lines: List[str] = [] + if gitignore.is_file(): + with gitignore.open(encoding="utf-8") as gf: + lines = gf.readlines() + try: + return PathSpec.from_lines("gitwildmatch", lines) + except GitWildMatchPatternError as e: + err(f"Could not parse {gitignore}: {e}") + raise + + +def resolves_outside_root_or_cannot_stat( + path: Path, + root: Path, + report: Optional[Report] = None, +) -> bool: + """ + Returns whether the path is a symbolic link that points outside the + root directory. Also returns True if we failed to resolve the path. 
+ """ + try: + if sys.version_info < (3, 8, 6): + path = path.absolute() # https://bugs.python.org/issue33660 + resolved_path = _cached_resolve(path) + except OSError as e: + if report: + report.path_ignored(path, f"cannot be read because {e}") + return True + try: + resolved_path.relative_to(root) + except ValueError: + if report: + report.path_ignored(path, f"is a symbolic link that points outside {root}") + return True + return False + + +def best_effort_relative_path(path: Path, root: Path) -> Path: + # Precondition: resolves_outside_root_or_cannot_stat(path, root) is False + try: + return path.absolute().relative_to(root) + except ValueError: + pass + root_parent = next((p for p in path.parents if _cached_resolve(p) == root), None) + if root_parent is not None: + return path.relative_to(root_parent) + # something adversarial, fallback to path guaranteed by precondition + return _cached_resolve(path).relative_to(root) + + +def _path_is_ignored( + root_relative_path: str, + root: Path, + gitignore_dict: Dict[Path, PathSpec], +) -> bool: + path = root / root_relative_path + # Note that this logic is sensitive to the ordering of gitignore_dict. Callers must + # ensure that gitignore_dict is ordered from least specific to most specific. + for gitignore_path, pattern in gitignore_dict.items(): + try: + relative_path = path.relative_to(gitignore_path).as_posix() + except ValueError: + break + if pattern.match_file(relative_path): + return True + return False + + +def path_is_excluded( + normalized_path: str, + pattern: Optional[Pattern[str]], +) -> bool: + match = pattern.search(normalized_path) if pattern else None + return bool(match and match.group(0)) + + +def gen_python_files( + paths: Iterable[Path], + root: Path, + include: Pattern[str], + exclude: Pattern[str], + extend_exclude: Optional[Pattern[str]], + force_exclude: Optional[Pattern[str]], + report: Report, + gitignore_dict: Optional[Dict[Path, PathSpec]], + *, + verbose: bool, + quiet: bool, +) -> Iterator[Path]: + """Generate all files under `path` whose paths are not excluded by the + `exclude_regex`, `extend_exclude`, or `force_exclude` regexes, + but are included by the `include` regex. + + Symbolic links pointing outside of the `root` directory are ignored. + + `report` is where output about exclusions goes. + """ + + assert root.is_absolute(), f"INTERNAL ERROR: `root` must be absolute but is {root}" + for child in paths: + assert child.is_absolute() + root_relative_path = child.relative_to(root).as_posix() + + # First ignore files matching .gitignore, if passed + if gitignore_dict and _path_is_ignored( + root_relative_path, root, gitignore_dict + ): + report.path_ignored(child, "matches a .gitignore file content") + continue + + # Then ignore with `--exclude` `--extend-exclude` and `--force-exclude` options. 
+ root_relative_path = "/" + root_relative_path + if child.is_dir(): + root_relative_path += "/" + + if path_is_excluded(root_relative_path, exclude): + report.path_ignored(child, "matches the --exclude regular expression") + continue + + if path_is_excluded(root_relative_path, extend_exclude): + report.path_ignored( + child, "matches the --extend-exclude regular expression" + ) + continue + + if path_is_excluded(root_relative_path, force_exclude): + report.path_ignored(child, "matches the --force-exclude regular expression") + continue + + if resolves_outside_root_or_cannot_stat(child, root, report): + continue + + if child.is_dir(): + # If gitignore is None, gitignore usage is disabled, while a Falsey + # gitignore is when the directory doesn't have a .gitignore file. + if gitignore_dict is not None: + new_gitignore_dict = { + **gitignore_dict, + root / child: get_gitignore(child), + } + else: + new_gitignore_dict = None + yield from gen_python_files( + child.iterdir(), + root, + include, + exclude, + extend_exclude, + force_exclude, + report, + new_gitignore_dict, + verbose=verbose, + quiet=quiet, + ) + + elif child.is_file(): + if child.suffix == ".ipynb" and not jupyter_dependencies_are_installed( + warn=verbose or not quiet + ): + continue + include_match = include.search(root_relative_path) if include else True + if include_match: + yield child + + +def wrap_stream_for_windows( + f: io.TextIOWrapper, +) -> Union[io.TextIOWrapper, "colorama.AnsiToWin32"]: + """ + Wrap stream with colorama's wrap_stream so colors are shown on Windows. + + If `colorama` is unavailable, the original stream is returned unmodified. + Otherwise, the `wrap_stream()` function determines whether the stream needs + to be wrapped for a Windows environment and will accordingly either return + an `AnsiToWin32` wrapper or the original stream. + """ + try: + from colorama.initialise import wrap_stream + except ImportError: + return f + else: + # Set `strip=False` to avoid needing to modify test_express_diff_with_color. 
+ return wrap_stream(f, convert=None, strip=False, autoreset=False, wrap=True) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/handle_ipynb_magics.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/handle_ipynb_magics.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..9cb97db80 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/handle_ipynb_magics.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/handle_ipynb_magics.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/handle_ipynb_magics.py new file mode 100644 index 000000000..5b2847cb0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/handle_ipynb_magics.py @@ -0,0 +1,452 @@ +"""Functions to process IPython magics with.""" + +import ast +import collections +import dataclasses +import secrets +import sys +from functools import lru_cache +from importlib.util import find_spec +from typing import Dict, List, Optional, Tuple + +if sys.version_info >= (3, 10): + from typing import TypeGuard +else: + from typing_extensions import TypeGuard + +from black.output import out +from black.report import NothingChanged + +TRANSFORMED_MAGICS = frozenset(( + "get_ipython().run_cell_magic", + "get_ipython().system", + "get_ipython().getoutput", + "get_ipython().run_line_magic", +)) +TOKENS_TO_IGNORE = frozenset(( + "ENDMARKER", + "NL", + "NEWLINE", + "COMMENT", + "DEDENT", + "UNIMPORTANT_WS", + "ESCAPED_NL", +)) +PYTHON_CELL_MAGICS = frozenset(( + "capture", + "prun", + "pypy", + "python", + "python3", + "time", + "timeit", +)) +TOKEN_HEX = secrets.token_hex + + +@dataclasses.dataclass(frozen=True) +class Replacement: + mask: str + src: str + + +@lru_cache +def jupyter_dependencies_are_installed(*, warn: bool) -> bool: + installed = ( + find_spec("tokenize_rt") is not None and find_spec("IPython") is not None + ) + if not installed and warn: + msg = ( + "Skipping .ipynb files as Jupyter dependencies are not installed.\n" + 'You can fix this by running ``pip install "black[jupyter]"``' + ) + out(msg) + return installed + + +def remove_trailing_semicolon(src: str) -> Tuple[str, bool]: + """Remove trailing semicolon from Jupyter notebook cell. + + For example, + + fig, ax = plt.subplots() + ax.plot(x_data, y_data); # plot data + + would become + + fig, ax = plt.subplots() + ax.plot(x_data, y_data) # plot data + + Mirrors the logic in `quiet` from `IPython.core.displayhook`, but uses + ``tokenize_rt`` so that round-tripping works fine. + """ + from tokenize_rt import reversed_enumerate, src_to_tokens, tokens_to_src + + tokens = src_to_tokens(src) + trailing_semicolon = False + for idx, token in reversed_enumerate(tokens): + if token.name in TOKENS_TO_IGNORE: + continue + if token.name == "OP" and token.src == ";": + del tokens[idx] + trailing_semicolon = True + break + if not trailing_semicolon: + return src, False + return tokens_to_src(tokens), True + + +def put_trailing_semicolon_back(src: str, has_trailing_semicolon: bool) -> str: + """Put trailing semicolon back if cell originally had it. + + Mirrors the logic in `quiet` from `IPython.core.displayhook`, but uses + ``tokenize_rt`` so that round-tripping works fine. 
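+
+ Illustrative: put_trailing_semicolon_back("plt.plot(x)\n", True) returns
+ "plt.plot(x);\n"; with has_trailing_semicolon=False the source is returned
+ unchanged.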
+ """ + if not has_trailing_semicolon: + return src + from tokenize_rt import reversed_enumerate, src_to_tokens, tokens_to_src + + tokens = src_to_tokens(src) + for idx, token in reversed_enumerate(tokens): + if token.name in TOKENS_TO_IGNORE: + continue + tokens[idx] = token._replace(src=token.src + ";") + break + else: # pragma: nocover + raise AssertionError( + "INTERNAL ERROR: Was not able to reinstate trailing semicolon. " + "Please report a bug on https://github.com/psf/black/issues. " + ) from None + return str(tokens_to_src(tokens)) + + +def mask_cell(src: str) -> Tuple[str, List[Replacement]]: + """Mask IPython magics so content becomes parseable Python code. + + For example, + + %matplotlib inline + 'foo' + + becomes + + "25716f358c32750e" + 'foo' + + The replacements are returned, along with the transformed code. + """ + replacements: List[Replacement] = [] + try: + ast.parse(src) + except SyntaxError: + # Might have IPython magics, will process below. + pass + else: + # Syntax is fine, nothing to mask, early return. + return src, replacements + + from IPython.core.inputtransformer2 import TransformerManager + + transformer_manager = TransformerManager() + transformed = transformer_manager.transform_cell(src) + transformed, cell_magic_replacements = replace_cell_magics(transformed) + replacements += cell_magic_replacements + transformed = transformer_manager.transform_cell(transformed) + transformed, magic_replacements = replace_magics(transformed) + if len(transformed.splitlines()) != len(src.splitlines()): + # Multi-line magic, not supported. + raise NothingChanged + replacements += magic_replacements + return transformed, replacements + + +def get_token(src: str, magic: str) -> str: + """Return randomly generated token to mask IPython magic with. + + For example, if 'magic' was `%matplotlib inline`, then a possible + token to mask it with would be `"43fdd17f7e5ddc83"`. The token + will be the same length as the magic, and we make sure that it was + not already present anywhere else in the cell. + """ + assert magic + nbytes = max(len(magic) // 2 - 1, 1) + token = TOKEN_HEX(nbytes) + counter = 0 + while token in src: + token = TOKEN_HEX(nbytes) + counter += 1 + if counter > 100: + raise AssertionError( + "INTERNAL ERROR: Black was not able to replace IPython magic. " + "Please report a bug on https://github.com/psf/black/issues. " + f"The magic might be helpful: {magic}" + ) from None + if len(token) + 2 < len(magic): + token = f"{token}." + return f'"{token}"' + + +def replace_cell_magics(src: str) -> Tuple[str, List[Replacement]]: + """Replace cell magic with token. + + Note that 'src' will already have been processed by IPython's + TransformerManager().transform_cell. + + Example, + + get_ipython().run_cell_magic('t', '-n1', 'ls =!ls\\n') + + becomes + + "a794." + ls =!ls + + The replacement, along with the transformed code, is returned. + """ + replacements: List[Replacement] = [] + + tree = ast.parse(src) + + cell_magic_finder = CellMagicFinder() + cell_magic_finder.visit(tree) + if cell_magic_finder.cell_magic is None: + return src, replacements + header = cell_magic_finder.cell_magic.header + mask = get_token(src, header) + replacements.append(Replacement(mask=mask, src=header)) + return f"{mask}\n{cell_magic_finder.cell_magic.body}", replacements + + +def replace_magics(src: str) -> Tuple[str, List[Replacement]]: + """Replace magics within body of cell. + + Note that 'src' will already have been processed by IPython's + TransformerManager().transform_cell. 
+ + Example, this + + get_ipython().run_line_magic('matplotlib', 'inline') + 'foo' + + becomes + + "5e67db56d490fd39" + 'foo' + + The replacement, along with the transformed code, are returned. + """ + replacements = [] + magic_finder = MagicFinder() + magic_finder.visit(ast.parse(src)) + new_srcs = [] + for i, line in enumerate(src.splitlines(), start=1): + if i in magic_finder.magics: + offsets_and_magics = magic_finder.magics[i] + if len(offsets_and_magics) != 1: # pragma: nocover + raise AssertionError( + f"Expecting one magic per line, got: {offsets_and_magics}\n" + "Please report a bug on https://github.com/psf/black/issues." + ) + col_offset, magic = ( + offsets_and_magics[0].col_offset, + offsets_and_magics[0].magic, + ) + mask = get_token(src, magic) + replacements.append(Replacement(mask=mask, src=magic)) + line = line[:col_offset] + mask + new_srcs.append(line) + return "\n".join(new_srcs), replacements + + +def unmask_cell(src: str, replacements: List[Replacement]) -> str: + """Remove replacements from cell. + + For example + + "9b20" + foo = bar + + becomes + + %%time + foo = bar + """ + for replacement in replacements: + src = src.replace(replacement.mask, replacement.src) + return src + + +def _is_ipython_magic(node: ast.expr) -> TypeGuard[ast.Attribute]: + """Check if attribute is IPython magic. + + Note that the source of the abstract syntax tree + will already have been processed by IPython's + TransformerManager().transform_cell. + """ + return ( + isinstance(node, ast.Attribute) + and isinstance(node.value, ast.Call) + and isinstance(node.value.func, ast.Name) + and node.value.func.id == "get_ipython" + ) + + +def _get_str_args(args: List[ast.expr]) -> List[str]: + str_args = [] + for arg in args: + assert isinstance(arg, ast.Str) + str_args.append(arg.s) + return str_args + + +@dataclasses.dataclass(frozen=True) +class CellMagic: + name: str + params: Optional[str] + body: str + + @property + def header(self) -> str: + if self.params: + return f"%%{self.name} {self.params}" + return f"%%{self.name}" + + +# ast.NodeVisitor + dataclass = breakage under mypyc. +class CellMagicFinder(ast.NodeVisitor): + """Find cell magics. + + Note that the source of the abstract syntax tree + will already have been processed by IPython's + TransformerManager().transform_cell. + + For example, + + %%time\n + foo() + + would have been transformed to + + get_ipython().run_cell_magic('time', '', 'foo()\\n') + + and we look for instances of the latter. + """ + + def __init__(self, cell_magic: Optional[CellMagic] = None) -> None: + self.cell_magic = cell_magic + + def visit_Expr(self, node: ast.Expr) -> None: + """Find cell magic, extract header and body.""" + if ( + isinstance(node.value, ast.Call) + and _is_ipython_magic(node.value.func) + and node.value.func.attr == "run_cell_magic" + ): + args = _get_str_args(node.value.args) + self.cell_magic = CellMagic(name=args[0], params=args[1], body=args[2]) + self.generic_visit(node) + + +@dataclasses.dataclass(frozen=True) +class OffsetAndMagic: + col_offset: int + magic: str + + +# Unsurprisingly, subclassing ast.NodeVisitor means we can't use dataclasses here +# as mypyc will generate broken code. +class MagicFinder(ast.NodeVisitor): + """Visit cell to look for get_ipython calls. + + Note that the source of the abstract syntax tree + will already have been processed by IPython's + TransformerManager().transform_cell. 
+ + For example, + + %matplotlib inline + + would have been transformed to + + get_ipython().run_line_magic('matplotlib', 'inline') + + and we look for instances of the latter (and likewise for other + types of magics). + """ + + def __init__(self) -> None: + self.magics: Dict[int, List[OffsetAndMagic]] = collections.defaultdict(list) + + def visit_Assign(self, node: ast.Assign) -> None: + """Look for system assign magics. + + For example, + + black_version = !black --version + env = %env var + + would have been (respectively) transformed to + + black_version = get_ipython().getoutput('black --version') + env = get_ipython().run_line_magic('env', 'var') + + and we look for instances of any of the latter. + """ + if isinstance(node.value, ast.Call) and _is_ipython_magic(node.value.func): + args = _get_str_args(node.value.args) + if node.value.func.attr == "getoutput": + src = f"!{args[0]}" + elif node.value.func.attr == "run_line_magic": + src = f"%{args[0]}" + if args[1]: + src += f" {args[1]}" + else: + raise AssertionError( + f"Unexpected IPython magic {node.value.func.attr!r} found. " + "Please report a bug on https://github.com/psf/black/issues." + ) from None + self.magics[node.value.lineno].append( + OffsetAndMagic(node.value.col_offset, src) + ) + self.generic_visit(node) + + def visit_Expr(self, node: ast.Expr) -> None: + """Look for magics in body of cell. + + For examples, + + !ls + !!ls + ?ls + ??ls + + would (respectively) get transformed to + + get_ipython().system('ls') + get_ipython().getoutput('ls') + get_ipython().run_line_magic('pinfo', 'ls') + get_ipython().run_line_magic('pinfo2', 'ls') + + and we look for instances of any of the latter. + """ + if isinstance(node.value, ast.Call) and _is_ipython_magic(node.value.func): + args = _get_str_args(node.value.args) + if node.value.func.attr == "run_line_magic": + if args[0] == "pinfo": + src = f"?{args[1]}" + elif args[0] == "pinfo2": + src = f"??{args[1]}" + else: + src = f"%{args[0]}" + if args[1]: + src += f" {args[1]}" + elif node.value.func.attr == "system": + src = f"!{args[0]}" + elif node.value.func.attr == "getoutput": + src = f"!!{args[0]}" + else: + raise NothingChanged # unsupported magic. + self.magics[node.value.lineno].append( + OffsetAndMagic(node.value.col_offset, src) + ) + self.generic_visit(node) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/linegen.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/linegen.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..9fb6c3c63 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/linegen.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/linegen.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/linegen.py new file mode 100644 index 000000000..9d22a7e78 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/linegen.py @@ -0,0 +1,1789 @@ +""" +Generating lines of code. 
+""" + +import re +import sys +from dataclasses import replace +from enum import Enum, auto +from functools import partial, wraps +from typing import Collection, Iterator, List, Optional, Set, Union, cast + +from black.brackets import ( + COMMA_PRIORITY, + DOT_PRIORITY, + STRING_PRIORITY, + get_leaves_inside_matching_brackets, + max_delimiter_priority_in_atom, +) +from black.comments import FMT_OFF, generate_comments, list_comments +from black.lines import ( + Line, + RHSResult, + append_leaves, + can_be_split, + can_omit_invisible_parens, + is_line_short_enough, + line_to_string, +) +from black.mode import Feature, Mode, Preview +from black.nodes import ( + ASSIGNMENTS, + BRACKETS, + CLOSING_BRACKETS, + OPENING_BRACKETS, + STANDALONE_COMMENT, + STATEMENT, + WHITESPACE, + Visitor, + ensure_visible, + fstring_to_string, + get_annotation_type, + is_arith_like, + is_async_stmt_or_funcdef, + is_atom_with_invisible_parens, + is_docstring, + is_empty_tuple, + is_lpar_token, + is_multiline_string, + is_name_token, + is_one_sequence_between, + is_one_tuple, + is_parent_function_or_class, + is_part_of_annotation, + is_rpar_token, + is_stub_body, + is_stub_suite, + is_tuple_containing_walrus, + is_type_ignore_comment_string, + is_vararg, + is_walrus_assignment, + is_yield, + syms, + wrap_in_parentheses, +) +from black.numerics import normalize_numeric_literal +from black.strings import ( + fix_docstring, + get_string_prefix, + normalize_string_prefix, + normalize_string_quotes, + normalize_unicode_escape_sequences, +) +from black.trans import ( + CannotTransform, + StringMerger, + StringParenStripper, + StringParenWrapper, + StringSplitter, + Transformer, + hug_power_op, +) +from blib2to3.pgen2 import token +from blib2to3.pytree import Leaf, Node + +# types +LeafID = int +LN = Union[Leaf, Node] + + +class CannotSplit(CannotTransform): + """A readable split that fits the allotted line length is impossible.""" + + +# This isn't a dataclass because @dataclass + Generic breaks mypyc. +# See also https://github.com/mypyc/mypyc/issues/827. +class LineGenerator(Visitor[Line]): + """Generates reformatted Line objects. Empty lines are not emitted. + + Note: destroys the tree it's visiting by mutating prefixes of its leaves + in ways that will no longer stringify to valid Python code on the tree. + """ + + def __init__(self, mode: Mode, features: Collection[Feature]) -> None: + self.mode = mode + self.features = features + self.current_line: Line + self.__post_init__() + + def line(self, indent: int = 0) -> Iterator[Line]: + """Generate a line. + + If the line is empty, only emit if it makes sense. + If the line is too long, split it first and then generate. + + If any lines were generated, set up a new current_line. + """ + if not self.current_line: + self.current_line.depth += indent + return # Line is empty, don't emit. Creating a new one unnecessary. + + if len(self.current_line.leaves) == 1 and is_async_stmt_or_funcdef( + self.current_line.leaves[0] + ): + # Special case for async def/for/with statements. `visit_async_stmt` + # adds an `ASYNC` leaf then visits the child def/for/with statement + # nodes. Line yields from those nodes shouldn't treat the former + # `ASYNC` leaf as a complete line. + return + + complete_line = self.current_line + self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent) + yield complete_line + + def visit_default(self, node: LN) -> Iterator[Line]: + """Default `visit_*()` implementation. 
Recurses to children of `node`.""" + if isinstance(node, Leaf): + any_open_brackets = self.current_line.bracket_tracker.any_open_brackets() + for comment in generate_comments(node): + if any_open_brackets: + # any comment within brackets is subject to splitting + self.current_line.append(comment) + elif comment.type == token.COMMENT: + # regular trailing comment + self.current_line.append(comment) + yield from self.line() + + else: + # regular standalone comment + yield from self.line() + + self.current_line.append(comment) + yield from self.line() + + if any_open_brackets: + node.prefix = "" + if node.type not in WHITESPACE: + self.current_line.append(node) + yield from super().visit_default(node) + + def visit_test(self, node: Node) -> Iterator[Line]: + """Visit an `x if y else z` test""" + + already_parenthesized = ( + node.prev_sibling and node.prev_sibling.type == token.LPAR + ) + + if not already_parenthesized: + # Similar to logic in wrap_in_parentheses + lpar = Leaf(token.LPAR, "") + rpar = Leaf(token.RPAR, "") + prefix = node.prefix + node.prefix = "" + lpar.prefix = prefix + node.insert_child(0, lpar) + node.append_child(rpar) + + yield from self.visit_default(node) + + def visit_INDENT(self, node: Leaf) -> Iterator[Line]: + """Increase indentation level, maybe yield a line.""" + # In blib2to3 INDENT never holds comments. + yield from self.line(+1) + yield from self.visit_default(node) + + def visit_DEDENT(self, node: Leaf) -> Iterator[Line]: + """Decrease indentation level, maybe yield a line.""" + # The current line might still wait for trailing comments. At DEDENT time + # there won't be any (they would be prefixes on the preceding NEWLINE). + # Emit the line then. + yield from self.line() + + # While DEDENT has no value, its prefix may contain standalone comments + # that belong to the current indentation level. Get 'em. + yield from self.visit_default(node) + + # Finally, emit the dedent. + yield from self.line(-1) + + def visit_stmt( + self, node: Node, keywords: Set[str], parens: Set[str] + ) -> Iterator[Line]: + """Visit a statement. + + This implementation is shared for `if`, `while`, `for`, `try`, `except`, + `def`, `with`, `class`, `assert`, and assignments. + + The relevant Python language `keywords` for a given statement will be + NAME leaves within it. This methods puts those on a separate line. + + `parens` holds a set of string leaf values immediately after which + invisible parens should be put. 
+ """ + normalize_invisible_parens( + node, parens_after=parens, mode=self.mode, features=self.features + ) + for child in node.children: + if is_name_token(child) and child.value in keywords: + yield from self.line() + + yield from self.visit(child) + + def visit_typeparams(self, node: Node) -> Iterator[Line]: + yield from self.visit_default(node) + node.children[0].prefix = "" + + def visit_typevartuple(self, node: Node) -> Iterator[Line]: + yield from self.visit_default(node) + node.children[1].prefix = "" + + def visit_paramspec(self, node: Node) -> Iterator[Line]: + yield from self.visit_default(node) + node.children[1].prefix = "" + + def visit_dictsetmaker(self, node: Node) -> Iterator[Line]: + if Preview.wrap_long_dict_values_in_parens in self.mode: + for i, child in enumerate(node.children): + if i == 0: + continue + if node.children[i - 1].type == token.COLON: + if ( + child.type == syms.atom + and child.children[0].type in OPENING_BRACKETS + and not is_walrus_assignment(child) + ): + maybe_make_parens_invisible_in_atom( + child, + parent=node, + remove_brackets_around_comma=False, + ) + else: + wrap_in_parentheses(node, child, visible=False) + yield from self.visit_default(node) + + def visit_funcdef(self, node: Node) -> Iterator[Line]: + """Visit function definition.""" + yield from self.line() + + # Remove redundant brackets around return type annotation. + is_return_annotation = False + for child in node.children: + if child.type == token.RARROW: + is_return_annotation = True + elif is_return_annotation: + if child.type == syms.atom and child.children[0].type == token.LPAR: + if maybe_make_parens_invisible_in_atom( + child, + parent=node, + remove_brackets_around_comma=False, + ): + wrap_in_parentheses(node, child, visible=False) + else: + wrap_in_parentheses(node, child, visible=False) + is_return_annotation = False + + for child in node.children: + yield from self.visit(child) + + def visit_match_case(self, node: Node) -> Iterator[Line]: + """Visit either a match or case statement.""" + normalize_invisible_parens( + node, parens_after=set(), mode=self.mode, features=self.features + ) + + yield from self.line() + for child in node.children: + yield from self.visit(child) + + def visit_suite(self, node: Node) -> Iterator[Line]: + """Visit a suite.""" + if is_stub_suite(node): + yield from self.visit(node.children[2]) + else: + yield from self.visit_default(node) + + def visit_simple_stmt(self, node: Node) -> Iterator[Line]: + """Visit a statement without nested statements.""" + prev_type: Optional[int] = None + for child in node.children: + if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child): + wrap_in_parentheses(node, child, visible=False) + prev_type = child.type + + if node.parent and node.parent.type in STATEMENT: + if is_parent_function_or_class(node) and is_stub_body(node): + yield from self.visit_default(node) + else: + yield from self.line(+1) + yield from self.visit_default(node) + yield from self.line(-1) + + else: + if node.parent and is_stub_suite(node.parent): + node.prefix = "" + yield from self.visit_default(node) + return + yield from self.line() + yield from self.visit_default(node) + + def visit_async_stmt(self, node: Node) -> Iterator[Line]: + """Visit `async def`, `async for`, `async with`.""" + yield from self.line() + + children = iter(node.children) + for child in children: + yield from self.visit(child) + + if child.type == token.ASYNC or child.type == STANDALONE_COMMENT: + # STANDALONE_COMMENT happens when `# fmt: skip` is 
applied on the async + # line. + break + + internal_stmt = next(children) + yield from self.visit(internal_stmt) + + def visit_decorators(self, node: Node) -> Iterator[Line]: + """Visit decorators.""" + for child in node.children: + yield from self.line() + yield from self.visit(child) + + def visit_power(self, node: Node) -> Iterator[Line]: + for idx, leaf in enumerate(node.children[:-1]): + next_leaf = node.children[idx + 1] + + if not isinstance(leaf, Leaf): + continue + + value = leaf.value.lower() + if ( + leaf.type == token.NUMBER + and next_leaf.type == syms.trailer + # Ensure that we are in an attribute trailer + and next_leaf.children[0].type == token.DOT + # It shouldn't wrap hexadecimal, binary and octal literals + and not value.startswith(("0x", "0b", "0o")) + # It shouldn't wrap complex literals + and "j" not in value + ): + wrap_in_parentheses(node, leaf) + + remove_await_parens(node) + + yield from self.visit_default(node) + + def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]: + """Remove a semicolon and put the other statement on a separate line.""" + yield from self.line() + + def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]: + """End of file. Process outstanding comments and end with a newline.""" + yield from self.visit_default(leaf) + yield from self.line() + + def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]: + if not self.current_line.bracket_tracker.any_open_brackets(): + yield from self.line() + yield from self.visit_default(leaf) + + def visit_factor(self, node: Node) -> Iterator[Line]: + """Force parentheses between a unary op and a binary power: + + -2 ** 8 -> -(2 ** 8) + """ + _operator, operand = node.children + if ( + operand.type == syms.power + and len(operand.children) == 3 + and operand.children[1].type == token.DOUBLESTAR + ): + lpar = Leaf(token.LPAR, "(") + rpar = Leaf(token.RPAR, ")") + index = operand.remove() or 0 + node.insert_child(index, Node(syms.atom, [lpar, operand, rpar])) + yield from self.visit_default(node) + + def visit_tname(self, node: Node) -> Iterator[Line]: + """ + Add potential parentheses around types in function parameter lists to be made + into real parentheses in case the type hint is too long to fit on a line + Examples: + def foo(a: int, b: float = 7): ... + + -> + + def foo(a: (int), b: (float) = 7): ... + """ + assert len(node.children) == 3 + if maybe_make_parens_invisible_in_atom(node.children[2], parent=node): + wrap_in_parentheses(node, node.children[2], visible=False) + + yield from self.visit_default(node) + + def visit_STRING(self, leaf: Leaf) -> Iterator[Line]: + if Preview.hex_codes_in_unicode_sequences in self.mode: + normalize_unicode_escape_sequences(leaf) + + if is_docstring(leaf, self.mode) and not re.search(r"\\\s*\n", leaf.value): + # We're ignoring docstrings with backslash newline escapes because changing + # indentation of those changes the AST representation of the code. + if self.mode.string_normalization: + docstring = normalize_string_prefix(leaf.value) + # We handle string normalization at the end of this method, but since + # what we do right now acts differently depending on quote style (ex. + # see padding logic below), there's a possibility for unstable + # formatting. To avoid a situation where this function formats a + # docstring differently on the second pass, normalize it early. 
+ docstring = normalize_string_quotes(docstring) + else: + docstring = leaf.value + prefix = get_string_prefix(docstring) + docstring = docstring[len(prefix) :] # Remove the prefix + quote_char = docstring[0] + # A natural way to remove the outer quotes is to do: + # docstring = docstring.strip(quote_char) + # but that breaks on """""x""" (which is '""x'). + # So we actually need to remove the first character and the next two + # characters but only if they are the same as the first. + quote_len = 1 if docstring[1] != quote_char else 3 + docstring = docstring[quote_len:-quote_len] + docstring_started_empty = not docstring + indent = " " * 4 * self.current_line.depth + + if is_multiline_string(leaf): + docstring = fix_docstring(docstring, indent) + else: + docstring = docstring.strip() + + has_trailing_backslash = False + if docstring: + # Add some padding if the docstring starts / ends with a quote mark. + if docstring[0] == quote_char: + docstring = " " + docstring + if docstring[-1] == quote_char: + docstring += " " + if docstring[-1] == "\\": + backslash_count = len(docstring) - len(docstring.rstrip("\\")) + if backslash_count % 2: + # Odd number of tailing backslashes, add some padding to + # avoid escaping the closing string quote. + docstring += " " + has_trailing_backslash = True + elif not docstring_started_empty: + docstring = " " + + # We could enforce triple quotes at this point. + quote = quote_char * quote_len + + # It's invalid to put closing single-character quotes on a new line. + if quote_len == 3: + # We need to find the length of the last line of the docstring + # to find if we can add the closing quotes to the line without + # exceeding the maximum line length. + # If docstring is one line, we don't put the closing quotes on a + # separate line because it looks ugly (#3320). + lines = docstring.splitlines() + last_line_length = len(lines[-1]) if docstring else 0 + + # If adding closing quotes would cause the last line to exceed + # the maximum line length, and the closing quote is not + # prefixed by a newline then put a line break before + # the closing quotes + if ( + len(lines) > 1 + and last_line_length + quote_len > self.mode.line_length + and len(indent) + quote_len <= self.mode.line_length + and not has_trailing_backslash + ): + if ( + Preview.docstring_check_for_newline in self.mode + and leaf.value[-1 - quote_len] == "\n" + ): + leaf.value = prefix + quote + docstring + quote + else: + leaf.value = prefix + quote + docstring + "\n" + indent + quote + else: + leaf.value = prefix + quote + docstring + quote + else: + leaf.value = prefix + quote + docstring + quote + + if self.mode.string_normalization and leaf.type == token.STRING: + leaf.value = normalize_string_prefix(leaf.value) + leaf.value = normalize_string_quotes(leaf.value) + yield from self.visit_default(leaf) + + def visit_NUMBER(self, leaf: Leaf) -> Iterator[Line]: + normalize_numeric_literal(leaf) + yield from self.visit_default(leaf) + + def visit_fstring(self, node: Node) -> Iterator[Line]: + # currently we don't want to format and split f-strings at all. 
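# Illustrative aside (not part of the vendored file above): the docstring
# handling in visit_STRING strips the prefix and the surrounding quotes before
# re-indenting the body. A rough standalone sketch of just the quote-stripping
# step described in the comments -- a naive .strip(quote_char) would over-strip
# on docstrings such as '"""""x"""':

def strip_docstring_quotes(docstring: str) -> str:
    """Return the docstring body without its surrounding quotes."""
    quote_char = docstring[0]
    # Triple quotes only when the first three characters are the same quote.
    quote_len = 1 if docstring[1] != quote_char else 3
    return docstring[quote_len:-quote_len]


assert strip_docstring_quotes('"""hello"""') == "hello"
assert strip_docstring_quotes('"""""x"""') == '""x'
assert strip_docstring_quotes("'one'") == "one"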
+ string_leaf = fstring_to_string(node) + node.replace(string_leaf) + yield from self.visit_STRING(string_leaf) + + # TODO: Uncomment Implementation to format f-string children + # fstring_start = node.children[0] + # fstring_end = node.children[-1] + # assert isinstance(fstring_start, Leaf) + # assert isinstance(fstring_end, Leaf) + + # quote_char = fstring_end.value[0] + # quote_idx = fstring_start.value.index(quote_char) + # prefix, quote = ( + # fstring_start.value[:quote_idx], + # fstring_start.value[quote_idx:] + # ) + + # if not is_docstring(node, self.mode): + # prefix = normalize_string_prefix(prefix) + + # assert quote == fstring_end.value + + # is_raw_fstring = "r" in prefix or "R" in prefix + # middles = [ + # leaf + # for leaf in node.leaves() + # if leaf.type == token.FSTRING_MIDDLE + # ] + + # if self.mode.string_normalization: + # middles, quote = normalize_fstring_quotes(quote, middles, is_raw_fstring) + + # fstring_start.value = prefix + quote + # fstring_end.value = quote + + # yield from self.visit_default(node) + + def __post_init__(self) -> None: + """You are in a twisty little maze of passages.""" + self.current_line = Line(mode=self.mode) + + v = self.visit_stmt + Ø: Set[str] = set() + self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","}) + self.visit_if_stmt = partial( + v, keywords={"if", "else", "elif"}, parens={"if", "elif"} + ) + self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"}) + self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"}) + self.visit_try_stmt = partial( + v, keywords={"try", "except", "else", "finally"}, parens=Ø + ) + self.visit_except_clause = partial(v, keywords={"except"}, parens={"except"}) + self.visit_with_stmt = partial(v, keywords={"with"}, parens={"with"}) + self.visit_classdef = partial(v, keywords={"class"}, parens=Ø) + + self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS) + self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"}) + self.visit_import_from = partial(v, keywords=Ø, parens={"import"}) + self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"}) + self.visit_async_funcdef = self.visit_async_stmt + self.visit_decorated = self.visit_decorators + + # PEP 634 + self.visit_match_stmt = self.visit_match_case + self.visit_case_block = self.visit_match_case + if Preview.remove_redundant_guard_parens in self.mode: + self.visit_guard = partial(v, keywords=Ø, parens={"if"}) + + +def _hugging_power_ops_line_to_string( + line: Line, + features: Collection[Feature], + mode: Mode, +) -> Optional[str]: + try: + return line_to_string(next(hug_power_op(line, features, mode))) + except CannotTransform: + return None + + +def transform_line( + line: Line, mode: Mode, features: Collection[Feature] = () +) -> Iterator[Line]: + """Transform a `line`, potentially splitting it into many lines. + + They should fit in the allotted `line_length` but might not be able to. + + `features` are syntactical features that may be used in the output. + """ + if line.is_comment: + yield line + return + + line_str = line_to_string(line) + + # We need the line string when power operators are hugging to determine if we should + # split the line. Default to line_str, if no power operator are present on the line. 
+ line_str_hugging_power_ops = ( + _hugging_power_ops_line_to_string(line, features, mode) or line_str + ) + + ll = mode.line_length + sn = mode.string_normalization + string_merge = StringMerger(ll, sn) + string_paren_strip = StringParenStripper(ll, sn) + string_split = StringSplitter(ll, sn) + string_paren_wrap = StringParenWrapper(ll, sn) + + transformers: List[Transformer] + if ( + not line.contains_uncollapsable_type_comments() + and not line.should_split_rhs + and not line.magic_trailing_comma + and ( + is_line_short_enough(line, mode=mode, line_str=line_str_hugging_power_ops) + or line.contains_unsplittable_type_ignore() + ) + and not (line.inside_brackets and line.contains_standalone_comments()) + and not line.contains_implicit_multiline_string_with_comments() + ): + # Only apply basic string preprocessing, since lines shouldn't be split here. + if Preview.string_processing in mode: + transformers = [string_merge, string_paren_strip] + else: + transformers = [] + elif line.is_def and not should_split_funcdef_with_rhs(line, mode): + transformers = [left_hand_split] + else: + + def _rhs( + self: object, line: Line, features: Collection[Feature], mode: Mode + ) -> Iterator[Line]: + """Wraps calls to `right_hand_split`. + + The calls increasingly `omit` right-hand trailers (bracket pairs with + content), meaning the trailers get glued together to split on another + bracket pair instead. + """ + for omit in generate_trailers_to_omit(line, mode.line_length): + lines = list(right_hand_split(line, mode, features, omit=omit)) + # Note: this check is only able to figure out if the first line of the + # *current* transformation fits in the line length. This is true only + # for simple cases. All others require running more transforms via + # `transform_line()`. This check doesn't know if those would succeed. + if is_line_short_enough(lines[0], mode=mode): + yield from lines + return + + # All splits failed, best effort split with no omits. + # This mostly happens to multiline strings that are by definition + # reported as not fitting a single line, as well as lines that contain + # trailing commas (those have to be exploded). + yield from right_hand_split(line, mode, features=features) + + # HACK: nested functions (like _rhs) compiled by mypyc don't retain their + # __name__ attribute which is needed in `run_transformer` further down. + # Unfortunately a nested class breaks mypyc too. So a class must be created + # via type ... https://github.com/mypyc/mypyc/issues/884 + rhs = type("rhs", (), {"__call__": _rhs})() + + if Preview.string_processing in mode: + if line.inside_brackets: + transformers = [ + string_merge, + string_paren_strip, + string_split, + delimiter_split, + standalone_comment_split, + string_paren_wrap, + rhs, + ] + else: + transformers = [ + string_merge, + string_paren_strip, + string_split, + string_paren_wrap, + rhs, + ] + else: + if line.inside_brackets: + transformers = [delimiter_split, standalone_comment_split, rhs] + else: + transformers = [rhs] + # It's always safe to attempt hugging of power operations and pretty much every line + # could match. + transformers.append(hug_power_op) + + for transform in transformers: + # We are accumulating lines in `result` because we might want to abort + # mission and return the original line in the end, or attempt a different + # split altogether. 
+ try: + result = run_transformer(line, transform, mode, features, line_str=line_str) + except CannotTransform: + continue + else: + yield from result + break + + else: + yield line + + +def should_split_funcdef_with_rhs(line: Line, mode: Mode) -> bool: + """If a funcdef has a magic trailing comma in the return type, then we should first + split the line with rhs to respect the comma. + """ + return_type_leaves: List[Leaf] = [] + in_return_type = False + + for leaf in line.leaves: + if leaf.type == token.COLON: + in_return_type = False + if in_return_type: + return_type_leaves.append(leaf) + if leaf.type == token.RARROW: + in_return_type = True + + # using `bracket_split_build_line` will mess with whitespace, so we duplicate a + # couple lines from it. + result = Line(mode=line.mode, depth=line.depth) + leaves_to_track = get_leaves_inside_matching_brackets(return_type_leaves) + for leaf in return_type_leaves: + result.append( + leaf, + preformatted=True, + track_bracket=id(leaf) in leaves_to_track, + ) + + # we could also return true if the line is too long, and the return type is longer + # than the param list. Or if `should_split_rhs` returns True. + return result.magic_trailing_comma is not None + + +class _BracketSplitComponent(Enum): + head = auto() + body = auto() + tail = auto() + + +def left_hand_split( + line: Line, _features: Collection[Feature], mode: Mode +) -> Iterator[Line]: + """Split line into many lines, starting with the first matching bracket pair. + + Note: this usually looks weird, only use this for function definitions. + Prefer RHS otherwise. This is why this function is not symmetrical with + :func:`right_hand_split` which also handles optional parentheses. + """ + tail_leaves: List[Leaf] = [] + body_leaves: List[Leaf] = [] + head_leaves: List[Leaf] = [] + current_leaves = head_leaves + matching_bracket: Optional[Leaf] = None + for leaf in line.leaves: + if ( + current_leaves is body_leaves + and leaf.type in CLOSING_BRACKETS + and leaf.opening_bracket is matching_bracket + and isinstance(matching_bracket, Leaf) + ): + ensure_visible(leaf) + ensure_visible(matching_bracket) + current_leaves = tail_leaves if body_leaves else head_leaves + current_leaves.append(leaf) + if current_leaves is head_leaves: + if leaf.type in OPENING_BRACKETS: + matching_bracket = leaf + current_leaves = body_leaves + if not matching_bracket or not tail_leaves: + raise CannotSplit("No brackets found") + + head = bracket_split_build_line( + head_leaves, line, matching_bracket, component=_BracketSplitComponent.head + ) + body = bracket_split_build_line( + body_leaves, line, matching_bracket, component=_BracketSplitComponent.body + ) + tail = bracket_split_build_line( + tail_leaves, line, matching_bracket, component=_BracketSplitComponent.tail + ) + bracket_split_succeeded_or_raise(head, body, tail) + for result in (head, body, tail): + if result: + yield result + + +def right_hand_split( + line: Line, + mode: Mode, + features: Collection[Feature] = (), + omit: Collection[LeafID] = (), +) -> Iterator[Line]: + """Split line into many lines, starting with the last matching bracket pair. + + If the split was by optional parentheses, attempt splitting without them, too. + `omit` is a collection of closing bracket IDs that shouldn't be considered for + this split. + + Note: running this function modifies `bracket_depth` on the leaves of `line`. 
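# Illustrative aside (not part of the vendored file above): transform_line
# tries each transformer in order, skips the ones that raise CannotTransform,
# and falls back to the original line via the for/else. A stripped-down sketch
# of that control flow operating on plain strings (the helper names here are
# hypothetical):

from typing import Callable, List


class CannotTransformError(Exception):
    """Raised by a transformer that does not apply to this line."""


def split_on_commas(line: str) -> List[str]:
    if "," not in line:
        raise CannotTransformError("no commas to split on")
    return [part.strip() for part in line.split(",")]


def split_on_dots(line: str) -> List[str]:
    if "." not in line:
        raise CannotTransformError("no dots to split on")
    return [part.strip() for part in line.split(".")]


def apply_first_transformer(
    line: str, transformers: List[Callable[[str], List[str]]]
) -> List[str]:
    for transform in transformers:
        try:
            return transform(line)
        except CannotTransformError:
            continue  # This transformer did not apply; try the next one.
    return [line]  # Nothing applied; keep the line unchanged.


# apply_first_transformer("a, b, c", [split_on_commas, split_on_dots])
# -> ["a", "b", "c"]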
+ """ + rhs_result = _first_right_hand_split(line, omit=omit) + yield from _maybe_split_omitting_optional_parens( + rhs_result, line, mode, features=features, omit=omit + ) + + +def _first_right_hand_split( + line: Line, + omit: Collection[LeafID] = (), +) -> RHSResult: + """Split the line into head, body, tail starting with the last bracket pair. + + Note: this function should not have side effects. It's relied upon by + _maybe_split_omitting_optional_parens to get an opinion whether to prefer + splitting on the right side of an assignment statement. + """ + tail_leaves: List[Leaf] = [] + body_leaves: List[Leaf] = [] + head_leaves: List[Leaf] = [] + current_leaves = tail_leaves + opening_bracket: Optional[Leaf] = None + closing_bracket: Optional[Leaf] = None + for leaf in reversed(line.leaves): + if current_leaves is body_leaves: + if leaf is opening_bracket: + current_leaves = head_leaves if body_leaves else tail_leaves + current_leaves.append(leaf) + if current_leaves is tail_leaves: + if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit: + opening_bracket = leaf.opening_bracket + closing_bracket = leaf + current_leaves = body_leaves + if not (opening_bracket and closing_bracket and head_leaves): + # If there is no opening or closing_bracket that means the split failed and + # all content is in the tail. Otherwise, if `head_leaves` are empty, it means + # the matching `opening_bracket` wasn't available on `line` anymore. + raise CannotSplit("No brackets found") + + tail_leaves.reverse() + body_leaves.reverse() + head_leaves.reverse() + + body: Optional[Line] = None + if ( + Preview.hug_parens_with_braces_and_square_brackets in line.mode + and tail_leaves[0].value + and tail_leaves[0].opening_bracket is head_leaves[-1] + ): + inner_body_leaves = list(body_leaves) + hugged_opening_leaves: List[Leaf] = [] + hugged_closing_leaves: List[Leaf] = [] + is_unpacking = body_leaves[0].type in [token.STAR, token.DOUBLESTAR] + unpacking_offset: int = 1 if is_unpacking else 0 + while ( + len(inner_body_leaves) >= 2 + unpacking_offset + and inner_body_leaves[-1].type in CLOSING_BRACKETS + and inner_body_leaves[-1].opening_bracket + is inner_body_leaves[unpacking_offset] + ): + if unpacking_offset: + hugged_opening_leaves.append(inner_body_leaves.pop(0)) + unpacking_offset = 0 + hugged_opening_leaves.append(inner_body_leaves.pop(0)) + hugged_closing_leaves.insert(0, inner_body_leaves.pop()) + + if hugged_opening_leaves and inner_body_leaves: + inner_body = bracket_split_build_line( + inner_body_leaves, + line, + hugged_opening_leaves[-1], + component=_BracketSplitComponent.body, + ) + if ( + line.mode.magic_trailing_comma + and inner_body_leaves[-1].type == token.COMMA + ): + should_hug = True + else: + line_length = line.mode.line_length - sum( + len(str(leaf)) + for leaf in hugged_opening_leaves + hugged_closing_leaves + ) + if is_line_short_enough( + inner_body, mode=replace(line.mode, line_length=line_length) + ): + # Do not hug if it fits on a single line. + should_hug = False + else: + should_hug = True + if should_hug: + body_leaves = inner_body_leaves + head_leaves.extend(hugged_opening_leaves) + tail_leaves = hugged_closing_leaves + tail_leaves + body = inner_body # No need to re-calculate the body again later. 
+ + head = bracket_split_build_line( + head_leaves, line, opening_bracket, component=_BracketSplitComponent.head + ) + if body is None: + body = bracket_split_build_line( + body_leaves, line, opening_bracket, component=_BracketSplitComponent.body + ) + tail = bracket_split_build_line( + tail_leaves, line, opening_bracket, component=_BracketSplitComponent.tail + ) + bracket_split_succeeded_or_raise(head, body, tail) + return RHSResult(head, body, tail, opening_bracket, closing_bracket) + + +def _maybe_split_omitting_optional_parens( + rhs: RHSResult, + line: Line, + mode: Mode, + features: Collection[Feature] = (), + omit: Collection[LeafID] = (), +) -> Iterator[Line]: + if ( + Feature.FORCE_OPTIONAL_PARENTHESES not in features + # the opening bracket is an optional paren + and rhs.opening_bracket.type == token.LPAR + and not rhs.opening_bracket.value + # the closing bracket is an optional paren + and rhs.closing_bracket.type == token.RPAR + and not rhs.closing_bracket.value + # it's not an import (optional parens are the only thing we can split on + # in this case; attempting a split without them is a waste of time) + and not line.is_import + # and we can actually remove the parens + and can_omit_invisible_parens(rhs, mode.line_length) + ): + omit = {id(rhs.closing_bracket), *omit} + try: + # The RHSResult Omitting Optional Parens. + rhs_oop = _first_right_hand_split(line, omit=omit) + is_split_right_after_equal = ( + len(rhs.head.leaves) >= 2 and rhs.head.leaves[-2].type == token.EQUAL + ) + rhs_head_contains_brackets = any( + leaf.type in BRACKETS for leaf in rhs.head.leaves[:-1] + ) + # the -1 is for the ending optional paren + rhs_head_short_enough = is_line_short_enough( + rhs.head, mode=replace(mode, line_length=mode.line_length - 1) + ) + rhs_head_explode_blocked_by_magic_trailing_comma = ( + rhs.head.magic_trailing_comma is None + ) + if ( + not ( + is_split_right_after_equal + and rhs_head_contains_brackets + and rhs_head_short_enough + and rhs_head_explode_blocked_by_magic_trailing_comma + ) + # the omit optional parens split is preferred by some other reason + or _prefer_split_rhs_oop_over_rhs(rhs_oop, rhs, mode) + ): + yield from _maybe_split_omitting_optional_parens( + rhs_oop, line, mode, features=features, omit=omit + ) + return + + except CannotSplit as e: + # For chained assignments we want to use the previous successful split + if line.is_chained_assignment: + pass + + elif not can_be_split(rhs.body) and not is_line_short_enough( + rhs.body, mode=mode + ): + raise CannotSplit( + "Splitting failed, body is still too long and can't be split." + ) from e + + elif ( + rhs.head.contains_multiline_strings() + or rhs.tail.contains_multiline_strings() + ): + raise CannotSplit( + "The current optional pair of parentheses is bound to fail to" + " satisfy the splitting algorithm because the head or the tail" + " contains multiline strings which by definition never fit one" + " line." + ) from e + + ensure_visible(rhs.opening_bracket) + ensure_visible(rhs.closing_bracket) + for result in (rhs.head, rhs.body, rhs.tail): + if result: + yield result + + +def _prefer_split_rhs_oop_over_rhs( + rhs_oop: RHSResult, rhs: RHSResult, mode: Mode +) -> bool: + """ + Returns whether we should prefer the result from a split omitting optional parens + (rhs_oop) over the original (rhs). 
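# Illustrative aside (not part of the vendored file above): right_hand_split
# carves a line into head / body / tail around the last bracket pair. A toy
# string-level sketch of that decomposition (Black of course works on leaves
# and tracked bracket depth, not on raw text):

from typing import Tuple


def toy_right_hand_split(line: str) -> Tuple[str, str, str]:
    """Split `line` at the parenthesis pair closed by its last ")"."""
    close = line.rindex(")")
    depth = 0
    # Walk backwards to find the opening bracket matching the last ")".
    for i in range(close, -1, -1):
        if line[i] == ")":
            depth += 1
        elif line[i] == "(":
            depth -= 1
            if depth == 0:
                return line[: i + 1], line[i + 1 : close], line[close:]
    raise ValueError("No brackets found")


head, body, tail = toy_right_hand_split("result = compute(alpha, beta)")
assert (head, body, tail) == ("result = compute(", "alpha, beta", ")")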
+ """ + # If we have multiple targets, we prefer more `=`s on the head vs pushing them to + # the body + rhs_head_equal_count = [leaf.type for leaf in rhs.head.leaves].count(token.EQUAL) + rhs_oop_head_equal_count = [leaf.type for leaf in rhs_oop.head.leaves].count( + token.EQUAL + ) + if rhs_head_equal_count > 1 and rhs_head_equal_count > rhs_oop_head_equal_count: + return False + + has_closing_bracket_after_assign = False + for leaf in reversed(rhs_oop.head.leaves): + if leaf.type == token.EQUAL: + break + if leaf.type in CLOSING_BRACKETS: + has_closing_bracket_after_assign = True + break + return ( + # contains matching brackets after the `=` (done by checking there is a + # closing bracket) + has_closing_bracket_after_assign + or ( + # the split is actually from inside the optional parens (done by checking + # the first line still contains the `=`) + any(leaf.type == token.EQUAL for leaf in rhs_oop.head.leaves) + # the first line is short enough + and is_line_short_enough(rhs_oop.head, mode=mode) + ) + # contains unsplittable type ignore + or rhs_oop.head.contains_unsplittable_type_ignore() + or rhs_oop.body.contains_unsplittable_type_ignore() + or rhs_oop.tail.contains_unsplittable_type_ignore() + ) + + +def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None: + """Raise :exc:`CannotSplit` if the last left- or right-hand split failed. + + Do nothing otherwise. + + A left- or right-hand split is based on a pair of brackets. Content before + (and including) the opening bracket is left on one line, content inside the + brackets is put on a separate line, and finally content starting with and + following the closing bracket is put on a separate line. + + Those are called `head`, `body`, and `tail`, respectively. If the split + produced the same line (all content in `head`) or ended up with an empty `body` + and the `tail` is just the closing bracket, then it's considered failed. + """ + tail_len = len(str(tail).strip()) + if not body: + if tail_len == 0: + raise CannotSplit("Splitting brackets produced the same line") + + elif tail_len < 3: + raise CannotSplit( + f"Splitting brackets on an empty body to save {tail_len} characters is" + " not worth it" + ) + + +def bracket_split_build_line( + leaves: List[Leaf], + original: Line, + opening_bracket: Leaf, + *, + component: _BracketSplitComponent, +) -> Line: + """Return a new line with given `leaves` and respective comments from `original`. + + If it's the head component, brackets will be tracked so trailing commas are + respected. + + If it's the body component, the result line is one-indented inside brackets and as + such has its first leaf's prefix normalized and a trailing comma added when + expected. 
+ """ + result = Line(mode=original.mode, depth=original.depth) + if component is _BracketSplitComponent.body: + result.inside_brackets = True + result.depth += 1 + if leaves: + no_commas = ( + # Ensure a trailing comma for imports and standalone function arguments + original.is_def + # Don't add one after any comments or within type annotations + and opening_bracket.value == "(" + # Don't add one if there's already one there + and not any( + leaf.type == token.COMMA + and ( + Preview.typed_params_trailing_comma not in original.mode + or not is_part_of_annotation(leaf) + ) + for leaf in leaves + ) + # Don't add one inside parenthesized return annotations + and get_annotation_type(leaves[0]) != "return" + # Don't add one inside PEP 604 unions + and not ( + leaves[0].parent + and leaves[0].parent.next_sibling + and leaves[0].parent.next_sibling.type == token.VBAR + ) + ) + + if original.is_import or no_commas: + for i in range(len(leaves) - 1, -1, -1): + if leaves[i].type == STANDALONE_COMMENT: + continue + + if leaves[i].type != token.COMMA: + new_comma = Leaf(token.COMMA, ",") + leaves.insert(i + 1, new_comma) + break + + leaves_to_track: Set[LeafID] = set() + if component is _BracketSplitComponent.head: + leaves_to_track = get_leaves_inside_matching_brackets(leaves) + # Populate the line + for leaf in leaves: + result.append( + leaf, + preformatted=True, + track_bracket=id(leaf) in leaves_to_track, + ) + for comment_after in original.comments_after(leaf): + result.append(comment_after, preformatted=True) + if component is _BracketSplitComponent.body and should_split_line( + result, opening_bracket + ): + result.should_split_rhs = True + return result + + +def dont_increase_indentation(split_func: Transformer) -> Transformer: + """Normalize prefix of the first leaf in every line returned by `split_func`. + + This is a decorator over relevant split functions. + """ + + @wraps(split_func) + def split_wrapper( + line: Line, features: Collection[Feature], mode: Mode + ) -> Iterator[Line]: + for split_line in split_func(line, features, mode): + split_line.leaves[0].prefix = "" + yield split_line + + return split_wrapper + + +def _get_last_non_comment_leaf(line: Line) -> Optional[int]: + for leaf_idx in range(len(line.leaves) - 1, 0, -1): + if line.leaves[leaf_idx].type != STANDALONE_COMMENT: + return leaf_idx + return None + + +def _can_add_trailing_comma(leaf: Leaf, features: Collection[Feature]) -> bool: + if is_vararg(leaf, within={syms.typedargslist}): + return Feature.TRAILING_COMMA_IN_DEF in features + if is_vararg(leaf, within={syms.arglist, syms.argument}): + return Feature.TRAILING_COMMA_IN_CALL in features + return True + + +def _safe_add_trailing_comma(safe: bool, delimiter_priority: int, line: Line) -> Line: + if ( + safe + and delimiter_priority == COMMA_PRIORITY + and line.leaves[-1].type != token.COMMA + and line.leaves[-1].type != STANDALONE_COMMENT + ): + new_comma = Leaf(token.COMMA, ",") + line.append(new_comma) + return line + + +MIGRATE_COMMENT_DELIMITERS = {STRING_PRIORITY, COMMA_PRIORITY} + + +@dont_increase_indentation +def delimiter_split( + line: Line, features: Collection[Feature], mode: Mode +) -> Iterator[Line]: + """Split according to delimiters of the highest priority. + + If the appropriate Features are given, the split will add trailing commas + also in function signatures and calls that contain `*` and `**`. 
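# Illustrative aside (not part of the vendored file above):
# dont_increase_indentation is a decorator over generator-based split
# functions that rewrites each yielded line before passing it on. A generic
# self-contained sketch of the same wrapping pattern (the names here are
# made up for illustration):

from functools import wraps
from typing import Callable, Iterator


def strip_leading_space(
    split_func: Callable[[str], Iterator[str]]
) -> Callable[[str], Iterator[str]]:
    """Normalize every chunk yielded by `split_func`."""

    @wraps(split_func)
    def wrapper(text: str) -> Iterator[str]:
        for chunk in split_func(text):
            yield chunk.lstrip()

    return wrapper


@strip_leading_space
def split_lines(text: str) -> Iterator[str]:
    yield from text.split("\n")


# list(split_lines("a\n   b")) -> ["a", "b"]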
+ """ + if len(line.leaves) == 0: + raise CannotSplit("Line empty") from None + last_leaf = line.leaves[-1] + + bt = line.bracket_tracker + try: + delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)}) + except ValueError: + raise CannotSplit("No delimiters found") from None + + if ( + delimiter_priority == DOT_PRIORITY + and bt.delimiter_count_with_priority(delimiter_priority) == 1 + ): + raise CannotSplit("Splitting a single attribute from its owner looks wrong") + + current_line = Line( + mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets + ) + lowest_depth = sys.maxsize + trailing_comma_safe = True + + def append_to_line(leaf: Leaf) -> Iterator[Line]: + """Append `leaf` to current line or to new line if appending impossible.""" + nonlocal current_line + try: + current_line.append_safe(leaf, preformatted=True) + except ValueError: + yield current_line + + current_line = Line( + mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets + ) + current_line.append(leaf) + + def append_comments(leaf: Leaf) -> Iterator[Line]: + for comment_after in line.comments_after(leaf): + yield from append_to_line(comment_after) + + last_non_comment_leaf = _get_last_non_comment_leaf(line) + for leaf_idx, leaf in enumerate(line.leaves): + yield from append_to_line(leaf) + + previous_priority = leaf_idx > 0 and bt.delimiters.get( + id(line.leaves[leaf_idx - 1]) + ) + if ( + previous_priority != delimiter_priority + or delimiter_priority in MIGRATE_COMMENT_DELIMITERS + ): + yield from append_comments(leaf) + + lowest_depth = min(lowest_depth, leaf.bracket_depth) + if trailing_comma_safe and leaf.bracket_depth == lowest_depth: + trailing_comma_safe = _can_add_trailing_comma(leaf, features) + + if last_leaf.type == STANDALONE_COMMENT and leaf_idx == last_non_comment_leaf: + current_line = _safe_add_trailing_comma( + trailing_comma_safe, delimiter_priority, current_line + ) + + leaf_priority = bt.delimiters.get(id(leaf)) + if leaf_priority == delimiter_priority: + if ( + leaf_idx + 1 < len(line.leaves) + and delimiter_priority not in MIGRATE_COMMENT_DELIMITERS + ): + yield from append_comments(line.leaves[leaf_idx + 1]) + + yield current_line + current_line = Line( + mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets + ) + + if current_line: + current_line = _safe_add_trailing_comma( + trailing_comma_safe, delimiter_priority, current_line + ) + yield current_line + + +@dont_increase_indentation +def standalone_comment_split( + line: Line, features: Collection[Feature], mode: Mode +) -> Iterator[Line]: + """Split standalone comments from the rest of the line.""" + if not line.contains_standalone_comments(): + raise CannotSplit("Line does not have any standalone comments") + + current_line = Line( + mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets + ) + + def append_to_line(leaf: Leaf) -> Iterator[Line]: + """Append `leaf` to current line or to new line if appending impossible.""" + nonlocal current_line + try: + current_line.append_safe(leaf, preformatted=True) + except ValueError: + yield current_line + + current_line = Line( + line.mode, depth=line.depth, inside_brackets=line.inside_brackets + ) + current_line.append(leaf) + + for leaf in line.leaves: + yield from append_to_line(leaf) + + for comment_after in line.comments_after(leaf): + yield from append_to_line(comment_after) + + if current_line: + yield current_line + + +def normalize_invisible_parens( # noqa: C901 + node: Node, parens_after: Set[str], *, mode: 
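# Illustrative aside (not part of the vendored file above): delimiter_split
# breaks a line at every delimiter of the single highest priority found on it.
# A toy text-level version with a small, made-up priority table (Black's real
# priorities live in black.brackets):

from typing import Dict, List

TOY_DELIMITER_PRIORITY: Dict[str, int] = {",": 18, "or": 8, "and": 7, "+": 5, "*": 4}


def toy_delimiter_split(tokens: List[str]) -> List[List[str]]:
    priorities = [TOY_DELIMITER_PRIORITY[t] for t in tokens if t in TOY_DELIMITER_PRIORITY]
    if not priorities:
        raise ValueError("No delimiters found")
    top = max(priorities)
    chunks: List[List[str]] = [[]]
    for tok in tokens:
        chunks[-1].append(tok)
        if TOY_DELIMITER_PRIORITY.get(tok) == top:
            chunks.append([])  # Start a new output line after the delimiter.
    return [chunk for chunk in chunks if chunk]


# toy_delimiter_split(["a", ",", "b", "+", "c", ",", "d"])
# -> [["a", ","], ["b", "+", "c", ","], ["d"]]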
Mode, features: Collection[Feature] +) -> None: + """Make existing optional parentheses invisible or create new ones. + + `parens_after` is a set of string leaf values immediately after which parens + should be put. + + Standardizes on visible parentheses for single-element tuples, and keeps + existing visible parentheses for other tuples and generator expressions. + """ + for pc in list_comments(node.prefix, is_endmarker=False): + if pc.value in FMT_OFF: + # This `node` has a prefix with `# fmt: off`, don't mess with parens. + return + + # The multiple context managers grammar has a different pattern, thus this is + # separate from the for-loop below. This possibly wraps them in invisible parens, + # and later will be removed in remove_with_parens when needed. + if node.type == syms.with_stmt: + _maybe_wrap_cms_in_parens(node, mode, features) + + check_lpar = False + for index, child in enumerate(list(node.children)): + # Fixes a bug where invisible parens are not properly stripped from + # assignment statements that contain type annotations. + if isinstance(child, Node) and child.type == syms.annassign: + normalize_invisible_parens( + child, parens_after=parens_after, mode=mode, features=features + ) + + # Fixes a bug where invisible parens are not properly wrapped around + # case blocks. + if isinstance(child, Node) and child.type == syms.case_block: + normalize_invisible_parens( + child, parens_after={"case"}, mode=mode, features=features + ) + + # Add parentheses around if guards in case blocks + if ( + isinstance(child, Node) + and child.type == syms.guard + and Preview.parens_for_long_if_clauses_in_case_block in mode + ): + normalize_invisible_parens( + child, parens_after={"if"}, mode=mode, features=features + ) + + # Add parentheses around long tuple unpacking in assignments. + if ( + index == 0 + and isinstance(child, Node) + and child.type == syms.testlist_star_expr + ): + check_lpar = True + + if check_lpar: + if ( + child.type == syms.atom + and node.type == syms.for_stmt + and isinstance(child.prev_sibling, Leaf) + and child.prev_sibling.type == token.NAME + and child.prev_sibling.value == "for" + ): + if maybe_make_parens_invisible_in_atom( + child, + parent=node, + remove_brackets_around_comma=True, + ): + wrap_in_parentheses(node, child, visible=False) + elif isinstance(child, Node) and node.type == syms.with_stmt: + remove_with_parens(child, node) + elif child.type == syms.atom: + if maybe_make_parens_invisible_in_atom( + child, + parent=node, + ): + wrap_in_parentheses(node, child, visible=False) + elif is_one_tuple(child): + wrap_in_parentheses(node, child, visible=True) + elif node.type == syms.import_from: + _normalize_import_from(node, child, index) + break + elif ( + index == 1 + and child.type == token.STAR + and node.type == syms.except_clause + ): + # In except* (PEP 654), the star is actually part of + # of the keyword. So we need to skip the insertion of + # invisible parentheses to work more precisely. + continue + + elif ( + isinstance(child, Leaf) + and child.next_sibling is not None + and child.next_sibling.type == token.COLON + and child.value == "case" + ): + # A special patch for "case case:" scenario, the second occurrence + # of case will be not parsed as a Python keyword. 
+ break + + elif not is_multiline_string(child): + wrap_in_parentheses(node, child, visible=False) + + comma_check = child.type == token.COMMA + + check_lpar = isinstance(child, Leaf) and ( + child.value in parens_after or comma_check + ) + + +def _normalize_import_from(parent: Node, child: LN, index: int) -> None: + # "import from" nodes store parentheses directly as part of + # the statement + if is_lpar_token(child): + assert is_rpar_token(parent.children[-1]) + # make parentheses invisible + child.value = "" + parent.children[-1].value = "" + elif child.type != token.STAR: + # insert invisible parentheses + parent.insert_child(index, Leaf(token.LPAR, "")) + parent.append_child(Leaf(token.RPAR, "")) + + +def remove_await_parens(node: Node) -> None: + if node.children[0].type == token.AWAIT and len(node.children) > 1: + if ( + node.children[1].type == syms.atom + and node.children[1].children[0].type == token.LPAR + ): + if maybe_make_parens_invisible_in_atom( + node.children[1], + parent=node, + remove_brackets_around_comma=True, + ): + wrap_in_parentheses(node, node.children[1], visible=False) + + # Since await is an expression we shouldn't remove + # brackets in cases where this would change + # the AST due to operator precedence. + # Therefore we only aim to remove brackets around + # power nodes that aren't also await expressions themselves. + # https://peps.python.org/pep-0492/#updated-operator-precedence-table + # N.B. We've still removed any redundant nested brackets though :) + opening_bracket = cast(Leaf, node.children[1].children[0]) + closing_bracket = cast(Leaf, node.children[1].children[-1]) + bracket_contents = node.children[1].children[1] + if isinstance(bracket_contents, Node) and ( + bracket_contents.type != syms.power + or bracket_contents.children[0].type == token.AWAIT + or any( + isinstance(child, Leaf) and child.type == token.DOUBLESTAR + for child in bracket_contents.children + ) + ): + ensure_visible(opening_bracket) + ensure_visible(closing_bracket) + + +def _maybe_wrap_cms_in_parens( + node: Node, mode: Mode, features: Collection[Feature] +) -> None: + """When enabled and safe, wrap the multiple context managers in invisible parens. + + It is only safe when `features` contain Feature.PARENTHESIZED_CONTEXT_MANAGERS. + """ + if ( + Feature.PARENTHESIZED_CONTEXT_MANAGERS not in features + or len(node.children) <= 2 + # If it's an atom, it's already wrapped in parens. + or node.children[1].type == syms.atom + ): + return + colon_index: Optional[int] = None + for i in range(2, len(node.children)): + if node.children[i].type == token.COLON: + colon_index = i + break + if colon_index is not None: + lpar = Leaf(token.LPAR, "") + rpar = Leaf(token.RPAR, "") + context_managers = node.children[1:colon_index] + for child in context_managers: + child.remove() + # After wrapping, the with_stmt will look like this: + # with_stmt + # NAME 'with' + # atom + # LPAR '' + # testlist_gexp + # ... <-- context_managers + # /testlist_gexp + # RPAR '' + # /atom + # COLON ':' + new_child = Node( + syms.atom, [lpar, Node(syms.testlist_gexp, context_managers), rpar] + ) + node.insert_child(1, new_child) + + +def remove_with_parens(node: Node, parent: Node) -> None: + """Recursively hide optional parens in `with` statements.""" + # Removing all unnecessary parentheses in with statements in one pass is a tad + # complex as different variations of bracketed statements result in pretty + # different parse trees: + # + # with (open("file")) as f: # this is an asexpr_test + # ... 
+ # + # with (open("file") as f): # this is an atom containing an + # ... # asexpr_test + # + # with (open("file")) as f, (open("file")) as f: # this is asexpr_test, COMMA, + # ... # asexpr_test + # + # with (open("file") as f, open("file") as f): # an atom containing a + # ... # testlist_gexp which then + # # contains multiple asexpr_test(s) + if node.type == syms.atom: + if maybe_make_parens_invisible_in_atom( + node, + parent=parent, + remove_brackets_around_comma=True, + ): + wrap_in_parentheses(parent, node, visible=False) + if isinstance(node.children[1], Node): + remove_with_parens(node.children[1], node) + elif node.type == syms.testlist_gexp: + for child in node.children: + if isinstance(child, Node): + remove_with_parens(child, node) + elif node.type == syms.asexpr_test and not any( + leaf.type == token.COLONEQUAL for leaf in node.leaves() + ): + if maybe_make_parens_invisible_in_atom( + node.children[0], + parent=node, + remove_brackets_around_comma=True, + ): + wrap_in_parentheses(node, node.children[0], visible=False) + + +def maybe_make_parens_invisible_in_atom( + node: LN, + parent: LN, + remove_brackets_around_comma: bool = False, +) -> bool: + """If it's safe, make the parens in the atom `node` invisible, recursively. + Additionally, remove repeated, adjacent invisible parens from the atom `node` + as they are redundant. + + Returns whether the node should itself be wrapped in invisible parentheses. + """ + if ( + node.type not in (syms.atom, syms.expr) + or is_empty_tuple(node) + or is_one_tuple(node) + or (is_yield(node) and parent.type != syms.expr_stmt) + or ( + # This condition tries to prevent removing non-optional brackets + # around a tuple, however, can be a bit overzealous so we provide + # and option to skip this check for `for` and `with` statements. 
+ not remove_brackets_around_comma + and max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY + ) + or is_tuple_containing_walrus(node) + ): + return False + + if is_walrus_assignment(node): + if parent.type in [ + syms.annassign, + syms.expr_stmt, + syms.assert_stmt, + syms.return_stmt, + syms.except_clause, + syms.funcdef, + syms.with_stmt, + syms.tname, + # these ones aren't useful to end users, but they do please fuzzers + syms.for_stmt, + syms.del_stmt, + syms.for_stmt, + ]: + return False + + first = node.children[0] + last = node.children[-1] + if is_lpar_token(first) and is_rpar_token(last): + middle = node.children[1] + # make parentheses invisible + if ( + # If the prefix of `middle` includes a type comment with + # ignore annotation, then we do not remove the parentheses + not is_type_ignore_comment_string(middle.prefix.strip()) + ): + first.value = "" + if first.prefix.strip(): + # Preserve comments before first paren + middle.prefix = first.prefix + middle.prefix + last.value = "" + maybe_make_parens_invisible_in_atom( + middle, + parent=parent, + remove_brackets_around_comma=remove_brackets_around_comma, + ) + + if is_atom_with_invisible_parens(middle): + # Strip the invisible parens from `middle` by replacing + # it with the child in-between the invisible parens + middle.replace(middle.children[1]) + if middle.children[-1].prefix.strip(): + # Preserve comments before last paren + last.prefix = middle.children[-1].prefix + last.prefix + + return False + + return True + + +def should_split_line(line: Line, opening_bracket: Leaf) -> bool: + """Should `line` be immediately split with `delimiter_split()` after RHS?""" + + if not (opening_bracket.parent and opening_bracket.value in "[{("): + return False + + # We're essentially checking if the body is delimited by commas and there's more + # than one of them (we're excluding the trailing comma and if the delimiter priority + # is still commas, that means there's more). + exclude = set() + trailing_comma = False + try: + last_leaf = line.leaves[-1] + if last_leaf.type == token.COMMA: + trailing_comma = True + exclude.add(id(last_leaf)) + max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude) + except (IndexError, ValueError): + return False + + return max_priority == COMMA_PRIORITY and ( + (line.mode.magic_trailing_comma and trailing_comma) + # always explode imports + or opening_bracket.parent.type in {syms.atom, syms.import_from} + ) + + +def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]: + """Generate sets of closing bracket IDs that should be omitted in a RHS. + + Brackets can be omitted if the entire trailer up to and including + a preceding closing bracket fits in one line. + + Yielded sets are cumulative (contain results of previous yields, too). First + set is empty, unless the line should explode, in which case bracket pairs until + the one that needs to explode are omitted. 
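# Illustrative aside (not part of the vendored file above): "invisible parens"
# are ordinary LPAR/RPAR leaves whose value is the empty string, so they render
# as nothing yet still give the splitter a bracket pair to break on. A tiny
# stand-in sketch of that idea without blib2to3 (TinyLeaf is a made-up class):

from dataclasses import dataclass
from typing import List


@dataclass
class TinyLeaf:
    kind: str   # e.g. "LPAR", "NAME", "RPAR"
    value: str  # empty string -> the leaf is "invisible" when rendered


def render(leaves: List[TinyLeaf]) -> str:
    return "".join(leaf.value for leaf in leaves)


def make_parens_invisible(leaves: List[TinyLeaf]) -> None:
    # Loose analogue of maybe_make_parens_invisible_in_atom's core effect:
    # keep the bracket leaves in place, but blank their rendered text.
    if leaves and leaves[0].kind == "LPAR" and leaves[-1].kind == "RPAR":
        leaves[0].value = ""
        leaves[-1].value = ""


atom = [TinyLeaf("LPAR", "("), TinyLeaf("NAME", "value"), TinyLeaf("RPAR", ")")]
make_parens_invisible(atom)
assert render(atom) == "value"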
+ """ + + omit: Set[LeafID] = set() + if not line.magic_trailing_comma: + yield omit + + length = 4 * line.depth + opening_bracket: Optional[Leaf] = None + closing_bracket: Optional[Leaf] = None + inner_brackets: Set[LeafID] = set() + for index, leaf, leaf_length in line.enumerate_with_length(is_reversed=True): + length += leaf_length + if length > line_length: + break + + has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix) + if leaf.type == STANDALONE_COMMENT or has_inline_comment: + break + + if opening_bracket: + if leaf is opening_bracket: + opening_bracket = None + elif leaf.type in CLOSING_BRACKETS: + prev = line.leaves[index - 1] if index > 0 else None + if ( + prev + and prev.type == token.COMMA + and leaf.opening_bracket is not None + and not is_one_sequence_between( + leaf.opening_bracket, leaf, line.leaves + ) + ): + # Never omit bracket pairs with trailing commas. + # We need to explode on those. + break + + inner_brackets.add(id(leaf)) + elif leaf.type in CLOSING_BRACKETS: + prev = line.leaves[index - 1] if index > 0 else None + if prev and prev.type in OPENING_BRACKETS: + # Empty brackets would fail a split so treat them as "inner" + # brackets (e.g. only add them to the `omit` set if another + # pair of brackets was good enough. + inner_brackets.add(id(leaf)) + continue + + if closing_bracket: + omit.add(id(closing_bracket)) + omit.update(inner_brackets) + inner_brackets.clear() + yield omit + + if ( + prev + and prev.type == token.COMMA + and leaf.opening_bracket is not None + and not is_one_sequence_between(leaf.opening_bracket, leaf, line.leaves) + ): + # Never omit bracket pairs with trailing commas. + # We need to explode on those. + break + + if leaf.value: + opening_bracket = leaf.opening_bracket + closing_bracket = leaf + + +def run_transformer( + line: Line, + transform: Transformer, + mode: Mode, + features: Collection[Feature], + *, + line_str: str = "", +) -> List[Line]: + if not line_str: + line_str = line_to_string(line) + result: List[Line] = [] + for transformed_line in transform(line, features, mode): + if str(transformed_line).strip("\n") == line_str: + raise CannotTransform("Line transformer returned an unchanged result") + + result.extend(transform_line(transformed_line, mode=mode, features=features)) + + features_set = set(features) + if ( + Feature.FORCE_OPTIONAL_PARENTHESES in features_set + or transform.__class__.__name__ != "rhs" + or not line.bracket_tracker.invisible + or any(bracket.value for bracket in line.bracket_tracker.invisible) + or line.contains_multiline_strings() + or result[0].contains_uncollapsable_type_comments() + or result[0].contains_unsplittable_type_ignore() + or is_line_short_enough(result[0], mode=mode) + # If any leaves have no parents (which _can_ occur since + # `transform(line)` potentially destroys the line's underlying node + # structure), then we can't proceed. Doing so would cause the below + # call to `append_leaves()` to fail. 
+ or any(leaf.parent is None for leaf in line.leaves) + ): + return result + + line_copy = line.clone() + append_leaves(line_copy, line, line.leaves) + features_fop = features_set | {Feature.FORCE_OPTIONAL_PARENTHESES} + second_opinion = run_transformer( + line_copy, transform, mode, features_fop, line_str=line_str + ) + if all(is_line_short_enough(ln, mode=mode) for ln in second_opinion): + result = second_opinion + return result diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/lines.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/lines.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..06dc772bc Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/lines.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/lines.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/lines.py new file mode 100644 index 000000000..b9b3add9e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/lines.py @@ -0,0 +1,1078 @@ +import itertools +import math +from dataclasses import dataclass, field +from typing import ( + Callable, + Dict, + Iterator, + List, + Optional, + Sequence, + Tuple, + TypeVar, + Union, + cast, +) + +from black.brackets import COMMA_PRIORITY, DOT_PRIORITY, BracketTracker +from black.mode import Mode, Preview +from black.nodes import ( + BRACKETS, + CLOSING_BRACKETS, + OPENING_BRACKETS, + STANDALONE_COMMENT, + TEST_DESCENDANTS, + child_towards, + is_docstring, + is_import, + is_multiline_string, + is_one_sequence_between, + is_type_comment, + is_type_ignore_comment, + is_with_or_async_with_stmt, + make_simple_prefix, + replace_child, + syms, + whitespace, +) +from black.strings import str_width +from blib2to3.pgen2 import token +from blib2to3.pytree import Leaf, Node + +# types +T = TypeVar("T") +Index = int +LeafID = int +LN = Union[Leaf, Node] + + +@dataclass +class Line: + """Holds leaves and comments. Can be printed with `str(line)`.""" + + mode: Mode = field(repr=False) + depth: int = 0 + leaves: List[Leaf] = field(default_factory=list) + # keys ordered like `leaves` + comments: Dict[LeafID, List[Leaf]] = field(default_factory=dict) + bracket_tracker: BracketTracker = field(default_factory=BracketTracker) + inside_brackets: bool = False + should_split_rhs: bool = False + magic_trailing_comma: Optional[Leaf] = None + + def append( + self, leaf: Leaf, preformatted: bool = False, track_bracket: bool = False + ) -> None: + """Add a new `leaf` to the end of the line. + + Unless `preformatted` is True, the `leaf` will receive a new consistent + whitespace prefix and metadata applied by :class:`BracketTracker`. + Trailing commas are maybe removed, unpacked for loop variables are + demoted from being delimiters. + + Inline comments are put aside. + """ + has_value = ( + leaf.type in BRACKETS + # empty fstring-middles must not be truncated + or leaf.type == token.FSTRING_MIDDLE + or bool(leaf.value.strip()) + ) + if not has_value: + return + + if token.COLON == leaf.type and self.is_class_paren_empty: + del self.leaves[-2:] + if self.leaves and not preformatted: + # Note: at this point leaf.prefix should be empty except for + # imports, for which we only preserve newlines. 
+ leaf.prefix += whitespace( + leaf, + complex_subscript=self.is_complex_subscript(leaf), + mode=self.mode, + ) + if self.inside_brackets or not preformatted or track_bracket: + self.bracket_tracker.mark(leaf) + if self.mode.magic_trailing_comma: + if self.has_magic_trailing_comma(leaf): + self.magic_trailing_comma = leaf + elif self.has_magic_trailing_comma(leaf): + self.remove_trailing_comma() + if not self.append_comment(leaf): + self.leaves.append(leaf) + + def append_safe(self, leaf: Leaf, preformatted: bool = False) -> None: + """Like :func:`append()` but disallow invalid standalone comment structure. + + Raises ValueError when any `leaf` is appended after a standalone comment + or when a standalone comment is not the first leaf on the line. + """ + if ( + self.bracket_tracker.depth == 0 + or self.bracket_tracker.any_open_for_or_lambda() + ): + if self.is_comment: + raise ValueError("cannot append to standalone comments") + + if self.leaves and leaf.type == STANDALONE_COMMENT: + raise ValueError( + "cannot append standalone comments to a populated line" + ) + + self.append(leaf, preformatted=preformatted) + + @property + def is_comment(self) -> bool: + """Is this line a standalone comment?""" + return len(self.leaves) == 1 and self.leaves[0].type == STANDALONE_COMMENT + + @property + def is_decorator(self) -> bool: + """Is this line a decorator?""" + return bool(self) and self.leaves[0].type == token.AT + + @property + def is_import(self) -> bool: + """Is this an import line?""" + return bool(self) and is_import(self.leaves[0]) + + @property + def is_with_or_async_with_stmt(self) -> bool: + """Is this a with_stmt line?""" + return bool(self) and is_with_or_async_with_stmt(self.leaves[0]) + + @property + def is_class(self) -> bool: + """Is this line a class definition?""" + return ( + bool(self) + and self.leaves[0].type == token.NAME + and self.leaves[0].value == "class" + ) + + @property + def is_stub_class(self) -> bool: + """Is this line a class definition with a body consisting only of "..."?""" + return self.is_class and self.leaves[-3:] == [ + Leaf(token.DOT, ".") for _ in range(3) + ] + + @property + def is_def(self) -> bool: + """Is this a function definition? (Also returns True for async defs.)""" + try: + first_leaf = self.leaves[0] + except IndexError: + return False + + try: + second_leaf: Optional[Leaf] = self.leaves[1] + except IndexError: + second_leaf = None + return (first_leaf.type == token.NAME and first_leaf.value == "def") or ( + first_leaf.type == token.ASYNC + and second_leaf is not None + and second_leaf.type == token.NAME + and second_leaf.value == "def" + ) + + @property + def is_stub_def(self) -> bool: + """Is this line a function definition with a body consisting only of "..."?""" + return self.is_def and self.leaves[-4:] == [Leaf(token.COLON, ":")] + [ + Leaf(token.DOT, ".") for _ in range(3) + ] + + @property + def is_class_paren_empty(self) -> bool: + """Is this a class with no base classes but using parentheses? + + Those are unnecessary and should be removed. 
+ """ + return ( + bool(self) + and len(self.leaves) == 4 + and self.is_class + and self.leaves[2].type == token.LPAR + and self.leaves[2].value == "(" + and self.leaves[3].type == token.RPAR + and self.leaves[3].value == ")" + ) + + @property + def _is_triple_quoted_string(self) -> bool: + """Is the line a triple quoted string?""" + if not self or self.leaves[0].type != token.STRING: + return False + value = self.leaves[0].value + if value.startswith(('"""', "'''")): + return True + if value.startswith(("r'''", 'r"""', "R'''", 'R"""')): + return True + return False + + @property + def is_docstring(self) -> bool: + """Is the line a docstring?""" + if Preview.unify_docstring_detection not in self.mode: + return self._is_triple_quoted_string + return bool(self) and is_docstring(self.leaves[0], self.mode) + + @property + def is_chained_assignment(self) -> bool: + """Is the line a chained assignment""" + return [leaf.type for leaf in self.leaves].count(token.EQUAL) > 1 + + @property + def opens_block(self) -> bool: + """Does this line open a new level of indentation.""" + if len(self.leaves) == 0: + return False + return self.leaves[-1].type == token.COLON + + def is_fmt_pass_converted( + self, *, first_leaf_matches: Optional[Callable[[Leaf], bool]] = None + ) -> bool: + """Is this line converted from fmt off/skip code? + + If first_leaf_matches is not None, it only returns True if the first + leaf of converted code matches. + """ + if len(self.leaves) != 1: + return False + leaf = self.leaves[0] + if ( + leaf.type != STANDALONE_COMMENT + or leaf.fmt_pass_converted_first_leaf is None + ): + return False + return first_leaf_matches is None or first_leaf_matches( + leaf.fmt_pass_converted_first_leaf + ) + + def contains_standalone_comments(self) -> bool: + """If so, needs to be split before emitting.""" + for leaf in self.leaves: + if leaf.type == STANDALONE_COMMENT: + return True + + return False + + def contains_implicit_multiline_string_with_comments(self) -> bool: + """Chck if we have an implicit multiline string with comments on the line""" + for leaf_type, leaf_group_iterator in itertools.groupby( + self.leaves, lambda leaf: leaf.type + ): + if leaf_type != token.STRING: + continue + leaf_list = list(leaf_group_iterator) + if len(leaf_list) == 1: + continue + for leaf in leaf_list: + if self.comments_after(leaf): + return True + return False + + def contains_uncollapsable_type_comments(self) -> bool: + ignored_ids = set() + try: + last_leaf = self.leaves[-1] + ignored_ids.add(id(last_leaf)) + if last_leaf.type == token.COMMA or ( + last_leaf.type == token.RPAR and not last_leaf.value + ): + # When trailing commas or optional parens are inserted by Black for + # consistency, comments after the previous last element are not moved + # (they don't have to, rendering will still be correct). So we ignore + # trailing commas and invisible. + last_leaf = self.leaves[-2] + ignored_ids.add(id(last_leaf)) + except IndexError: + return False + + # A type comment is uncollapsable if it is attached to a leaf + # that isn't at the end of the line (since that could cause it + # to get associated to a different argument) or if there are + # comments before it (since that could cause it to get hidden + # behind a comment. 
+ comment_seen = False + for leaf_id, comments in self.comments.items(): + for comment in comments: + if is_type_comment(comment): + if comment_seen or ( + not is_type_ignore_comment(comment) + and leaf_id not in ignored_ids + ): + return True + + comment_seen = True + + return False + + def contains_unsplittable_type_ignore(self) -> bool: + if not self.leaves: + return False + + # If a 'type: ignore' is attached to the end of a line, we + # can't split the line, because we can't know which of the + # subexpressions the ignore was meant to apply to. + # + # We only want this to apply to actual physical lines from the + # original source, though: we don't want the presence of a + # 'type: ignore' at the end of a multiline expression to + # justify pushing it all onto one line. Thus we + # (unfortunately) need to check the actual source lines and + # only report an unsplittable 'type: ignore' if this line was + # one line in the original code. + + # Grab the first and last line numbers, skipping generated leaves + first_line = next((leaf.lineno for leaf in self.leaves if leaf.lineno != 0), 0) + last_line = next( + (leaf.lineno for leaf in reversed(self.leaves) if leaf.lineno != 0), 0 + ) + + if first_line == last_line: + # We look at the last two leaves since a comma or an + # invisible paren could have been added at the end of the + # line. + for node in self.leaves[-2:]: + for comment in self.comments.get(id(node), []): + if is_type_ignore_comment(comment): + return True + + return False + + def contains_multiline_strings(self) -> bool: + return any(is_multiline_string(leaf) for leaf in self.leaves) + + def has_magic_trailing_comma(self, closing: Leaf) -> bool: + """Return True if we have a magic trailing comma, that is when: + - there's a trailing comma here + - it's not from single-element square bracket indexing + - it's not a one-tuple + """ + if not ( + closing.type in CLOSING_BRACKETS + and self.leaves + and self.leaves[-1].type == token.COMMA + ): + return False + + if closing.type == token.RBRACE: + return True + + if closing.type == token.RSQB: + if ( + closing.parent is not None + and closing.parent.type == syms.trailer + and closing.opening_bracket is not None + and is_one_sequence_between( + closing.opening_bracket, + closing, + self.leaves, + brackets=(token.LSQB, token.RSQB), + ) + ): + assert closing.prev_sibling is not None + assert closing.prev_sibling.type == syms.subscriptlist + return False + + return True + + if self.is_import: + return True + + if closing.opening_bracket is not None and not is_one_sequence_between( + closing.opening_bracket, closing, self.leaves + ): + return True + + return False + + def append_comment(self, comment: Leaf) -> bool: + """Add an inline or standalone comment to the line.""" + if ( + comment.type == STANDALONE_COMMENT + and self.bracket_tracker.any_open_brackets() + ): + comment.prefix = "" + return False + + if comment.type != token.COMMENT: + return False + + if not self.leaves: + comment.type = STANDALONE_COMMENT + comment.prefix = "" + return False + + last_leaf = self.leaves[-1] + if ( + last_leaf.type == token.RPAR + and not last_leaf.value + and last_leaf.parent + and len(list(last_leaf.parent.leaves())) <= 3 + and not is_type_comment(comment) + ): + # Comments on an optional parens wrapping a single leaf should belong to + # the wrapped node except if it's a type comment. Pinning the comment like + # this avoids unstable formatting caused by comment migration. 
+ if len(self.leaves) < 2: + comment.type = STANDALONE_COMMENT + comment.prefix = "" + return False + + last_leaf = self.leaves[-2] + self.comments.setdefault(id(last_leaf), []).append(comment) + return True + + def comments_after(self, leaf: Leaf) -> List[Leaf]: + """Generate comments that should appear directly after `leaf`.""" + return self.comments.get(id(leaf), []) + + def remove_trailing_comma(self) -> None: + """Remove the trailing comma and moves the comments attached to it.""" + trailing_comma = self.leaves.pop() + trailing_comma_comments = self.comments.pop(id(trailing_comma), []) + self.comments.setdefault(id(self.leaves[-1]), []).extend( + trailing_comma_comments + ) + + def is_complex_subscript(self, leaf: Leaf) -> bool: + """Return True iff `leaf` is part of a slice with non-trivial exprs.""" + open_lsqb = self.bracket_tracker.get_open_lsqb() + if open_lsqb is None: + return False + + subscript_start = open_lsqb.next_sibling + + if isinstance(subscript_start, Node): + if subscript_start.type == syms.listmaker: + return False + + if subscript_start.type == syms.subscriptlist: + subscript_start = child_towards(subscript_start, leaf) + + return subscript_start is not None and any( + n.type in TEST_DESCENDANTS for n in subscript_start.pre_order() + ) + + def enumerate_with_length( + self, is_reversed: bool = False + ) -> Iterator[Tuple[Index, Leaf, int]]: + """Return an enumeration of leaves with their length. + + Stops prematurely on multiline strings and standalone comments. + """ + op = cast( + Callable[[Sequence[Leaf]], Iterator[Tuple[Index, Leaf]]], + enumerate_reversed if is_reversed else enumerate, + ) + for index, leaf in op(self.leaves): + length = len(leaf.prefix) + len(leaf.value) + if "\n" in leaf.value: + return # Multiline strings, we can't continue. + + for comment in self.comments_after(leaf): + length += len(comment.value) + + yield index, leaf, length + + def clone(self) -> "Line": + return Line( + mode=self.mode, + depth=self.depth, + inside_brackets=self.inside_brackets, + should_split_rhs=self.should_split_rhs, + magic_trailing_comma=self.magic_trailing_comma, + ) + + def __str__(self) -> str: + """Render the line.""" + if not self: + return "\n" + + indent = " " * self.depth + leaves = iter(self.leaves) + first = next(leaves) + res = f"{first.prefix}{indent}{first.value}" + for leaf in leaves: + res += str(leaf) + for comment in itertools.chain.from_iterable(self.comments.values()): + res += str(comment) + + return res + "\n" + + def __bool__(self) -> bool: + """Return True if the line has leaves or comments.""" + return bool(self.leaves or self.comments) + + +@dataclass +class RHSResult: + """Intermediate split result from a right hand split.""" + + head: Line + body: Line + tail: Line + opening_bracket: Leaf + closing_bracket: Leaf + + +@dataclass +class LinesBlock: + """Class that holds information about a block of formatted lines. + + This is introduced so that the EmptyLineTracker can look behind the standalone + comments and adjust their empty lines for class or def lines. 
+ """ + + mode: Mode + previous_block: Optional["LinesBlock"] + original_line: Line + before: int = 0 + content_lines: List[str] = field(default_factory=list) + after: int = 0 + form_feed: bool = False + + def all_lines(self) -> List[str]: + empty_line = str(Line(mode=self.mode)) + prefix = make_simple_prefix(self.before, self.form_feed, empty_line) + return [prefix] + self.content_lines + [empty_line * self.after] + + +@dataclass +class EmptyLineTracker: + """Provides a stateful method that returns the number of potential extra + empty lines needed before and after the currently processed line. + + Note: this tracker works on lines that haven't been split yet. It assumes + the prefix of the first leaf consists of optional newlines. Those newlines + are consumed by `maybe_empty_lines()` and included in the computation. + """ + + mode: Mode + previous_line: Optional[Line] = None + previous_block: Optional[LinesBlock] = None + previous_defs: List[Line] = field(default_factory=list) + semantic_leading_comment: Optional[LinesBlock] = None + + def maybe_empty_lines(self, current_line: Line) -> LinesBlock: + """Return the number of extra empty lines before and after the `current_line`. + + This is for separating `def`, `async def` and `class` with extra empty + lines (two on module-level). + """ + form_feed = ( + current_line.depth == 0 + and bool(current_line.leaves) + and "\f\n" in current_line.leaves[0].prefix + ) + before, after = self._maybe_empty_lines(current_line) + previous_after = self.previous_block.after if self.previous_block else 0 + before = max(0, before - previous_after) + if ( + # Always have one empty line after a module docstring + self.previous_block + and self.previous_block.previous_block is None + and len(self.previous_block.original_line.leaves) == 1 + and self.previous_block.original_line.is_docstring + and not (current_line.is_class or current_line.is_def) + ): + before = 1 + + block = LinesBlock( + mode=self.mode, + previous_block=self.previous_block, + original_line=current_line, + before=before, + after=after, + form_feed=form_feed, + ) + + # Maintain the semantic_leading_comment state. + if current_line.is_comment: + if self.previous_line is None or ( + not self.previous_line.is_decorator + # `or before` means this comment already has an empty line before + and (not self.previous_line.is_comment or before) + and (self.semantic_leading_comment is None or before) + ): + self.semantic_leading_comment = block + # `or before` means this decorator already has an empty line before + elif not current_line.is_decorator or before: + self.semantic_leading_comment = None + + self.previous_line = current_line + self.previous_block = block + return block + + def _maybe_empty_lines(self, current_line: Line) -> Tuple[int, int]: # noqa: C901 + max_allowed = 1 + if current_line.depth == 0: + max_allowed = 1 if self.mode.is_pyi else 2 + + if current_line.leaves: + # Consume the first leaf's extra newlines. 
+ first_leaf = current_line.leaves[0] + before = first_leaf.prefix.count("\n") + before = min(before, max_allowed) + first_leaf.prefix = "" + else: + before = 0 + + user_had_newline = bool(before) + depth = current_line.depth + + # Mutate self.previous_defs, remainder of this function should be pure + previous_def = None + while self.previous_defs and self.previous_defs[-1].depth >= depth: + previous_def = self.previous_defs.pop() + if current_line.is_def or current_line.is_class: + self.previous_defs.append(current_line) + + if self.previous_line is None: + # Don't insert empty lines before the first line in the file. + return 0, 0 + + if current_line.is_docstring: + if self.previous_line.is_class: + return 0, 1 + if self.previous_line.opens_block and self.previous_line.is_def: + return 0, 0 + + if previous_def is not None: + assert self.previous_line is not None + if self.mode.is_pyi: + if previous_def.is_class and not previous_def.is_stub_class: + before = 1 + elif depth and not current_line.is_def and self.previous_line.is_def: + # Empty lines between attributes and methods should be preserved. + before = 1 if user_had_newline else 0 + elif depth: + before = 0 + else: + before = 1 + else: + if depth: + before = 1 + elif ( + not depth + and previous_def.depth + and current_line.leaves[-1].type == token.COLON + and ( + current_line.leaves[0].value + not in ("with", "try", "for", "while", "if", "match") + ) + ): + # We shouldn't add two newlines between an indented function and + # a dependent non-indented clause. This is to avoid issues with + # conditional function definitions that are technically top-level + # and therefore get two trailing newlines, but look weird and + # inconsistent when they're followed by elif, else, etc. This is + # worse because these functions only get *one* preceding newline + # already. 
+ before = 1 + else: + before = 2 + + if current_line.is_decorator or current_line.is_def or current_line.is_class: + return self._maybe_empty_lines_for_class_or_def( + current_line, before, user_had_newline + ) + + if ( + self.previous_line.is_import + and not current_line.is_import + and not current_line.is_fmt_pass_converted(first_leaf_matches=is_import) + and depth == self.previous_line.depth + ): + return (before or 1), 0 + + return before, 0 + + def _maybe_empty_lines_for_class_or_def( # noqa: C901 + self, current_line: Line, before: int, user_had_newline: bool + ) -> Tuple[int, int]: + assert self.previous_line is not None + + if self.previous_line.is_decorator: + if self.mode.is_pyi and current_line.is_stub_class: + # Insert an empty line after a decorated stub class + return 0, 1 + return 0, 0 + + if self.previous_line.depth < current_line.depth and ( + self.previous_line.is_class or self.previous_line.is_def + ): + if self.mode.is_pyi: + return 0, 0 + return 1 if user_had_newline else 0, 0 + + comment_to_add_newlines: Optional[LinesBlock] = None + if ( + self.previous_line.is_comment + and self.previous_line.depth == current_line.depth + and before == 0 + ): + slc = self.semantic_leading_comment + if ( + slc is not None + and slc.previous_block is not None + and not slc.previous_block.original_line.is_class + and not slc.previous_block.original_line.opens_block + and slc.before <= 1 + ): + comment_to_add_newlines = slc + else: + return 0, 0 + + if self.mode.is_pyi: + if current_line.is_class or self.previous_line.is_class: + if self.previous_line.depth < current_line.depth: + newlines = 0 + elif self.previous_line.depth > current_line.depth: + newlines = 1 + elif current_line.is_stub_class and self.previous_line.is_stub_class: + # No blank line between classes with an empty body + newlines = 0 + else: + newlines = 1 + # Don't inspect the previous line if it's part of the body of the previous + # statement in the same level, we always want a blank line if there's + # something with a body preceding. + elif self.previous_line.depth > current_line.depth: + newlines = 1 + elif ( + current_line.is_def or current_line.is_decorator + ) and not self.previous_line.is_def: + if current_line.depth: + # In classes empty lines between attributes and methods should + # be preserved. + newlines = min(1, before) + else: + # Blank line between a block of functions (maybe with preceding + # decorators) and a block of non-functions + newlines = 1 + else: + newlines = 0 + else: + newlines = 1 if current_line.depth else 2 + # If a user has left no space after a dummy implementation, don't insert + # new lines. This is useful for instance for @overload or Protocols. 
+ if self.previous_line.is_stub_def and not user_had_newline: + newlines = 0 + if comment_to_add_newlines is not None: + previous_block = comment_to_add_newlines.previous_block + if previous_block is not None: + comment_to_add_newlines.before = ( + max(comment_to_add_newlines.before, newlines) - previous_block.after + ) + newlines = 0 + return newlines, 0 + + +def enumerate_reversed(sequence: Sequence[T]) -> Iterator[Tuple[Index, T]]: + """Like `reversed(enumerate(sequence))` if that were possible.""" + index = len(sequence) - 1 + for element in reversed(sequence): + yield (index, element) + index -= 1 + + +def append_leaves( + new_line: Line, old_line: Line, leaves: List[Leaf], preformatted: bool = False +) -> None: + """ + Append leaves (taken from @old_line) to @new_line, making sure to fix the + underlying Node structure where appropriate. + + All of the leaves in @leaves are duplicated. The duplicates are then + appended to @new_line and used to replace their originals in the underlying + Node structure. Any comments attached to the old leaves are reattached to + the new leaves. + + Pre-conditions: + set(@leaves) is a subset of set(@old_line.leaves). + """ + for old_leaf in leaves: + new_leaf = Leaf(old_leaf.type, old_leaf.value) + replace_child(old_leaf, new_leaf) + new_line.append(new_leaf, preformatted=preformatted) + + for comment_leaf in old_line.comments_after(old_leaf): + new_line.append(comment_leaf, preformatted=True) + + +def is_line_short_enough( # noqa: C901 + line: Line, *, mode: Mode, line_str: str = "" +) -> bool: + """For non-multiline strings, return True if `line` is no longer than `line_length`. + For multiline strings, looks at the context around `line` to determine + if it should be inlined or split up. + Uses the provided `line_str` rendering, if any, otherwise computes a new one. + """ + if not line_str: + line_str = line_to_string(line) + + if Preview.multiline_string_handling not in mode: + return ( + str_width(line_str) <= mode.line_length + and "\n" not in line_str # multiline strings + and not line.contains_standalone_comments() + ) + + if line.contains_standalone_comments(): + return False + if "\n" not in line_str: + # No multiline strings (MLS) present + return str_width(line_str) <= mode.line_length + + first, *_, last = line_str.split("\n") + if str_width(first) > mode.line_length or str_width(last) > mode.line_length: + return False + + # Traverse the AST to examine the context of the multiline string (MLS), + # tracking aspects such as depth and comma existence, + # to determine whether to split the MLS or keep it together. + # Depth (which is based on the existing bracket_depth concept) + # is needed to determine nesting level of the MLS. + # Includes special case for trailing commas. 
+ commas: List[int] = [] # tracks number of commas per depth level + multiline_string: Optional[Leaf] = None + # store the leaves that contain parts of the MLS + multiline_string_contexts: List[LN] = [] + + max_level_to_update: Union[int, float] = math.inf # track the depth of the MLS + for i, leaf in enumerate(line.leaves): + if max_level_to_update == math.inf: + had_comma: Optional[int] = None + if leaf.bracket_depth + 1 > len(commas): + commas.append(0) + elif leaf.bracket_depth + 1 < len(commas): + had_comma = commas.pop() + if ( + had_comma is not None + and multiline_string is not None + and multiline_string.bracket_depth == leaf.bracket_depth + 1 + ): + # Have left the level with the MLS, stop tracking commas + max_level_to_update = leaf.bracket_depth + if had_comma > 0: + # MLS was in parens with at least one comma - force split + return False + + if leaf.bracket_depth <= max_level_to_update and leaf.type == token.COMMA: + # Inside brackets, ignore trailing comma + # directly after MLS/MLS-containing expression + ignore_ctxs: List[Optional[LN]] = [None] + ignore_ctxs += multiline_string_contexts + if (line.inside_brackets or leaf.bracket_depth > 0) and ( + i != len(line.leaves) - 1 or leaf.prev_sibling not in ignore_ctxs + ): + commas[leaf.bracket_depth] += 1 + if max_level_to_update != math.inf: + max_level_to_update = min(max_level_to_update, leaf.bracket_depth) + + if is_multiline_string(leaf): + if len(multiline_string_contexts) > 0: + # >1 multiline string cannot fit on a single line - force split + return False + multiline_string = leaf + ctx: LN = leaf + # fetch the leaf components of the MLS in the AST + while str(ctx) in line_str: + multiline_string_contexts.append(ctx) + if ctx.parent is None: + break + ctx = ctx.parent + + # May not have a triple-quoted multiline string at all, + # in case of a regular string with embedded newlines and line continuations + if len(multiline_string_contexts) == 0: + return True + + return all(val == 0 for val in commas) + + +def can_be_split(line: Line) -> bool: + """Return False if the line cannot be split *for sure*. + + This is not an exhaustive search but a cheap heuristic that we can use to + avoid some unfortunate formattings (mostly around wrapping unsplittable code + in unnecessary parentheses). + """ + leaves = line.leaves + if len(leaves) < 2: + return False + + if leaves[0].type == token.STRING and leaves[1].type == token.DOT: + call_count = 0 + dot_count = 0 + next = leaves[-1] + for leaf in leaves[-2::-1]: + if leaf.type in OPENING_BRACKETS: + if next.type not in CLOSING_BRACKETS: + return False + + call_count += 1 + elif leaf.type == token.DOT: + dot_count += 1 + elif leaf.type == token.NAME: + if not (next.type == token.DOT or next.type in OPENING_BRACKETS): + return False + + elif leaf.type not in CLOSING_BRACKETS: + return False + + if dot_count > 1 and call_count > 1: + return False + + return True + + +def can_omit_invisible_parens( + rhs: RHSResult, + line_length: int, +) -> bool: + """Does `rhs.body` have a shape safe to reformat without optional parens around it? + + Returns True for only a subset of potentially nice looking formattings but + the point is to not return false positives that end up producing lines that + are too long. 
+ """ + line = rhs.body + + # We need optional parens in order to split standalone comments to their own lines + # if there are no nested parens around the standalone comments + closing_bracket: Optional[Leaf] = None + for leaf in reversed(line.leaves): + if closing_bracket and leaf is closing_bracket.opening_bracket: + closing_bracket = None + if leaf.type == STANDALONE_COMMENT and not closing_bracket: + return False + if ( + not closing_bracket + and leaf.type in CLOSING_BRACKETS + and leaf.opening_bracket in line.leaves + and leaf.value + ): + closing_bracket = leaf + + bt = line.bracket_tracker + if not bt.delimiters: + # Without delimiters the optional parentheses are useless. + return True + + max_priority = bt.max_delimiter_priority() + delimiter_count = bt.delimiter_count_with_priority(max_priority) + if delimiter_count > 1: + # With more than one delimiter of a kind the optional parentheses read better. + return False + + if delimiter_count == 1: + if max_priority == COMMA_PRIORITY and rhs.head.is_with_or_async_with_stmt: + # For two context manager with statements, the optional parentheses read + # better. In this case, `rhs.body` is the context managers part of + # the with statement. `rhs.head` is the `with (` part on the previous + # line. + return False + # Otherwise it may also read better, but we don't do it today and requires + # careful considerations for all possible cases. See + # https://github.com/psf/black/issues/2156. + + if max_priority == DOT_PRIORITY: + # A single stranded method call doesn't require optional parentheses. + return True + + assert len(line.leaves) >= 2, "Stranded delimiter" + + # With a single delimiter, omit if the expression starts or ends with + # a bracket. + first = line.leaves[0] + second = line.leaves[1] + if first.type in OPENING_BRACKETS and second.type not in CLOSING_BRACKETS: + if _can_omit_opening_paren(line, first=first, line_length=line_length): + return True + + # Note: we are not returning False here because a line might have *both* + # a leading opening bracket and a trailing closing bracket. If the + # opening bracket doesn't match our rule, maybe the closing will. + + penultimate = line.leaves[-2] + last = line.leaves[-1] + + if ( + last.type == token.RPAR + or last.type == token.RBRACE + or ( + # don't use indexing for omitting optional parentheses; + # it looks weird + last.type == token.RSQB + and last.parent + and last.parent.type != syms.trailer + ) + ): + if penultimate.type in OPENING_BRACKETS: + # Empty brackets don't help. + return False + + if is_multiline_string(first): + # Additional wrapping of a multiline string in this situation is + # unnecessary. + return True + + if _can_omit_closing_paren(line, last=last, line_length=line_length): + return True + + return False + + +def _can_omit_opening_paren(line: Line, *, first: Leaf, line_length: int) -> bool: + """See `can_omit_invisible_parens`.""" + remainder = False + length = 4 * line.depth + _index = -1 + for _index, leaf, leaf_length in line.enumerate_with_length(): + if leaf.type in CLOSING_BRACKETS and leaf.opening_bracket is first: + remainder = True + if remainder: + length += leaf_length + if length > line_length: + break + + if leaf.type in OPENING_BRACKETS: + # There are brackets we can further split on. 
+ remainder = False + + else: + # checked the entire string and line length wasn't exceeded + if len(line.leaves) == _index + 1: + return True + + return False + + +def _can_omit_closing_paren(line: Line, *, last: Leaf, line_length: int) -> bool: + """See `can_omit_invisible_parens`.""" + length = 4 * line.depth + seen_other_brackets = False + for _index, leaf, leaf_length in line.enumerate_with_length(): + length += leaf_length + if leaf is last.opening_bracket: + if seen_other_brackets or length <= line_length: + return True + + elif leaf.type in OPENING_BRACKETS: + # There are brackets we can further split on. + seen_other_brackets = True + + return False + + +def line_to_string(line: Line) -> str: + """Returns the string representation of @line. + + WARNING: This is known to be computationally expensive. + """ + return str(line).strip("\n") diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/mode.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/mode.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..bdf6def7d Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/mode.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/mode.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/mode.py new file mode 100644 index 000000000..ca224fc8d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/mode.py @@ -0,0 +1,290 @@ +"""Data structures configuring Black behavior. + +Mostly around Python language feature support per version and Black configuration +chosen by the user. +""" + +from dataclasses import dataclass, field +from enum import Enum, auto +from hashlib import sha256 +from operator import attrgetter +from typing import Dict, Final, Set + +from black.const import DEFAULT_LINE_LENGTH + + +class TargetVersion(Enum): + PY33 = 3 + PY34 = 4 + PY35 = 5 + PY36 = 6 + PY37 = 7 + PY38 = 8 + PY39 = 9 + PY310 = 10 + PY311 = 11 + PY312 = 12 + PY313 = 13 + + +class Feature(Enum): + F_STRINGS = 2 + NUMERIC_UNDERSCORES = 3 + TRAILING_COMMA_IN_CALL = 4 + TRAILING_COMMA_IN_DEF = 5 + # The following two feature-flags are mutually exclusive, and exactly one should be + # set for every version of python. 
+ ASYNC_IDENTIFIERS = 6 + ASYNC_KEYWORDS = 7 + ASSIGNMENT_EXPRESSIONS = 8 + POS_ONLY_ARGUMENTS = 9 + RELAXED_DECORATORS = 10 + PATTERN_MATCHING = 11 + UNPACKING_ON_FLOW = 12 + ANN_ASSIGN_EXTENDED_RHS = 13 + EXCEPT_STAR = 14 + VARIADIC_GENERICS = 15 + DEBUG_F_STRINGS = 16 + PARENTHESIZED_CONTEXT_MANAGERS = 17 + TYPE_PARAMS = 18 + FSTRING_PARSING = 19 + TYPE_PARAM_DEFAULTS = 20 + FORCE_OPTIONAL_PARENTHESES = 50 + + # __future__ flags + FUTURE_ANNOTATIONS = 51 + + +FUTURE_FLAG_TO_FEATURE: Final = { + "annotations": Feature.FUTURE_ANNOTATIONS, +} + + +VERSION_TO_FEATURES: Dict[TargetVersion, Set[Feature]] = { + TargetVersion.PY33: {Feature.ASYNC_IDENTIFIERS}, + TargetVersion.PY34: {Feature.ASYNC_IDENTIFIERS}, + TargetVersion.PY35: {Feature.TRAILING_COMMA_IN_CALL, Feature.ASYNC_IDENTIFIERS}, + TargetVersion.PY36: { + Feature.F_STRINGS, + Feature.NUMERIC_UNDERSCORES, + Feature.TRAILING_COMMA_IN_CALL, + Feature.TRAILING_COMMA_IN_DEF, + Feature.ASYNC_IDENTIFIERS, + }, + TargetVersion.PY37: { + Feature.F_STRINGS, + Feature.NUMERIC_UNDERSCORES, + Feature.TRAILING_COMMA_IN_CALL, + Feature.TRAILING_COMMA_IN_DEF, + Feature.ASYNC_KEYWORDS, + Feature.FUTURE_ANNOTATIONS, + }, + TargetVersion.PY38: { + Feature.F_STRINGS, + Feature.DEBUG_F_STRINGS, + Feature.NUMERIC_UNDERSCORES, + Feature.TRAILING_COMMA_IN_CALL, + Feature.TRAILING_COMMA_IN_DEF, + Feature.ASYNC_KEYWORDS, + Feature.FUTURE_ANNOTATIONS, + Feature.ASSIGNMENT_EXPRESSIONS, + Feature.POS_ONLY_ARGUMENTS, + Feature.UNPACKING_ON_FLOW, + Feature.ANN_ASSIGN_EXTENDED_RHS, + }, + TargetVersion.PY39: { + Feature.F_STRINGS, + Feature.DEBUG_F_STRINGS, + Feature.NUMERIC_UNDERSCORES, + Feature.TRAILING_COMMA_IN_CALL, + Feature.TRAILING_COMMA_IN_DEF, + Feature.ASYNC_KEYWORDS, + Feature.FUTURE_ANNOTATIONS, + Feature.ASSIGNMENT_EXPRESSIONS, + Feature.RELAXED_DECORATORS, + Feature.POS_ONLY_ARGUMENTS, + Feature.UNPACKING_ON_FLOW, + Feature.ANN_ASSIGN_EXTENDED_RHS, + Feature.PARENTHESIZED_CONTEXT_MANAGERS, + }, + TargetVersion.PY310: { + Feature.F_STRINGS, + Feature.DEBUG_F_STRINGS, + Feature.NUMERIC_UNDERSCORES, + Feature.TRAILING_COMMA_IN_CALL, + Feature.TRAILING_COMMA_IN_DEF, + Feature.ASYNC_KEYWORDS, + Feature.FUTURE_ANNOTATIONS, + Feature.ASSIGNMENT_EXPRESSIONS, + Feature.RELAXED_DECORATORS, + Feature.POS_ONLY_ARGUMENTS, + Feature.UNPACKING_ON_FLOW, + Feature.ANN_ASSIGN_EXTENDED_RHS, + Feature.PARENTHESIZED_CONTEXT_MANAGERS, + Feature.PATTERN_MATCHING, + }, + TargetVersion.PY311: { + Feature.F_STRINGS, + Feature.DEBUG_F_STRINGS, + Feature.NUMERIC_UNDERSCORES, + Feature.TRAILING_COMMA_IN_CALL, + Feature.TRAILING_COMMA_IN_DEF, + Feature.ASYNC_KEYWORDS, + Feature.FUTURE_ANNOTATIONS, + Feature.ASSIGNMENT_EXPRESSIONS, + Feature.RELAXED_DECORATORS, + Feature.POS_ONLY_ARGUMENTS, + Feature.UNPACKING_ON_FLOW, + Feature.ANN_ASSIGN_EXTENDED_RHS, + Feature.PARENTHESIZED_CONTEXT_MANAGERS, + Feature.PATTERN_MATCHING, + Feature.EXCEPT_STAR, + Feature.VARIADIC_GENERICS, + }, + TargetVersion.PY312: { + Feature.F_STRINGS, + Feature.DEBUG_F_STRINGS, + Feature.NUMERIC_UNDERSCORES, + Feature.TRAILING_COMMA_IN_CALL, + Feature.TRAILING_COMMA_IN_DEF, + Feature.ASYNC_KEYWORDS, + Feature.FUTURE_ANNOTATIONS, + Feature.ASSIGNMENT_EXPRESSIONS, + Feature.RELAXED_DECORATORS, + Feature.POS_ONLY_ARGUMENTS, + Feature.UNPACKING_ON_FLOW, + Feature.ANN_ASSIGN_EXTENDED_RHS, + Feature.PARENTHESIZED_CONTEXT_MANAGERS, + Feature.PATTERN_MATCHING, + Feature.EXCEPT_STAR, + Feature.VARIADIC_GENERICS, + Feature.TYPE_PARAMS, + Feature.FSTRING_PARSING, + }, + TargetVersion.PY313: { + Feature.F_STRINGS, + 
Feature.DEBUG_F_STRINGS, + Feature.NUMERIC_UNDERSCORES, + Feature.TRAILING_COMMA_IN_CALL, + Feature.TRAILING_COMMA_IN_DEF, + Feature.ASYNC_KEYWORDS, + Feature.FUTURE_ANNOTATIONS, + Feature.ASSIGNMENT_EXPRESSIONS, + Feature.RELAXED_DECORATORS, + Feature.POS_ONLY_ARGUMENTS, + Feature.UNPACKING_ON_FLOW, + Feature.ANN_ASSIGN_EXTENDED_RHS, + Feature.PARENTHESIZED_CONTEXT_MANAGERS, + Feature.PATTERN_MATCHING, + Feature.EXCEPT_STAR, + Feature.VARIADIC_GENERICS, + Feature.TYPE_PARAMS, + Feature.FSTRING_PARSING, + Feature.TYPE_PARAM_DEFAULTS, + }, +} + + +def supports_feature(target_versions: Set[TargetVersion], feature: Feature) -> bool: + return all(feature in VERSION_TO_FEATURES[version] for version in target_versions) + + +class Preview(Enum): + """Individual preview style features.""" + + hex_codes_in_unicode_sequences = auto() + # NOTE: string_processing requires wrap_long_dict_values_in_parens + # for https://github.com/psf/black/issues/3117 to be fixed. + string_processing = auto() + hug_parens_with_braces_and_square_brackets = auto() + unify_docstring_detection = auto() + no_normalize_fmt_skip_whitespace = auto() + wrap_long_dict_values_in_parens = auto() + multiline_string_handling = auto() + typed_params_trailing_comma = auto() + is_simple_lookup_for_doublestar_expression = auto() + docstring_check_for_newline = auto() + remove_redundant_guard_parens = auto() + parens_for_long_if_clauses_in_case_block = auto() + + +UNSTABLE_FEATURES: Set[Preview] = { + # Many issues, see summary in https://github.com/psf/black/issues/4042 + Preview.string_processing, + # See issues #3452 and #4158 + Preview.wrap_long_dict_values_in_parens, + # See issue #4159 + Preview.multiline_string_handling, + # See issue #4036 (crash), #4098, #4099 (proposed tweaks) + Preview.hug_parens_with_braces_and_square_brackets, +} + + +class Deprecated(UserWarning): + """Visible deprecation warning.""" + + +_MAX_CACHE_KEY_PART_LENGTH: Final = 32 + + +@dataclass +class Mode: + target_versions: Set[TargetVersion] = field(default_factory=set) + line_length: int = DEFAULT_LINE_LENGTH + string_normalization: bool = True + is_pyi: bool = False + is_ipynb: bool = False + skip_source_first_line: bool = False + magic_trailing_comma: bool = True + python_cell_magics: Set[str] = field(default_factory=set) + preview: bool = False + unstable: bool = False + enabled_features: Set[Preview] = field(default_factory=set) + + def __contains__(self, feature: Preview) -> bool: + """ + Provide `Preview.FEATURE in Mode` syntax that mirrors the ``preview`` flag. + + In unstable mode, all features are enabled. In preview mode, all features + except those in UNSTABLE_FEATURES are enabled. Any features in + `self.enabled_features` are also enabled. 
+ """ + if self.unstable: + return True + if feature in self.enabled_features: + return True + return self.preview and feature not in UNSTABLE_FEATURES + + def get_cache_key(self) -> str: + if self.target_versions: + version_str = ",".join( + str(version.value) + for version in sorted(self.target_versions, key=attrgetter("value")) + ) + else: + version_str = "-" + if len(version_str) > _MAX_CACHE_KEY_PART_LENGTH: + version_str = sha256(version_str.encode()).hexdigest()[ + :_MAX_CACHE_KEY_PART_LENGTH + ] + features_and_magics = ( + ",".join(sorted(f.name for f in self.enabled_features)) + + "@" + + ",".join(sorted(self.python_cell_magics)) + ) + if len(features_and_magics) > _MAX_CACHE_KEY_PART_LENGTH: + features_and_magics = sha256(features_and_magics.encode()).hexdigest()[ + :_MAX_CACHE_KEY_PART_LENGTH + ] + parts = [ + version_str, + str(self.line_length), + str(int(self.string_normalization)), + str(int(self.is_pyi)), + str(int(self.is_ipynb)), + str(int(self.skip_source_first_line)), + str(int(self.magic_trailing_comma)), + str(int(self.preview)), + features_and_magics, + ] + return ".".join(parts) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/nodes.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/nodes.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..ba81762fd Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/nodes.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/nodes.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/nodes.py new file mode 100644 index 000000000..9579b715a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/nodes.py @@ -0,0 +1,1037 @@ +""" +blib2to3 Node/Leaf transformation-related utility functions. 
+""" + +import sys +from typing import ( + Final, + Generic, + Iterator, + List, + Literal, + Optional, + Set, + Tuple, + TypeVar, + Union, +) + +if sys.version_info >= (3, 10): + from typing import TypeGuard +else: + from typing_extensions import TypeGuard + +from mypy_extensions import mypyc_attr + +from black.cache import CACHE_DIR +from black.mode import Mode, Preview +from black.strings import get_string_prefix, has_triple_quotes +from blib2to3 import pygram +from blib2to3.pgen2 import token +from blib2to3.pytree import NL, Leaf, Node, type_repr + +pygram.initialize(CACHE_DIR) +syms: Final = pygram.python_symbols + + +# types +T = TypeVar("T") +LN = Union[Leaf, Node] +LeafID = int +NodeType = int + + +WHITESPACE: Final = {token.DEDENT, token.INDENT, token.NEWLINE} +STATEMENT: Final = { + syms.if_stmt, + syms.while_stmt, + syms.for_stmt, + syms.try_stmt, + syms.except_clause, + syms.with_stmt, + syms.funcdef, + syms.classdef, + syms.match_stmt, + syms.case_block, +} +STANDALONE_COMMENT: Final = 153 +token.tok_name[STANDALONE_COMMENT] = "STANDALONE_COMMENT" +LOGIC_OPERATORS: Final = {"and", "or"} +COMPARATORS: Final = { + token.LESS, + token.GREATER, + token.EQEQUAL, + token.NOTEQUAL, + token.LESSEQUAL, + token.GREATEREQUAL, +} +MATH_OPERATORS: Final = { + token.VBAR, + token.CIRCUMFLEX, + token.AMPER, + token.LEFTSHIFT, + token.RIGHTSHIFT, + token.PLUS, + token.MINUS, + token.STAR, + token.SLASH, + token.DOUBLESLASH, + token.PERCENT, + token.AT, + token.TILDE, + token.DOUBLESTAR, +} +STARS: Final = {token.STAR, token.DOUBLESTAR} +VARARGS_SPECIALS: Final = STARS | {token.SLASH} +VARARGS_PARENTS: Final = { + syms.arglist, + syms.argument, # double star in arglist + syms.trailer, # single argument to call + syms.typedargslist, + syms.varargslist, # lambdas +} +UNPACKING_PARENTS: Final = { + syms.atom, # single element of a list or set literal + syms.dictsetmaker, + syms.listmaker, + syms.testlist_gexp, + syms.testlist_star_expr, + syms.subject_expr, + syms.pattern, +} +TEST_DESCENDANTS: Final = { + syms.test, + syms.lambdef, + syms.or_test, + syms.and_test, + syms.not_test, + syms.comparison, + syms.star_expr, + syms.expr, + syms.xor_expr, + syms.and_expr, + syms.shift_expr, + syms.arith_expr, + syms.trailer, + syms.term, + syms.power, + syms.namedexpr_test, +} +TYPED_NAMES: Final = {syms.tname, syms.tname_star} +ASSIGNMENTS: Final = { + "=", + "+=", + "-=", + "*=", + "@=", + "/=", + "%=", + "&=", + "|=", + "^=", + "<<=", + ">>=", + "**=", + "//=", + ":", +} + +IMPLICIT_TUPLE: Final = {syms.testlist, syms.testlist_star_expr, syms.exprlist} +BRACKET: Final = { + token.LPAR: token.RPAR, + token.LSQB: token.RSQB, + token.LBRACE: token.RBRACE, +} +OPENING_BRACKETS: Final = set(BRACKET.keys()) +CLOSING_BRACKETS: Final = set(BRACKET.values()) +BRACKETS: Final = OPENING_BRACKETS | CLOSING_BRACKETS +ALWAYS_NO_SPACE: Final = CLOSING_BRACKETS | { + token.COMMA, + STANDALONE_COMMENT, + token.FSTRING_MIDDLE, + token.FSTRING_END, + token.BANG, +} + +RARROW = 55 + + +@mypyc_attr(allow_interpreted_subclasses=True) +class Visitor(Generic[T]): + """Basic lib2to3 visitor that yields things of type `T` on `visit()`.""" + + def visit(self, node: LN) -> Iterator[T]: + """Main method to visit `node` and its children. + + It tries to find a `visit_*()` method for the given `node.type`, like + `visit_simple_stmt` for Node objects or `visit_INDENT` for Leaf objects. + If no dedicated `visit_*()` method is found, chooses `visit_default()` + instead. + + Then yields objects of type `T` from the selected visitor. 
+ """ + if node.type < 256: + name = token.tok_name[node.type] + else: + name = str(type_repr(node.type)) + # We explicitly branch on whether a visitor exists (instead of + # using self.visit_default as the default arg to getattr) in order + # to save needing to create a bound method object and so mypyc can + # generate a native call to visit_default. + visitf = getattr(self, f"visit_{name}", None) + if visitf: + yield from visitf(node) + else: + yield from self.visit_default(node) + + def visit_default(self, node: LN) -> Iterator[T]: + """Default `visit_*()` implementation. Recurses to children of `node`.""" + if isinstance(node, Node): + for child in node.children: + yield from self.visit(child) + + +def whitespace(leaf: Leaf, *, complex_subscript: bool, mode: Mode) -> str: # noqa: C901 + """Return whitespace prefix if needed for the given `leaf`. + + `complex_subscript` signals whether the given leaf is part of a subscription + which has non-trivial arguments, like arithmetic expressions or function calls. + """ + NO: Final[str] = "" + SPACE: Final[str] = " " + DOUBLESPACE: Final[str] = " " + t = leaf.type + p = leaf.parent + v = leaf.value + if t in ALWAYS_NO_SPACE: + return NO + + if t == token.COMMENT: + return DOUBLESPACE + + assert p is not None, f"INTERNAL ERROR: hand-made leaf without parent: {leaf!r}" + if t == token.COLON and p.type not in { + syms.subscript, + syms.subscriptlist, + syms.sliceop, + }: + return NO + + if t == token.LBRACE and p.type == syms.fstring_replacement_field: + return NO + + prev = leaf.prev_sibling + if not prev: + prevp = preceding_leaf(p) + if not prevp or prevp.type in OPENING_BRACKETS: + return NO + + if t == token.COLON: + if prevp.type == token.COLON: + return NO + + elif prevp.type != token.COMMA and not complex_subscript: + return NO + + return SPACE + + if prevp.type == token.EQUAL: + if prevp.parent: + if prevp.parent.type in { + syms.arglist, + syms.argument, + syms.parameters, + syms.varargslist, + }: + return NO + + elif prevp.parent.type == syms.typedargslist: + # A bit hacky: if the equal sign has whitespace, it means we + # previously found it's a typed argument. So, we're using + # that, too. + return prevp.prefix + + elif ( + prevp.type == token.STAR + and parent_type(prevp) == syms.star_expr + and parent_type(prevp.parent) == syms.subscriptlist + ): + # No space between typevar tuples. 
+ return NO + + elif prevp.type in VARARGS_SPECIALS: + if is_vararg(prevp, within=VARARGS_PARENTS | UNPACKING_PARENTS): + return NO + + elif prevp.type == token.COLON: + if prevp.parent and prevp.parent.type in {syms.subscript, syms.sliceop}: + return SPACE if complex_subscript else NO + + elif ( + prevp.parent + and prevp.parent.type == syms.factor + and prevp.type in MATH_OPERATORS + ): + return NO + + elif prevp.type == token.AT and p.parent and p.parent.type == syms.decorator: + # no space in decorators + return NO + + elif prev.type in OPENING_BRACKETS: + return NO + + elif prev.type == token.BANG: + return NO + + if p.type in {syms.parameters, syms.arglist}: + # untyped function signatures or calls + if not prev or prev.type != token.COMMA: + return NO + + elif p.type == syms.varargslist: + # lambdas + if prev and prev.type != token.COMMA: + return NO + + elif p.type == syms.typedargslist: + # typed function signatures + if not prev: + return NO + + if t == token.EQUAL: + if prev.type not in TYPED_NAMES: + return NO + + elif prev.type == token.EQUAL: + # A bit hacky: if the equal sign has whitespace, it means we + # previously found it's a typed argument. So, we're using that, too. + return prev.prefix + + elif prev.type != token.COMMA: + return NO + + elif p.type in TYPED_NAMES: + # type names + if not prev: + prevp = preceding_leaf(p) + if not prevp or prevp.type != token.COMMA: + return NO + + elif p.type == syms.trailer: + # attributes and calls + if t == token.LPAR or t == token.RPAR: + return NO + + if not prev: + if t == token.DOT or t == token.LSQB: + return NO + + elif prev.type != token.COMMA: + return NO + + elif p.type == syms.argument: + # single argument + if t == token.EQUAL: + return NO + + if not prev: + prevp = preceding_leaf(p) + if not prevp or prevp.type == token.LPAR: + return NO + + elif prev.type in {token.EQUAL} | VARARGS_SPECIALS: + return NO + + elif p.type == syms.decorator: + # decorators + return NO + + elif p.type == syms.dotted_name: + if prev: + return NO + + prevp = preceding_leaf(p) + if not prevp or prevp.type == token.AT or prevp.type == token.DOT: + return NO + + elif p.type == syms.classdef: + if t == token.LPAR: + return NO + + if prev and prev.type == token.LPAR: + return NO + + elif p.type in {syms.subscript, syms.sliceop}: + # indexing + if not prev: + assert p.parent is not None, "subscripts are always parented" + if p.parent.type == syms.subscriptlist: + return SPACE + + return NO + + elif t == token.COLONEQUAL or prev.type == token.COLONEQUAL: + return SPACE + + elif not complex_subscript: + return NO + + elif p.type == syms.atom: + if prev and t == token.DOT: + # dots, but not the first one. + return NO + + elif p.type == syms.dictsetmaker: + # dict unpacking + if prev and prev.type == token.DOUBLESTAR: + return NO + + elif p.type in {syms.factor, syms.star_expr}: + # unary ops + if not prev: + prevp = preceding_leaf(p) + if not prevp or prevp.type in OPENING_BRACKETS: + return NO + + prevp_parent = prevp.parent + assert prevp_parent is not None + if prevp.type == token.COLON and prevp_parent.type in { + syms.subscript, + syms.sliceop, + }: + return NO + + elif prevp.type == token.EQUAL and prevp_parent.type == syms.argument: + return NO + + # TODO: add fstring here? 
+ elif t in {token.NAME, token.NUMBER, token.STRING}: + return NO + + elif p.type == syms.import_from: + if t == token.DOT: + if prev and prev.type == token.DOT: + return NO + + elif t == token.NAME: + if v == "import": + return SPACE + + if prev and prev.type == token.DOT: + return NO + + elif p.type == syms.sliceop: + return NO + + elif p.type == syms.except_clause: + if t == token.STAR: + return NO + + return SPACE + + +def make_simple_prefix(nl_count: int, form_feed: bool, empty_line: str = "\n") -> str: + """Generate a normalized prefix string.""" + if form_feed: + return (empty_line * (nl_count - 1)) + "\f" + empty_line + return empty_line * nl_count + + +def preceding_leaf(node: Optional[LN]) -> Optional[Leaf]: + """Return the first leaf that precedes `node`, if any.""" + while node: + res = node.prev_sibling + if res: + if isinstance(res, Leaf): + return res + + try: + return list(res.leaves())[-1] + + except IndexError: + return None + + node = node.parent + return None + + +def prev_siblings_are(node: Optional[LN], tokens: List[Optional[NodeType]]) -> bool: + """Return if the `node` and its previous siblings match types against the provided + list of tokens; the provided `node`has its type matched against the last element in + the list. `None` can be used as the first element to declare that the start of the + list is anchored at the start of its parent's children.""" + if not tokens: + return True + if tokens[-1] is None: + return node is None + if not node: + return False + if node.type != tokens[-1]: + return False + return prev_siblings_are(node.prev_sibling, tokens[:-1]) + + +def parent_type(node: Optional[LN]) -> Optional[NodeType]: + """ + Returns: + @node.parent.type, if @node is not None and has a parent. + OR + None, otherwise. + """ + if node is None or node.parent is None: + return None + + return node.parent.type + + +def child_towards(ancestor: Node, descendant: LN) -> Optional[LN]: + """Return the child of `ancestor` that contains `descendant`.""" + node: Optional[LN] = descendant + while node and node.parent != ancestor: + node = node.parent + return node + + +def replace_child(old_child: LN, new_child: LN) -> None: + """ + Side Effects: + * If @old_child.parent is set, replace @old_child with @new_child in + @old_child's underlying Node structure. + OR + * Otherwise, this function does nothing. + """ + parent = old_child.parent + if not parent: + return + + child_idx = old_child.remove() + if child_idx is not None: + parent.insert_child(child_idx, new_child) + + +def container_of(leaf: Leaf) -> LN: + """Return `leaf` or one of its ancestors that is the topmost container of it. + + By "container" we mean a node where `leaf` is the very first child. 
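Since `make_simple_prefix` above is a small pure helper, its behaviour can be pinned down with a few concrete values. This is an illustrative sketch, not part of the vendored file; the expected strings follow from the code as shown.

from black.nodes import make_simple_prefix

assert make_simple_prefix(0, form_feed=False) == ""       # no blank lines, empty prefix
assert make_simple_prefix(2, form_feed=False) == "\n\n"   # two plain blank lines
assert make_simple_prefix(2, form_feed=True) == "\n\f\n"  # form feed precedes the final newline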
+ """ + same_prefix = leaf.prefix + container: LN = leaf + while container: + parent = container.parent + if parent is None: + break + + if parent.children[0].prefix != same_prefix: + break + + if parent.type == syms.file_input: + break + + if parent.prev_sibling is not None and parent.prev_sibling.type in BRACKETS: + break + + container = parent + return container + + +def first_leaf_of(node: LN) -> Optional[Leaf]: + """Returns the first leaf of the node tree.""" + if isinstance(node, Leaf): + return node + if node.children: + return first_leaf_of(node.children[0]) + else: + return None + + +def is_arith_like(node: LN) -> bool: + """Whether node is an arithmetic or a binary arithmetic expression""" + return node.type in { + syms.arith_expr, + syms.shift_expr, + syms.xor_expr, + syms.and_expr, + } + + +def is_docstring(node: NL, mode: Mode) -> bool: + if isinstance(node, Leaf): + if node.type != token.STRING: + return False + + prefix = get_string_prefix(node.value) + if set(prefix).intersection("bBfF"): + return False + + if ( + Preview.unify_docstring_detection in mode + and node.parent + and node.parent.type == syms.simple_stmt + and not node.parent.prev_sibling + and node.parent.parent + and node.parent.parent.type == syms.file_input + ): + return True + + if prev_siblings_are( + node.parent, [None, token.NEWLINE, token.INDENT, syms.simple_stmt] + ): + return True + + # Multiline docstring on the same line as the `def`. + if prev_siblings_are(node.parent, [syms.parameters, token.COLON, syms.simple_stmt]): + # `syms.parameters` is only used in funcdefs and async_funcdefs in the Python + # grammar. We're safe to return True without further checks. + return True + + return False + + +def is_empty_tuple(node: LN) -> bool: + """Return True if `node` holds an empty tuple.""" + return ( + node.type == syms.atom + and len(node.children) == 2 + and node.children[0].type == token.LPAR + and node.children[1].type == token.RPAR + ) + + +def is_one_tuple(node: LN) -> bool: + """Return True if `node` holds a tuple with one element, with or without parens.""" + if node.type == syms.atom: + gexp = unwrap_singleton_parenthesis(node) + if gexp is None or gexp.type != syms.testlist_gexp: + return False + + return len(gexp.children) == 2 and gexp.children[1].type == token.COMMA + + return ( + node.type in IMPLICIT_TUPLE + and len(node.children) == 2 + and node.children[1].type == token.COMMA + ) + + +def is_tuple_containing_walrus(node: LN) -> bool: + """Return True if `node` holds a tuple that contains a walrus operator.""" + if node.type != syms.atom: + return False + gexp = unwrap_singleton_parenthesis(node) + if gexp is None or gexp.type != syms.testlist_gexp: + return False + + return any(child.type == syms.namedexpr_test for child in gexp.children) + + +def is_one_sequence_between( + opening: Leaf, + closing: Leaf, + leaves: List[Leaf], + brackets: Tuple[int, int] = (token.LPAR, token.RPAR), +) -> bool: + """Return True if content between `opening` and `closing` is a one-sequence.""" + if (opening.type, closing.type) != brackets: + return False + + depth = closing.bracket_depth + 1 + for _opening_index, leaf in enumerate(leaves): + if leaf is opening: + break + + else: + raise LookupError("Opening paren not found in `leaves`") + + commas = 0 + _opening_index += 1 + for leaf in leaves[_opening_index:]: + if leaf is closing: + break + + bracket_depth = leaf.bracket_depth + if bracket_depth == depth and leaf.type == token.COMMA: + commas += 1 + if leaf.parent and leaf.parent.type in { + syms.arglist, + 
syms.typedargslist, + }: + commas += 1 + break + + return commas < 2 + + +def is_walrus_assignment(node: LN) -> bool: + """Return True iff `node` is of the shape ( test := test )""" + inner = unwrap_singleton_parenthesis(node) + return inner is not None and inner.type == syms.namedexpr_test + + +def is_simple_decorator_trailer(node: LN, last: bool = False) -> bool: + """Return True iff `node` is a trailer valid in a simple decorator""" + return node.type == syms.trailer and ( + ( + len(node.children) == 2 + and node.children[0].type == token.DOT + and node.children[1].type == token.NAME + ) + # last trailer can be an argument-less parentheses pair + or ( + last + and len(node.children) == 2 + and node.children[0].type == token.LPAR + and node.children[1].type == token.RPAR + ) + # last trailer can be arguments + or ( + last + and len(node.children) == 3 + and node.children[0].type == token.LPAR + # and node.children[1].type == syms.argument + and node.children[2].type == token.RPAR + ) + ) + + +def is_simple_decorator_expression(node: LN) -> bool: + """Return True iff `node` could be a 'dotted name' decorator + + This function takes the node of the 'namedexpr_test' of the new decorator + grammar and test if it would be valid under the old decorator grammar. + + The old grammar was: decorator: @ dotted_name [arguments] NEWLINE + The new grammar is : decorator: @ namedexpr_test NEWLINE + """ + if node.type == token.NAME: + return True + if node.type == syms.power: + if node.children: + return ( + node.children[0].type == token.NAME + and all(map(is_simple_decorator_trailer, node.children[1:-1])) + and ( + len(node.children) < 2 + or is_simple_decorator_trailer(node.children[-1], last=True) + ) + ) + return False + + +def is_yield(node: LN) -> bool: + """Return True if `node` holds a `yield` or `yield from` expression.""" + if node.type == syms.yield_expr: + return True + + if is_name_token(node) and node.value == "yield": + return True + + if node.type != syms.atom: + return False + + if len(node.children) != 3: + return False + + lpar, expr, rpar = node.children + if lpar.type == token.LPAR and rpar.type == token.RPAR: + return is_yield(expr) + + return False + + +def is_vararg(leaf: Leaf, within: Set[NodeType]) -> bool: + """Return True if `leaf` is a star or double star in a vararg or kwarg. + + If `within` includes VARARGS_PARENTS, this applies to function signatures. + If `within` includes UNPACKING_PARENTS, it applies to right hand-side + extended iterable unpacking (PEP 3132) and additional unpacking + generalizations (PEP 448). + """ + if leaf.type not in VARARGS_SPECIALS or not leaf.parent: + return False + + p = leaf.parent + if p.type == syms.star_expr: + # Star expressions are also used as assignment targets in extended + # iterable unpacking (PEP 3132). See what its parent is instead. 
+ if not p.parent: + return False + + p = p.parent + + return p.type in within + + +def is_fstring(node: Node) -> bool: + """Return True if the node is an f-string""" + return node.type == syms.fstring + + +def fstring_to_string(node: Node) -> Leaf: + """Converts an fstring node back to a string node.""" + string_without_prefix = str(node)[len(node.prefix) :] + string_leaf = Leaf(token.STRING, string_without_prefix, prefix=node.prefix) + string_leaf.lineno = node.get_lineno() or 0 + return string_leaf + + +def is_multiline_string(node: LN) -> bool: + """Return True if `leaf` is a multiline string that actually spans many lines.""" + if isinstance(node, Node) and is_fstring(node): + leaf = fstring_to_string(node) + elif isinstance(node, Leaf): + leaf = node + else: + return False + + return has_triple_quotes(leaf.value) and "\n" in leaf.value + + +def is_parent_function_or_class(node: Node) -> bool: + assert node.type in {syms.suite, syms.simple_stmt} + assert node.parent is not None + # Note this works for suites / simple_stmts in async def as well + return node.parent.type in {syms.funcdef, syms.classdef} + + +def is_function_or_class(node: Node) -> bool: + return node.type in {syms.funcdef, syms.classdef, syms.async_funcdef} + + +def is_stub_suite(node: Node) -> bool: + """Return True if `node` is a suite with a stub body.""" + if node.parent is not None and not is_parent_function_or_class(node): + return False + + # If there is a comment, we want to keep it. + if node.prefix.strip(): + return False + + if ( + len(node.children) != 4 + or node.children[0].type != token.NEWLINE + or node.children[1].type != token.INDENT + or node.children[3].type != token.DEDENT + ): + return False + + if node.children[3].prefix.strip(): + return False + + return is_stub_body(node.children[2]) + + +def is_stub_body(node: LN) -> bool: + """Return True if `node` is a simple statement containing an ellipsis.""" + if not isinstance(node, Node) or node.type != syms.simple_stmt: + return False + + if len(node.children) != 2: + return False + + child = node.children[0] + return ( + not child.prefix.strip() + and child.type == syms.atom + and len(child.children) == 3 + and all(leaf == Leaf(token.DOT, ".") for leaf in child.children) + ) + + +def is_atom_with_invisible_parens(node: LN) -> bool: + """Given a `LN`, determines whether it's an atom `node` with invisible + parens. Useful in dedupe-ing and normalizing parens. 
+ """ + if isinstance(node, Leaf) or node.type != syms.atom: + return False + + first, last = node.children[0], node.children[-1] + return ( + isinstance(first, Leaf) + and first.type == token.LPAR + and first.value == "" + and isinstance(last, Leaf) + and last.type == token.RPAR + and last.value == "" + ) + + +def is_empty_par(leaf: Leaf) -> bool: + return is_empty_lpar(leaf) or is_empty_rpar(leaf) + + +def is_empty_lpar(leaf: Leaf) -> bool: + return leaf.type == token.LPAR and leaf.value == "" + + +def is_empty_rpar(leaf: Leaf) -> bool: + return leaf.type == token.RPAR and leaf.value == "" + + +def is_import(leaf: Leaf) -> bool: + """Return True if the given leaf starts an import statement.""" + p = leaf.parent + t = leaf.type + v = leaf.value + return bool( + t == token.NAME + and ( + (v == "import" and p and p.type == syms.import_name) + or (v == "from" and p and p.type == syms.import_from) + ) + ) + + +def is_with_or_async_with_stmt(leaf: Leaf) -> bool: + """Return True if the given leaf starts a with or async with statement.""" + return bool( + leaf.type == token.NAME + and leaf.value == "with" + and leaf.parent + and leaf.parent.type == syms.with_stmt + ) or bool( + leaf.type == token.ASYNC + and leaf.next_sibling + and leaf.next_sibling.type == syms.with_stmt + ) + + +def is_async_stmt_or_funcdef(leaf: Leaf) -> bool: + """Return True if the given leaf starts an async def/for/with statement. + + Note that `async def` can be either an `async_stmt` or `async_funcdef`, + the latter is used when it has decorators. + """ + return bool( + leaf.type == token.ASYNC + and leaf.parent + and leaf.parent.type in {syms.async_stmt, syms.async_funcdef} + ) + + +def is_type_comment(leaf: Leaf) -> bool: + """Return True if the given leaf is a type comment. This function should only + be used for general type comments (excluding ignore annotations, which should + use `is_type_ignore_comment`). Note that general type comments are no longer + used in modern version of Python, this function may be deprecated in the future.""" + t = leaf.type + v = leaf.value + return t in {token.COMMENT, STANDALONE_COMMENT} and v.startswith("# type:") + + +def is_type_ignore_comment(leaf: Leaf) -> bool: + """Return True if the given leaf is a type comment with ignore annotation.""" + t = leaf.type + v = leaf.value + return t in {token.COMMENT, STANDALONE_COMMENT} and is_type_ignore_comment_string(v) + + +def is_type_ignore_comment_string(value: str) -> bool: + """Return True if the given string match with type comment with + ignore annotation.""" + return value.startswith("# type: ignore") + + +def wrap_in_parentheses(parent: Node, child: LN, *, visible: bool = True) -> None: + """Wrap `child` in parentheses. + + This replaces `child` with an atom holding the parentheses and the old + child. That requires moving the prefix. + + If `visible` is False, the leaves will be valueless (and thus invisible). + """ + lpar = Leaf(token.LPAR, "(" if visible else "") + rpar = Leaf(token.RPAR, ")" if visible else "") + prefix = child.prefix + child.prefix = "" + index = child.remove() or 0 + new_child = Node(syms.atom, [lpar, child, rpar]) + new_child.prefix = prefix + parent.insert_child(index, new_child) + + +def unwrap_singleton_parenthesis(node: LN) -> Optional[LN]: + """Returns `wrapped` if `node` is of the shape ( wrapped ). + + Parenthesis can be optional. 
Returns None otherwise""" + if len(node.children) != 3: + return None + + lpar, wrapped, rpar = node.children + if not (lpar.type == token.LPAR and rpar.type == token.RPAR): + return None + + return wrapped + + +def ensure_visible(leaf: Leaf) -> None: + """Make sure parentheses are visible. + + They could be invisible as part of some statements (see + :func:`normalize_invisible_parens` and :func:`visit_import_from`). + """ + if leaf.type == token.LPAR: + leaf.value = "(" + elif leaf.type == token.RPAR: + leaf.value = ")" + + +def is_name_token(nl: NL) -> TypeGuard[Leaf]: + return nl.type == token.NAME + + +def is_lpar_token(nl: NL) -> TypeGuard[Leaf]: + return nl.type == token.LPAR + + +def is_rpar_token(nl: NL) -> TypeGuard[Leaf]: + return nl.type == token.RPAR + + +def is_number_token(nl: NL) -> TypeGuard[Leaf]: + return nl.type == token.NUMBER + + +def get_annotation_type(leaf: Leaf) -> Literal["return", "param", None]: + """Returns the type of annotation this leaf is part of, if any.""" + ancestor = leaf.parent + while ancestor is not None: + if ancestor.prev_sibling and ancestor.prev_sibling.type == token.RARROW: + return "return" + if ancestor.parent and ancestor.parent.type == syms.tname: + return "param" + ancestor = ancestor.parent + return None + + +def is_part_of_annotation(leaf: Leaf) -> bool: + """Returns whether this leaf is part of a type annotation.""" + return get_annotation_type(leaf) is not None + + +def first_leaf(node: LN) -> Optional[Leaf]: + """Returns the first leaf of the ancestor node.""" + if isinstance(node, Leaf): + return node + elif not node.children: + return None + else: + return first_leaf(node.children[0]) + + +def last_leaf(node: LN) -> Optional[Leaf]: + """Returns the last leaf of the ancestor node.""" + if isinstance(node, Leaf): + return node + elif not node.children: + return None + else: + return last_leaf(node.children[-1]) + + +def furthest_ancestor_with_last_leaf(leaf: Leaf) -> LN: + """Returns the furthest ancestor that has this leaf node as the last leaf.""" + node: LN = leaf + while node.parent and node.parent.children and node is node.parent.children[-1]: + node = node.parent + return node diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/numerics.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/numerics.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..391d3b1a1 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/numerics.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/numerics.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/numerics.py new file mode 100644 index 000000000..3040de06f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/numerics.py @@ -0,0 +1,61 @@ +""" +Formatting numeric literals. 
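+
+For instance (illustrative): "0XAB" is normalized to "0xAB", "1E5" to "1e5",
+"10J" to "10j", and "1." to "1.0".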
+""" + +from blib2to3.pytree import Leaf + + +def format_hex(text: str) -> str: + """ + Formats a hexadecimal string like "0x12B3" + """ + before, after = text[:2], text[2:] + return f"{before}{after.upper()}" + + +def format_scientific_notation(text: str) -> str: + """Formats a numeric string utilizing scientific notation""" + before, after = text.split("e") + sign = "" + if after.startswith("-"): + after = after[1:] + sign = "-" + elif after.startswith("+"): + after = after[1:] + before = format_float_or_int_string(before) + return f"{before}e{sign}{after}" + + +def format_complex_number(text: str) -> str: + """Formats a complex string like `10j`""" + number = text[:-1] + suffix = text[-1] + return f"{format_float_or_int_string(number)}{suffix}" + + +def format_float_or_int_string(text: str) -> str: + """Formats a float string like "1.0".""" + if "." not in text: + return text + + before, after = text.split(".") + return f"{before or 0}.{after or 0}" + + +def normalize_numeric_literal(leaf: Leaf) -> None: + """Normalizes numeric (float, int, and complex) literals. + + All letters used in the representation are normalized to lowercase.""" + text = leaf.value.lower() + if text.startswith(("0o", "0b")): + # Leave octal and binary literals alone. + pass + elif text.startswith("0x"): + text = format_hex(text) + elif "e" in text: + text = format_scientific_notation(text) + elif text.endswith("j"): + text = format_complex_number(text) + else: + text = format_float_or_int_string(text) + leaf.value = text diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/output.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/output.py new file mode 100644 index 000000000..7c7dd0fe1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/output.py @@ -0,0 +1,122 @@ +"""Nice output for Black. + +The double calls are for patching purposes in tests. 
+""" + +import json +import re +import tempfile +from typing import Any, List, Optional + +from click import echo, style +from mypy_extensions import mypyc_attr + + +@mypyc_attr(patchable=True) +def _out(message: Optional[str] = None, nl: bool = True, **styles: Any) -> None: + if message is not None: + if "bold" not in styles: + styles["bold"] = True + message = style(message, **styles) + echo(message, nl=nl, err=True) + + +@mypyc_attr(patchable=True) +def _err(message: Optional[str] = None, nl: bool = True, **styles: Any) -> None: + if message is not None: + if "fg" not in styles: + styles["fg"] = "red" + message = style(message, **styles) + echo(message, nl=nl, err=True) + + +@mypyc_attr(patchable=True) +def out(message: Optional[str] = None, nl: bool = True, **styles: Any) -> None: + _out(message, nl=nl, **styles) + + +def err(message: Optional[str] = None, nl: bool = True, **styles: Any) -> None: + _err(message, nl=nl, **styles) + + +def ipynb_diff(a: str, b: str, a_name: str, b_name: str) -> str: + """Return a unified diff string between each cell in notebooks `a` and `b`.""" + a_nb = json.loads(a) + b_nb = json.loads(b) + diff_lines = [ + diff( + "".join(a_nb["cells"][cell_number]["source"]) + "\n", + "".join(b_nb["cells"][cell_number]["source"]) + "\n", + f"{a_name}:cell_{cell_number}", + f"{b_name}:cell_{cell_number}", + ) + for cell_number, cell in enumerate(a_nb["cells"]) + if cell["cell_type"] == "code" + ] + return "".join(diff_lines) + + +_line_pattern = re.compile(r"(.*?(?:\r\n|\n|\r|$))") + + +def _splitlines_no_ff(source: str) -> List[str]: + """Split a string into lines ignoring form feed and other chars. + + This mimics how the Python parser splits source code. + + A simplified version of the function with the same name in Lib/ast.py + """ + result = [match[0] for match in _line_pattern.finditer(source)] + if result[-1] == "": + result.pop(-1) + return result + + +def diff(a: str, b: str, a_name: str, b_name: str) -> str: + """Return a unified diff string between strings `a` and `b`.""" + import difflib + + a_lines = _splitlines_no_ff(a) + b_lines = _splitlines_no_ff(b) + diff_lines = [] + for line in difflib.unified_diff( + a_lines, b_lines, fromfile=a_name, tofile=b_name, n=5 + ): + # Work around https://bugs.python.org/issue2142 + # See: + # https://www.gnu.org/software/diffutils/manual/html_node/Incomplete-Lines.html + if line[-1] == "\n": + diff_lines.append(line) + else: + diff_lines.append(line + "\n") + diff_lines.append("\\ No newline at end of file\n") + return "".join(diff_lines) + + +def color_diff(contents: str) -> str: + """Inject the ANSI color codes to the diff.""" + lines = contents.split("\n") + for i, line in enumerate(lines): + if line.startswith("+++") or line.startswith("---"): + line = "\033[1m" + line + "\033[0m" # bold, reset + elif line.startswith("@@"): + line = "\033[36m" + line + "\033[0m" # cyan, reset + elif line.startswith("+"): + line = "\033[32m" + line + "\033[0m" # green, reset + elif line.startswith("-"): + line = "\033[31m" + line + "\033[0m" # red, reset + lines[i] = line + return "\n".join(lines) + + +@mypyc_attr(patchable=True) +def dump_to_file(*output: str, ensure_final_newline: bool = True) -> str: + """Dump `output` to a temporary file. 
Return path to the file.""" + with tempfile.NamedTemporaryFile( + mode="w", prefix="blk_", suffix=".log", delete=False, encoding="utf8" + ) as f: + for lines in output: + f.write(lines) + if ensure_final_newline and lines and lines[-1] != "\n": + f.write("\n") + return f.name diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/parsing.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/parsing.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..a4b0dcf3c Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/parsing.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/parsing.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/parsing.py new file mode 100644 index 000000000..e8664b5ee --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/parsing.py @@ -0,0 +1,244 @@ +""" +Parse Python code and perform AST validation. +""" + +import ast +import sys +import warnings +from typing import Iterable, Iterator, List, Set, Tuple + +from black.mode import VERSION_TO_FEATURES, Feature, TargetVersion, supports_feature +from black.nodes import syms +from blib2to3 import pygram +from blib2to3.pgen2 import driver +from blib2to3.pgen2.grammar import Grammar +from blib2to3.pgen2.parse import ParseError +from blib2to3.pgen2.tokenize import TokenError +from blib2to3.pytree import Leaf, Node + + +class InvalidInput(ValueError): + """Raised when input source code fails all parse attempts.""" + + +def get_grammars(target_versions: Set[TargetVersion]) -> List[Grammar]: + if not target_versions: + # No target_version specified, so try all grammars. + return [ + # Python 3.7-3.9 + pygram.python_grammar_async_keywords, + # Python 3.0-3.6 + pygram.python_grammar, + # Python 3.10+ + pygram.python_grammar_soft_keywords, + ] + + grammars = [] + # If we have to parse both, try to parse async as a keyword first + if not supports_feature( + target_versions, Feature.ASYNC_IDENTIFIERS + ) and not supports_feature(target_versions, Feature.PATTERN_MATCHING): + # Python 3.7-3.9 + grammars.append(pygram.python_grammar_async_keywords) + if not supports_feature(target_versions, Feature.ASYNC_KEYWORDS): + # Python 3.0-3.6 + grammars.append(pygram.python_grammar) + if any(Feature.PATTERN_MATCHING in VERSION_TO_FEATURES[v] for v in target_versions): + # Python 3.10+ + grammars.append(pygram.python_grammar_soft_keywords) + + # At least one of the above branches must have been taken, because every Python + # version has exactly one of the two 'ASYNC_*' flags + return grammars + + +def lib2to3_parse(src_txt: str, target_versions: Iterable[TargetVersion] = ()) -> Node: + """Given a string with source, return the lib2to3 Node.""" + if not src_txt.endswith("\n"): + src_txt += "\n" + + grammars = get_grammars(set(target_versions)) + errors = {} + for grammar in grammars: + drv = driver.Driver(grammar) + try: + result = drv.parse_string(src_txt, True) + break + + except ParseError as pe: + lineno, column = pe.context[1] + lines = src_txt.splitlines() + try: + faulty_line = lines[lineno - 1] + except IndexError: + faulty_line = "" + errors[grammar.version] = InvalidInput( + f"Cannot parse: {lineno}:{column}: {faulty_line}" + ) + + except TokenError as te: + # In edge cases these are raised; and typically don't have a "faulty_line". 
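+            # TokenError carries (message, (lineno, column)) in its args, so the
+            # position and message are reported without quoting a source line.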
+ lineno, column = te.args[1] + errors[grammar.version] = InvalidInput( + f"Cannot parse: {lineno}:{column}: {te.args[0]}" + ) + + else: + # Choose the latest version when raising the actual parsing error. + assert len(errors) >= 1 + exc = errors[max(errors)] + raise exc from None + + if isinstance(result, Leaf): + result = Node(syms.file_input, [result]) + return result + + +def matches_grammar(src_txt: str, grammar: Grammar) -> bool: + drv = driver.Driver(grammar) + try: + drv.parse_string(src_txt, True) + except (ParseError, TokenError, IndentationError): + return False + else: + return True + + +def lib2to3_unparse(node: Node) -> str: + """Given a lib2to3 node, return its string representation.""" + code = str(node) + return code + + +class ASTSafetyError(Exception): + """Raised when Black's generated code is not equivalent to the old AST.""" + + +def _parse_single_version( + src: str, version: Tuple[int, int], *, type_comments: bool +) -> ast.AST: + filename = "" + with warnings.catch_warnings(): + warnings.simplefilter("ignore", SyntaxWarning) + warnings.simplefilter("ignore", DeprecationWarning) + return ast.parse( + src, filename, feature_version=version, type_comments=type_comments + ) + + +def parse_ast(src: str) -> ast.AST: + # TODO: support Python 4+ ;) + versions = [(3, minor) for minor in range(3, sys.version_info[1] + 1)] + + first_error = "" + for version in sorted(versions, reverse=True): + try: + return _parse_single_version(src, version, type_comments=True) + except SyntaxError as e: + if not first_error: + first_error = str(e) + + # Try to parse without type comments + for version in sorted(versions, reverse=True): + try: + return _parse_single_version(src, version, type_comments=False) + except SyntaxError: + pass + + raise SyntaxError(first_error) + + +def _normalize(lineend: str, value: str) -> str: + # To normalize, we strip any leading and trailing space from + # each line... + stripped: List[str] = [i.strip() for i in value.splitlines()] + normalized = lineend.join(stripped) + # ...and remove any blank lines at the beginning and end of + # the whole string + return normalized.strip() + + +def stringify_ast(node: ast.AST) -> Iterator[str]: + """Simple visitor generating strings to compare ASTs by content.""" + return _stringify_ast(node, []) + + +def _stringify_ast_with_new_parent( + node: ast.AST, parent_stack: List[ast.AST], new_parent: ast.AST +) -> Iterator[str]: + parent_stack.append(new_parent) + yield from _stringify_ast(node, parent_stack) + parent_stack.pop() + + +def _stringify_ast(node: ast.AST, parent_stack: List[ast.AST]) -> Iterator[str]: + if ( + isinstance(node, ast.Constant) + and isinstance(node.value, str) + and node.kind == "u" + ): + # It's a quirk of history that we strip the u prefix over here. We used to + # rewrite the AST nodes for Python version compatibility and we never copied + # over the kind + node.kind = None + + yield f"{' ' * len(parent_stack)}{node.__class__.__name__}(" + + for field in sorted(node._fields): # noqa: F402 + # TypeIgnore has only one field 'lineno' which breaks this comparison + if isinstance(node, ast.TypeIgnore): + break + + try: + value: object = getattr(node, field) + except AttributeError: + continue + + yield f"{' ' * (len(parent_stack) + 1)}{field}=" + + if isinstance(value, list): + for item in value: + # Ignore nested tuples within del statements, because we may insert + # parentheses and they change the AST. 
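+                    # (e.g. "del (a, b)" and "del a, b" should stringify
+                    # identically, so the Tuple wrapper is skipped and only its
+                    # elements are visited.)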
+ if ( + field == "targets" + and isinstance(node, ast.Delete) + and isinstance(item, ast.Tuple) + ): + for elt in item.elts: + yield from _stringify_ast_with_new_parent( + elt, parent_stack, node + ) + + elif isinstance(item, ast.AST): + yield from _stringify_ast_with_new_parent(item, parent_stack, node) + + elif isinstance(value, ast.AST): + yield from _stringify_ast_with_new_parent(value, parent_stack, node) + + else: + normalized: object + if ( + isinstance(node, ast.Constant) + and field == "value" + and isinstance(value, str) + and len(parent_stack) >= 2 + # Any standalone string, ideally this would + # exactly match black.nodes.is_docstring + and isinstance(parent_stack[-1], ast.Expr) + ): + # Constant strings may be indented across newlines, if they are + # docstrings; fold spaces after newlines when comparing. Similarly, + # trailing and leading space may be removed. + normalized = _normalize("\n", value) + elif field == "type_comment" and isinstance(value, str): + # Trailing whitespace in type comments is removed. + normalized = value.rstrip() + else: + normalized = value + yield ( + f"{' ' * (len(parent_stack) + 1)}{normalized!r}, #" + f" {value.__class__.__name__}" + ) + + yield f"{' ' * len(parent_stack)}) # /{node.__class__.__name__}" diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/py.typed b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/ranges.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/ranges.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..675e70347 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/ranges.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/ranges.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/ranges.py new file mode 100644 index 000000000..1ecaf7b0a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/ranges.py @@ -0,0 +1,521 @@ +"""Functions related to Black's formatting by line ranges feature.""" + +import difflib +from dataclasses import dataclass +from typing import Collection, Iterator, List, Sequence, Set, Tuple, Union + +from black.nodes import ( + LN, + STANDALONE_COMMENT, + Leaf, + Node, + Visitor, + first_leaf, + furthest_ancestor_with_last_leaf, + last_leaf, + syms, +) +from blib2to3.pgen2.token import ASYNC, NEWLINE + + +def parse_line_ranges(line_ranges: Sequence[str]) -> List[Tuple[int, int]]: + lines: List[Tuple[int, int]] = [] + for lines_str in line_ranges: + parts = lines_str.split("-") + if len(parts) != 2: + raise ValueError( + "Incorrect --line-ranges format, expect 'START-END', found" + f" {lines_str!r}" + ) + try: + start = int(parts[0]) + end = int(parts[1]) + except ValueError: + raise ValueError( + "Incorrect --line-ranges value, expect integer ranges, found" + f" {lines_str!r}" + ) from None + else: + lines.append((start, end)) + return lines + + +def is_valid_line_range(lines: Tuple[int, int]) -> bool: + """Returns whether the line range is valid.""" + return not lines or lines[0] <= lines[1] + + +def sanitized_lines( + lines: Collection[Tuple[int, int]], src_contents: str +) -> Collection[Tuple[int, int]]: + """Returns the valid line ranges for the given source. + + This removes ranges that are entirely outside the valid lines. 
+ + Other ranges are normalized so that the start values are at least 1 and the + end values are at most the (1-based) index of the last source line. + """ + if not src_contents: + return [] + good_lines = [] + src_line_count = src_contents.count("\n") + if not src_contents.endswith("\n"): + src_line_count += 1 + for start, end in lines: + if start > src_line_count: + continue + # line-ranges are 1-based + start = max(start, 1) + if end < start: + continue + end = min(end, src_line_count) + good_lines.append((start, end)) + return good_lines + + +def adjusted_lines( + lines: Collection[Tuple[int, int]], + original_source: str, + modified_source: str, +) -> List[Tuple[int, int]]: + """Returns the adjusted line ranges based on edits from the original code. + + This computes the new line ranges by diffing original_source and + modified_source, and adjust each range based on how the range overlaps with + the diffs. + + Note the diff can contain lines outside of the original line ranges. This can + happen when the formatting has to be done in adjacent to maintain consistent + local results. For example: + + 1. def my_func(arg1, arg2, + 2. arg3,): + 3. pass + + If it restricts to line 2-2, it can't simply reformat line 2, it also has + to reformat line 1: + + 1. def my_func( + 2. arg1, + 3. arg2, + 4. arg3, + 5. ): + 6. pass + + In this case, we will expand the line ranges to also include the whole diff + block. + + Args: + lines: a collection of line ranges. + original_source: the original source. + modified_source: the modified source. + """ + lines_mappings = _calculate_lines_mappings(original_source, modified_source) + + new_lines = [] + # Keep an index of the current search. Since the lines and lines_mappings are + # sorted, this makes the search complexity linear. + current_mapping_index = 0 + for start, end in sorted(lines): + start_mapping_index = _find_lines_mapping_index( + start, + lines_mappings, + current_mapping_index, + ) + end_mapping_index = _find_lines_mapping_index( + end, + lines_mappings, + start_mapping_index, + ) + current_mapping_index = start_mapping_index + if start_mapping_index >= len(lines_mappings) or end_mapping_index >= len( + lines_mappings + ): + # Protect against invalid inputs. + continue + start_mapping = lines_mappings[start_mapping_index] + end_mapping = lines_mappings[end_mapping_index] + if start_mapping.is_changed_block: + # When the line falls into a changed block, expands to the whole block. + new_start = start_mapping.modified_start + else: + new_start = ( + start - start_mapping.original_start + start_mapping.modified_start + ) + if end_mapping.is_changed_block: + # When the line falls into a changed block, expands to the whole block. + new_end = end_mapping.modified_end + else: + new_end = end - end_mapping.original_start + end_mapping.modified_start + new_range = (new_start, new_end) + if is_valid_line_range(new_range): + new_lines.append(new_range) + return new_lines + + +def convert_unchanged_lines(src_node: Node, lines: Collection[Tuple[int, int]]) -> None: + """Converts unchanged lines to STANDALONE_COMMENT. + + The idea is similar to how `# fmt: on/off` is implemented. It also converts the + nodes between those markers as a single `STANDALONE_COMMENT` leaf node with + the unformatted code as its value. `STANDALONE_COMMENT` is a "fake" token + that will be formatted as-is with its prefix normalized. + + Here we perform two passes: + + 1. Visit the top-level statements, and convert them to a single + `STANDALONE_COMMENT` when unchanged. 
This speeds up formatting when some + of the top-level statements aren't changed. + 2. Convert unchanged "unwrapped lines" to `STANDALONE_COMMENT` nodes line by + line. "unwrapped lines" are divided by the `NEWLINE` token. e.g. a + multi-line statement is *one* "unwrapped line" that ends with `NEWLINE`, + even though this statement itself can span multiple lines, and the + tokenizer only sees the last '\n' as the `NEWLINE` token. + + NOTE: During pass (2), comment prefixes and indentations are ALWAYS + normalized even when the lines aren't changed. This is fixable by moving + more formatting to pass (1). However, it's hard to get it correct when + incorrect indentations are used. So we defer this to future optimizations. + """ + lines_set: Set[int] = set() + for start, end in lines: + lines_set.update(range(start, end + 1)) + visitor = _TopLevelStatementsVisitor(lines_set) + _ = list(visitor.visit(src_node)) # Consume all results. + _convert_unchanged_line_by_line(src_node, lines_set) + + +def _contains_standalone_comment(node: LN) -> bool: + if isinstance(node, Leaf): + return node.type == STANDALONE_COMMENT + else: + for child in node.children: + if _contains_standalone_comment(child): + return True + return False + + +class _TopLevelStatementsVisitor(Visitor[None]): + """ + A node visitor that converts unchanged top-level statements to + STANDALONE_COMMENT. + + This is used in addition to _convert_unchanged_line_by_line, to + speed up formatting when there are unchanged top-level + classes/functions/statements. + """ + + def __init__(self, lines_set: Set[int]): + self._lines_set = lines_set + + def visit_simple_stmt(self, node: Node) -> Iterator[None]: + # This is only called for top-level statements, since `visit_suite` + # won't visit its children nodes. + yield from [] + newline_leaf = last_leaf(node) + if not newline_leaf: + return + assert ( + newline_leaf.type == NEWLINE + ), f"Unexpectedly found leaf.type={newline_leaf.type}" + # We need to find the furthest ancestor with the NEWLINE as the last + # leaf, since a `suite` can simply be a `simple_stmt` when it puts + # its body on the same line. Example: `if cond: pass`. + ancestor = furthest_ancestor_with_last_leaf(newline_leaf) + if not _get_line_range(ancestor).intersection(self._lines_set): + _convert_node_to_standalone_comment(ancestor) + + def visit_suite(self, node: Node) -> Iterator[None]: + yield from [] + # If there is a STANDALONE_COMMENT node, it means parts of the node tree + # have fmt on/off/skip markers. Those STANDALONE_COMMENT nodes can't + # be simply converted by calling str(node). So we just don't convert + # here. + if _contains_standalone_comment(node): + return + # Find the semantic parent of this suite. For `async_stmt` and + # `async_funcdef`, the ASYNC token is defined on a separate level by the + # grammar. + semantic_parent = node.parent + if semantic_parent is not None: + if ( + semantic_parent.prev_sibling is not None + and semantic_parent.prev_sibling.type == ASYNC + ): + semantic_parent = semantic_parent.parent + if semantic_parent is not None and not _get_line_range( + semantic_parent + ).intersection(self._lines_set): + _convert_node_to_standalone_comment(semantic_parent) + + +def _convert_unchanged_line_by_line(node: Node, lines_set: Set[int]) -> None: + """Converts unchanged to STANDALONE_COMMENT line by line.""" + for leaf in node.leaves(): + if leaf.type != NEWLINE: + # We only consider "unwrapped lines", which are divided by the NEWLINE + # token. 
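+            # Every other leaf is covered when the NEWLINE that terminates its
+            # logical line is reached.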
+ continue + if leaf.parent and leaf.parent.type == syms.match_stmt: + # The `suite` node is defined as: + # match_stmt: "match" subject_expr ':' NEWLINE INDENT case_block+ DEDENT + # Here we need to check `subject_expr`. The `case_block+` will be + # checked by their own NEWLINEs. + nodes_to_ignore: List[LN] = [] + prev_sibling = leaf.prev_sibling + while prev_sibling: + nodes_to_ignore.insert(0, prev_sibling) + prev_sibling = prev_sibling.prev_sibling + if not _get_line_range(nodes_to_ignore).intersection(lines_set): + _convert_nodes_to_standalone_comment(nodes_to_ignore, newline=leaf) + elif leaf.parent and leaf.parent.type == syms.suite: + # The `suite` node is defined as: + # suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT + # We will check `simple_stmt` and `stmt+` separately against the lines set + parent_sibling = leaf.parent.prev_sibling + nodes_to_ignore = [] + while parent_sibling and not parent_sibling.type == syms.suite: + # NOTE: Multiple suite nodes can exist as siblings in e.g. `if_stmt`. + nodes_to_ignore.insert(0, parent_sibling) + parent_sibling = parent_sibling.prev_sibling + # Special case for `async_stmt` and `async_funcdef` where the ASYNC + # token is on the grandparent node. + grandparent = leaf.parent.parent + if ( + grandparent is not None + and grandparent.prev_sibling is not None + and grandparent.prev_sibling.type == ASYNC + ): + nodes_to_ignore.insert(0, grandparent.prev_sibling) + if not _get_line_range(nodes_to_ignore).intersection(lines_set): + _convert_nodes_to_standalone_comment(nodes_to_ignore, newline=leaf) + else: + ancestor = furthest_ancestor_with_last_leaf(leaf) + # Consider multiple decorators as a whole block, as their + # newlines have different behaviors than the rest of the grammar. + if ( + ancestor.type == syms.decorator + and ancestor.parent + and ancestor.parent.type == syms.decorators + ): + ancestor = ancestor.parent + if not _get_line_range(ancestor).intersection(lines_set): + _convert_node_to_standalone_comment(ancestor) + + +def _convert_node_to_standalone_comment(node: LN) -> None: + """Convert node to STANDALONE_COMMENT by modifying the tree inline.""" + parent = node.parent + if not parent: + return + first = first_leaf(node) + last = last_leaf(node) + if not first or not last: + return + if first is last: + # This can happen on the following edge cases: + # 1. A block of `# fmt: off/on` code except the `# fmt: on` is placed + # on the end of the last line instead of on a new line. + # 2. A single backslash on its own line followed by a comment line. + # Ideally we don't want to format them when not requested, but fixing + # isn't easy. These cases are also badly formatted code, so it isn't + # too bad we reformat them. + return + # The prefix contains comments and indentation whitespaces. They are + # reformatted accordingly to the correct indentation level. + # This also means the indentation will be changed on the unchanged lines, and + # this is actually required to not break incremental reformatting. + prefix = first.prefix + first.prefix = "" + index = node.remove() + if index is not None: + # Remove the '\n', as STANDALONE_COMMENT will have '\n' appended when + # generating the formatted code. 
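+        # str(node) ends with that newline, hence the [:-1] slice below.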
+ value = str(node)[:-1] + parent.insert_child( + index, + Leaf( + STANDALONE_COMMENT, + value, + prefix=prefix, + fmt_pass_converted_first_leaf=first, + ), + ) + + +def _convert_nodes_to_standalone_comment(nodes: Sequence[LN], *, newline: Leaf) -> None: + """Convert nodes to STANDALONE_COMMENT by modifying the tree inline.""" + if not nodes: + return + parent = nodes[0].parent + first = first_leaf(nodes[0]) + if not parent or not first: + return + prefix = first.prefix + first.prefix = "" + value = "".join(str(node) for node in nodes) + # The prefix comment on the NEWLINE leaf is the trailing comment of the statement. + if newline.prefix: + value += newline.prefix + newline.prefix = "" + index = nodes[0].remove() + for node in nodes[1:]: + node.remove() + if index is not None: + parent.insert_child( + index, + Leaf( + STANDALONE_COMMENT, + value, + prefix=prefix, + fmt_pass_converted_first_leaf=first, + ), + ) + + +def _leaf_line_end(leaf: Leaf) -> int: + """Returns the line number of the leaf node's last line.""" + if leaf.type == NEWLINE: + return leaf.lineno + else: + # Leaf nodes like multiline strings can occupy multiple lines. + return leaf.lineno + str(leaf).count("\n") + + +def _get_line_range(node_or_nodes: Union[LN, List[LN]]) -> Set[int]: + """Returns the line range of this node or list of nodes.""" + if isinstance(node_or_nodes, list): + nodes = node_or_nodes + if not nodes: + return set() + first = first_leaf(nodes[0]) + last = last_leaf(nodes[-1]) + if first and last: + line_start = first.lineno + line_end = _leaf_line_end(last) + return set(range(line_start, line_end + 1)) + else: + return set() + else: + node = node_or_nodes + if isinstance(node, Leaf): + return set(range(node.lineno, _leaf_line_end(node) + 1)) + else: + first = first_leaf(node) + last = last_leaf(node) + if first and last: + return set(range(first.lineno, _leaf_line_end(last) + 1)) + else: + return set() + + +@dataclass +class _LinesMapping: + """1-based lines mapping from original source to modified source. + + Lines [original_start, original_end] from original source + are mapped to [modified_start, modified_end]. + + The ranges are inclusive on both ends. + """ + + original_start: int + original_end: int + modified_start: int + modified_end: int + # Whether this range corresponds to a changed block, or an unchanged block. + is_changed_block: bool + + +def _calculate_lines_mappings( + original_source: str, + modified_source: str, +) -> Sequence[_LinesMapping]: + """Returns a sequence of _LinesMapping by diffing the sources. + + For example, given the following diff: + import re + - def func(arg1, + - arg2, arg3): + + def func(arg1, arg2, arg3): + pass + It returns the following mappings: + original -> modified + (1, 1) -> (1, 1), is_changed_block=False (the "import re" line) + (2, 3) -> (2, 2), is_changed_block=True (the diff) + (4, 4) -> (3, 3), is_changed_block=False (the "pass" line) + + You can think of this visually as if it brings up a side-by-side diff, and tries + to map the line ranges from the left side to the right side: + + (1, 1)->(1, 1) 1. import re 1. import re + (2, 3)->(2, 2) 2. def func(arg1, 2. def func(arg1, arg2, arg3): + 3. arg2, arg3): + (4, 4)->(3, 3) 4. pass 3. pass + + Args: + original_source: the original source. + modified_source: the modified source. 
+ """ + matcher = difflib.SequenceMatcher( + None, + original_source.splitlines(keepends=True), + modified_source.splitlines(keepends=True), + ) + matching_blocks = matcher.get_matching_blocks() + lines_mappings: List[_LinesMapping] = [] + # matching_blocks is a sequence of "same block of code ranges", see + # https://docs.python.org/3/library/difflib.html#difflib.SequenceMatcher.get_matching_blocks + # Each block corresponds to a _LinesMapping with is_changed_block=False, + # and the ranges between two blocks corresponds to a _LinesMapping with + # is_changed_block=True, + # NOTE: matching_blocks is 0-based, but _LinesMapping is 1-based. + for i, block in enumerate(matching_blocks): + if i == 0: + if block.a != 0 or block.b != 0: + lines_mappings.append( + _LinesMapping( + original_start=1, + original_end=block.a, + modified_start=1, + modified_end=block.b, + is_changed_block=False, + ) + ) + else: + previous_block = matching_blocks[i - 1] + lines_mappings.append( + _LinesMapping( + original_start=previous_block.a + previous_block.size + 1, + original_end=block.a, + modified_start=previous_block.b + previous_block.size + 1, + modified_end=block.b, + is_changed_block=True, + ) + ) + if i < len(matching_blocks) - 1: + lines_mappings.append( + _LinesMapping( + original_start=block.a + 1, + original_end=block.a + block.size, + modified_start=block.b + 1, + modified_end=block.b + block.size, + is_changed_block=False, + ) + ) + return lines_mappings + + +def _find_lines_mapping_index( + original_line: int, + lines_mappings: Sequence[_LinesMapping], + start_index: int, +) -> int: + """Returns the original index of the lines mappings for the original line.""" + index = start_index + while index < len(lines_mappings): + mapping = lines_mappings[index] + if mapping.original_start <= original_line <= mapping.original_end: + return index + index += 1 + return index diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/report.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/report.py new file mode 100644 index 000000000..89899f2f3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/report.py @@ -0,0 +1,107 @@ +""" +Summarize Black runs to users. +""" + +from dataclasses import dataclass +from enum import Enum +from pathlib import Path + +from click import style + +from black.output import err, out + + +class Changed(Enum): + NO = 0 + CACHED = 1 + YES = 2 + + +class NothingChanged(UserWarning): + """Raised when reformatted code is the same as source.""" + + +@dataclass +class Report: + """Provides a reformatting counter. Can be rendered with `str(report)`.""" + + check: bool = False + diff: bool = False + quiet: bool = False + verbose: bool = False + change_count: int = 0 + same_count: int = 0 + failure_count: int = 0 + + def done(self, src: Path, changed: Changed) -> None: + """Increment the counter for successful reformatting. Write out a message.""" + if changed is Changed.YES: + reformatted = "would reformat" if self.check or self.diff else "reformatted" + if self.verbose or not self.quiet: + out(f"{reformatted} {src}") + self.change_count += 1 + else: + if self.verbose: + if changed is Changed.NO: + msg = f"{src} already well formatted, good job." + else: + msg = f"{src} wasn't modified on disk since last run." + out(msg, bold=False) + self.same_count += 1 + + def failed(self, src: Path, message: str) -> None: + """Increment the counter for failed reformatting. 
Write out a message.""" + err(f"error: cannot format {src}: {message}") + self.failure_count += 1 + + def path_ignored(self, path: Path, message: str) -> None: + if self.verbose: + out(f"{path} ignored: {message}", bold=False) + + @property + def return_code(self) -> int: + """Return the exit code that the app should use. + + This considers the current state of changed files and failures: + - if there were any failures, return 123; + - if any files were changed and --check is being used, return 1; + - otherwise return 0. + """ + # According to http://tldp.org/LDP/abs/html/exitcodes.html starting with + # 126 we have special return codes reserved by the shell. + if self.failure_count: + return 123 + + elif self.change_count and self.check: + return 1 + + return 0 + + def __str__(self) -> str: + """Render a color report of the current state. + + Use `click.unstyle` to remove colors. + """ + if self.check or self.diff: + reformatted = "would be reformatted" + unchanged = "would be left unchanged" + failed = "would fail to reformat" + else: + reformatted = "reformatted" + unchanged = "left unchanged" + failed = "failed to reformat" + report = [] + if self.change_count: + s = "s" if self.change_count > 1 else "" + report.append( + style(f"{self.change_count} file{s} ", bold=True, fg="blue") + + style(f"{reformatted}", bold=True) + ) + + if self.same_count: + s = "s" if self.same_count > 1 else "" + report.append(style(f"{self.same_count} file{s} ", fg="blue") + unchanged) + if self.failure_count: + s = "s" if self.failure_count > 1 else "" + report.append(style(f"{self.failure_count} file{s} {failed}", fg="red")) + return ", ".join(report) + "." diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/resources/__init__.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/resources/__init__.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..862bf1f7c Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/resources/__init__.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/resources/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/resources/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/resources/black.schema.json b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/resources/black.schema.json new file mode 100644 index 000000000..66f33c839 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/resources/black.schema.json @@ -0,0 +1,154 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://github.com/psf/black/blob/main/black/resources/black.schema.json", + "$comment": "tool.black table in pyproject.toml", + "type": "object", + "additionalProperties": false, + "properties": { + "code": { + "type": "string", + "description": "Format the code passed in as a string." + }, + "line-length": { + "type": "integer", + "description": "How many characters per line to allow.", + "default": 88 + }, + "target-version": { + "type": "array", + "items": { + "enum": [ + "py33", + "py34", + "py35", + "py36", + "py37", + "py38", + "py39", + "py310", + "py311", + "py312", + "py313" + ] + }, + "description": "Python versions that should be supported by Black's output. You should include all versions that your code supports. By default, Black will infer target versions from the project metadata in pyproject.toml. 
If this does not yield conclusive results, Black will use per-file auto-detection." + }, + "pyi": { + "type": "boolean", + "description": "Format all input files like typing stubs regardless of file extension. This is useful when piping source on standard input.", + "default": false + }, + "ipynb": { + "type": "boolean", + "description": "Format all input files like Jupyter Notebooks regardless of file extension. This is useful when piping source on standard input.", + "default": false + }, + "python-cell-magics": { + "type": "array", + "items": { + "type": "string" + }, + "description": "When processing Jupyter Notebooks, add the given magic to the list of known python-magics (capture, prun, pypy, python, python3, time, timeit). Useful for formatting cells with custom python magics." + }, + "skip-source-first-line": { + "type": "boolean", + "description": "Skip the first line of the source code.", + "default": false + }, + "skip-string-normalization": { + "type": "boolean", + "description": "Don't normalize string quotes or prefixes.", + "default": false + }, + "skip-magic-trailing-comma": { + "type": "boolean", + "description": "Don't use trailing commas as a reason to split lines.", + "default": false + }, + "preview": { + "type": "boolean", + "description": "Enable potentially disruptive style changes that may be added to Black's main functionality in the next major release.", + "default": false + }, + "unstable": { + "type": "boolean", + "description": "Enable potentially disruptive style changes that have known bugs or are not currently expected to make it into the stable style Black's next major release. Implies --preview.", + "default": false + }, + "enable-unstable-feature": { + "type": "array", + "items": { + "enum": [ + "hex_codes_in_unicode_sequences", + "string_processing", + "hug_parens_with_braces_and_square_brackets", + "unify_docstring_detection", + "no_normalize_fmt_skip_whitespace", + "wrap_long_dict_values_in_parens", + "multiline_string_handling", + "typed_params_trailing_comma", + "is_simple_lookup_for_doublestar_expression", + "docstring_check_for_newline", + "remove_redundant_guard_parens", + "parens_for_long_if_clauses_in_case_block" + ] + }, + "description": "Enable specific features included in the `--unstable` style. Requires `--preview`. No compatibility guarantees are provided on the behavior or existence of any unstable features." + }, + "check": { + "type": "boolean", + "description": "Don't write the files back, just return the status. Return code 0 means nothing would change. Return code 1 means some files would be reformatted. Return code 123 means there was an internal error.", + "default": false + }, + "diff": { + "type": "boolean", + "description": "Don't write the files back, just output a diff to indicate what changes Black would've made. They are printed to stdout so capturing them is simple.", + "default": false + }, + "color": { + "type": "boolean", + "description": "Show (or do not show) colored diff. Only applies when --diff is given.", + "default": false + }, + "fast": { + "type": "boolean", + "description": "By default, Black performs an AST safety check after formatting your code. The --fast flag turns off this check and the --safe flag explicitly enables it. [default: --safe]", + "default": false + }, + "required-version": { + "type": "string", + "description": "Require a specific version of Black to be running. 
This is useful for ensuring that all contributors to your project are using the same version, because different versions of Black may format code a little differently. This option can be set in a configuration file for consistent results across environments." + }, + "exclude": { + "type": "string", + "description": "A regular expression that matches files and directories that should be excluded on recursive searches. An empty value means no paths are excluded. Use forward slashes for directories on all platforms (Windows, too). By default, Black also ignores all paths listed in .gitignore. Changing this value will override all default exclusions. [default: /(\\.direnv|\\.eggs|\\.git|\\.hg|\\.ipynb_checkpoints|\\.mypy_cache|\\.nox|\\.pytest_cache|\\.ruff_cache|\\.tox|\\.svn|\\.venv|\\.vscode|__pypackages__|_build|buck-out|build|dist|venv)/]" + }, + "extend-exclude": { + "type": "string", + "description": "Like --exclude, but adds additional files and directories on top of the default values instead of overriding them." + }, + "force-exclude": { + "type": "string", + "description": "Like --exclude, but files and directories matching this regex will be excluded even when they are passed explicitly as arguments. This is useful when invoking Black programmatically on changed files, such as in a pre-commit hook or editor plugin." + }, + "include": { + "type": "string", + "description": "A regular expression that matches files and directories that should be included on recursive searches. An empty value means all files are included regardless of the name. Use forward slashes for directories on all platforms (Windows, too). Overrides all exclusions, including from .gitignore and command line options.", + "default": "(\\.pyi?|\\.ipynb)$" + }, + "workers": { + "type": "integer", + "description": "When Black formats multiple files, it may use a process pool to speed up formatting. This option controls the number of parallel workers. This can also be specified via the BLACK_NUM_WORKERS environment variable. Defaults to the number of CPUs in the system." + }, + "quiet": { + "type": "boolean", + "description": "Stop emitting all non-critical output. Error messages will still be emitted (which can silenced by 2>/dev/null).", + "default": false + }, + "verbose": { + "type": "boolean", + "description": "Emit messages about files that were not changed or were ignored due to exclusion patterns. If Black is using a configuration file, a message detailing which one it is using will be emitted.", + "default": false + } + } +} diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/rusty.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/rusty.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..b316dd20b Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/rusty.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/rusty.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/rusty.py new file mode 100644 index 000000000..ebd4c052d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/rusty.py @@ -0,0 +1,28 @@ +"""An error-handling model influenced by that used by the Rust programming language + +See https://doc.rust-lang.org/book/ch09-00-error-handling.html. 
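+
+Illustrative use of the Ok/Err/Result types defined below:
+
+    def parse_int(text: str) -> Result[int, ValueError]:
+        try:
+            return Ok(int(text))
+        except ValueError as exc:
+            return Err(exc)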
+""" + +from typing import Generic, TypeVar, Union + +T = TypeVar("T") +E = TypeVar("E", bound=Exception) + + +class Ok(Generic[T]): + def __init__(self, value: T) -> None: + self._value = value + + def ok(self) -> T: + return self._value + + +class Err(Generic[E]): + def __init__(self, e: E) -> None: + self._e = e + + def err(self) -> E: + return self._e + + +Result = Union[Ok[T], Err[E]] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/schema.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/schema.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..40e45c7ab Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/schema.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/schema.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/schema.py new file mode 100644 index 000000000..78e9564cd --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/schema.py @@ -0,0 +1,20 @@ +import importlib.resources +import json +import sys +from typing import Any + + +def get_schema(tool_name: str = "black") -> Any: + """Get the stored complete schema for black's settings.""" + assert tool_name == "black", "Only black is supported." + + pkg = "black.resources" + fname = "black.schema.json" + + if sys.version_info < (3, 9): + with importlib.resources.open_text(pkg, fname, encoding="utf-8") as f: + return json.load(f) + + schema = importlib.resources.files(pkg).joinpath(fname) # type: ignore[unreachable] + with schema.open(encoding="utf-8") as f: + return json.load(f) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/strings.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/strings.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..a72f4b186 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/strings.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/strings.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/strings.py new file mode 100644 index 000000000..69a8c8002 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/strings.py @@ -0,0 +1,390 @@ +""" +Simple formatting on strings. Further string formatting code is in trans.py. +""" + +import re +import sys +from functools import lru_cache +from typing import Final, List, Match, Pattern, Tuple + +from black._width_table import WIDTH_TABLE +from blib2to3.pytree import Leaf + +STRING_PREFIX_CHARS: Final = "furbFURB" # All possible string prefix characters. +STRING_PREFIX_RE: Final = re.compile( + r"^([" + STRING_PREFIX_CHARS + r"]*)(.*)$", re.DOTALL +) +UNICODE_ESCAPE_RE: Final = re.compile( + r"(?P\\+)(?P" + r"(u(?P[a-fA-F0-9]{4}))" # Character with 16-bit hex value xxxx + r"|(U(?P[a-fA-F0-9]{8}))" # Character with 32-bit hex value xxxxxxxx + r"|(x(?P[a-fA-F0-9]{2}))" # Character with hex value hh + r"|(N\{(?P[a-zA-Z0-9 \-]{2,})\})" # Character named name in the Unicode database + r")", + re.VERBOSE, +) + + +def sub_twice(regex: Pattern[str], replacement: str, original: str) -> str: + """Replace `regex` with `replacement` twice on `original`. + + This is used by string normalization to perform replaces on + overlapping matches. 
+ """ + return regex.sub(replacement, regex.sub(replacement, original)) + + +def has_triple_quotes(string: str) -> bool: + """ + Returns: + True iff @string starts with three quotation characters. + """ + raw_string = string.lstrip(STRING_PREFIX_CHARS) + return raw_string[:3] in {'"""', "'''"} + + +def lines_with_leading_tabs_expanded(s: str) -> List[str]: + """ + Splits string into lines and expands only leading tabs (following the normal + Python rules) + """ + lines = [] + for line in s.splitlines(): + stripped_line = line.lstrip() + if not stripped_line or stripped_line == line: + lines.append(line) + else: + prefix_length = len(line) - len(stripped_line) + prefix = line[:prefix_length].expandtabs() + lines.append(prefix + stripped_line) + if s.endswith("\n"): + lines.append("") + return lines + + +def fix_docstring(docstring: str, prefix: str) -> str: + # https://www.python.org/dev/peps/pep-0257/#handling-docstring-indentation + if not docstring: + return "" + lines = lines_with_leading_tabs_expanded(docstring) + # Determine minimum indentation (first line doesn't count): + indent = sys.maxsize + for line in lines[1:]: + stripped = line.lstrip() + if stripped: + indent = min(indent, len(line) - len(stripped)) + # Remove indentation (first line is special): + trimmed = [lines[0].strip()] + if indent < sys.maxsize: + last_line_idx = len(lines) - 2 + for i, line in enumerate(lines[1:]): + stripped_line = line[indent:].rstrip() + if stripped_line or i == last_line_idx: + trimmed.append(prefix + stripped_line) + else: + trimmed.append("") + return "\n".join(trimmed) + + +def get_string_prefix(string: str) -> str: + """ + Pre-conditions: + * assert_is_leaf_string(@string) + + Returns: + @string's prefix (e.g. '', 'r', 'f', or 'rf'). + """ + assert_is_leaf_string(string) + + prefix = "" + prefix_idx = 0 + while string[prefix_idx] in STRING_PREFIX_CHARS: + prefix += string[prefix_idx] + prefix_idx += 1 + + return prefix + + +def assert_is_leaf_string(string: str) -> None: + """ + Checks the pre-condition that @string has the format that you would expect + of `leaf.value` where `leaf` is some Leaf such that `leaf.type == + token.STRING`. A more precise description of the pre-conditions that are + checked are listed below. + + Pre-conditions: + * @string starts with either ', ", ', or " where + `set()` is some subset of `set(STRING_PREFIX_CHARS)`. + * @string ends with a quote character (' or "). + + Raises: + AssertionError(...) if the pre-conditions listed above are not + satisfied. + """ + dquote_idx = string.find('"') + squote_idx = string.find("'") + if -1 in [dquote_idx, squote_idx]: + quote_idx = max(dquote_idx, squote_idx) + else: + quote_idx = min(squote_idx, dquote_idx) + + assert ( + 0 <= quote_idx < len(string) - 1 + ), f"{string!r} is missing a starting quote character (' or \")." + assert string[-1] in ( + "'", + '"', + ), f"{string!r} is missing an ending quote character (' or \")." + assert set(string[:quote_idx]).issubset( + set(STRING_PREFIX_CHARS) + ), f"{set(string[:quote_idx])} is NOT a subset of {set(STRING_PREFIX_CHARS)}." 
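+
+# Illustrative behaviour of the helpers above:
+#   get_string_prefix("rb'data'") -> "rb"
+#   has_triple_quotes('"""docstring"""') -> True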
+ + +def normalize_string_prefix(s: str) -> str: + """Make all string prefixes lowercase.""" + match = STRING_PREFIX_RE.match(s) + assert match is not None, f"failed to match string {s!r}" + orig_prefix = match.group(1) + new_prefix = ( + orig_prefix.replace("F", "f") + .replace("B", "b") + .replace("U", "") + .replace("u", "") + ) + + # Python syntax guarantees max 2 prefixes and that one of them is "r" + if len(new_prefix) == 2 and "r" != new_prefix[0].lower(): + new_prefix = new_prefix[::-1] + return f"{new_prefix}{match.group(2)}" + + +# Re(gex) does actually cache patterns internally but this still improves +# performance on a long list literal of strings by 5-9% since lru_cache's +# caching overhead is much lower. +@lru_cache(maxsize=64) +def _cached_compile(pattern: str) -> Pattern[str]: + return re.compile(pattern) + + +def normalize_string_quotes(s: str) -> str: + """Prefer double quotes but only if it doesn't cause more escaping. + + Adds or removes backslashes as appropriate. + """ + value = s.lstrip(STRING_PREFIX_CHARS) + if value[:3] == '"""': + return s + + elif value[:3] == "'''": + orig_quote = "'''" + new_quote = '"""' + elif value[0] == '"': + orig_quote = '"' + new_quote = "'" + else: + orig_quote = "'" + new_quote = '"' + first_quote_pos = s.find(orig_quote) + if first_quote_pos == -1: + return s # There's an internal error + + prefix = s[:first_quote_pos] + unescaped_new_quote = _cached_compile(rf"(([^\\]|^)(\\\\)*){new_quote}") + escaped_new_quote = _cached_compile(rf"([^\\]|^)\\((?:\\\\)*){new_quote}") + escaped_orig_quote = _cached_compile(rf"([^\\]|^)\\((?:\\\\)*){orig_quote}") + body = s[first_quote_pos + len(orig_quote) : -len(orig_quote)] + if "r" in prefix.casefold(): + if unescaped_new_quote.search(body): + # There's at least one unescaped new_quote in this raw string + # so converting is impossible + return s + + # Do not introduce or remove backslashes in raw strings + new_body = body + else: + # remove unnecessary escapes + new_body = sub_twice(escaped_new_quote, rf"\1\2{new_quote}", body) + if body != new_body: + # Consider the string without unnecessary escapes as the original + body = new_body + s = f"{prefix}{orig_quote}{body}{orig_quote}" + new_body = sub_twice(escaped_orig_quote, rf"\1\2{orig_quote}", new_body) + new_body = sub_twice(unescaped_new_quote, rf"\1\\{new_quote}", new_body) + + if "f" in prefix.casefold(): + matches = re.findall( + r""" + (?:(? orig_escape_count: + return s # Do not introduce more escaping + + if new_escape_count == orig_escape_count and orig_quote == '"': + return s # Prefer double quotes + + return f"{prefix}{new_quote}{new_body}{new_quote}" + + +def normalize_fstring_quotes( + quote: str, + middles: List[Leaf], + is_raw_fstring: bool, +) -> Tuple[List[Leaf], str]: + """Prefer double quotes but only if it doesn't cause more escaping. + + Adds or removes backslashes as appropriate. 
+ """ + if quote == '"""': + return middles, quote + + elif quote == "'''": + new_quote = '"""' + elif quote == '"': + new_quote = "'" + else: + new_quote = '"' + + unescaped_new_quote = _cached_compile(rf"(([^\\]|^)(\\\\)*){new_quote}") + escaped_new_quote = _cached_compile(rf"([^\\]|^)\\((?:\\\\)*){new_quote}") + escaped_orig_quote = _cached_compile(rf"([^\\]|^)\\((?:\\\\)*){quote}") + if is_raw_fstring: + for middle in middles: + if unescaped_new_quote.search(middle.value): + # There's at least one unescaped new_quote in this raw string + # so converting is impossible + return middles, quote + + # Do not introduce or remove backslashes in raw strings, just use double quote + return middles, '"' + + new_segments = [] + for middle in middles: + segment = middle.value + # remove unnecessary escapes + new_segment = sub_twice(escaped_new_quote, rf"\1\2{new_quote}", segment) + if segment != new_segment: + # Consider the string without unnecessary escapes as the original + middle.value = new_segment + + new_segment = sub_twice(escaped_orig_quote, rf"\1\2{quote}", new_segment) + new_segment = sub_twice(unescaped_new_quote, rf"\1\\{new_quote}", new_segment) + new_segments.append(new_segment) + + if new_quote == '"""' and new_segments[-1].endswith('"'): + # edge case: + new_segments[-1] = new_segments[-1][:-1] + '\\"' + + for middle, new_segment in zip(middles, new_segments): + orig_escape_count = middle.value.count("\\") + new_escape_count = new_segment.count("\\") + + if new_escape_count > orig_escape_count: + return middles, quote # Do not introduce more escaping + + if new_escape_count == orig_escape_count and quote == '"': + return middles, quote # Prefer double quotes + + for middle, new_segment in zip(middles, new_segments): + middle.value = new_segment + + return middles, new_quote + + +def normalize_unicode_escape_sequences(leaf: Leaf) -> None: + """Replace hex codes in Unicode escape sequences with lowercase representation.""" + text = leaf.value + prefix = get_string_prefix(text) + if "r" in prefix.lower(): + return + + def replace(m: Match[str]) -> str: + groups = m.groupdict() + back_slashes = groups["backslashes"] + + if len(back_slashes) % 2 == 0: + return back_slashes + groups["body"] + + if groups["u"]: + # \u + return back_slashes + "u" + groups["u"].lower() + elif groups["U"]: + # \U + return back_slashes + "U" + groups["U"].lower() + elif groups["x"]: + # \x + return back_slashes + "x" + groups["x"].lower() + else: + assert groups["N"], f"Unexpected match: {m}" + # \N{} + return back_slashes + "N{" + groups["N"].upper() + "}" + + leaf.value = re.sub(UNICODE_ESCAPE_RE, replace, text) + + +@lru_cache(maxsize=4096) +def char_width(char: str) -> int: + """Return the width of a single character as it would be displayed in a + terminal or editor (which respects Unicode East Asian Width). + + Full width characters are counted as 2, while half width characters are + counted as 1. Also control characters are counted as 0. 
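+
+    For example (illustrative): char_width("a") == 1 and char_width("你") == 2.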
+ """ + table = WIDTH_TABLE + codepoint = ord(char) + highest = len(table) - 1 + lowest = 0 + idx = highest // 2 + while True: + start_codepoint, end_codepoint, width = table[idx] + if codepoint < start_codepoint: + highest = idx - 1 + elif codepoint > end_codepoint: + lowest = idx + 1 + else: + return 0 if width < 0 else width + if highest < lowest: + break + idx = (highest + lowest) // 2 + return 1 + + +def str_width(line_str: str) -> int: + """Return the width of `line_str` as it would be displayed in a terminal + or editor (which respects Unicode East Asian Width). + + You could utilize this function to determine, for example, if a string + is too wide to display in a terminal or editor. + """ + if line_str.isascii(): + # Fast path for a line consisting of only ASCII characters + return len(line_str) + return sum(map(char_width, line_str)) + + +def count_chars_in_width(line_str: str, max_width: int) -> int: + """Count the number of characters in `line_str` that would fit in a + terminal or editor of `max_width` (which respects Unicode East Asian + Width). + """ + total_width = 0 + for i, char in enumerate(line_str): + width = char_width(char) + if width + total_width > max_width: + return i + total_width += width + return len(line_str) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/trans.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/trans.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..ecea0dbab Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/trans.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/black/trans.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/trans.py new file mode 100644 index 000000000..29a978c6b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/black/trans.py @@ -0,0 +1,2534 @@ +""" +String transformers that can split and merge strings. 
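+
+The main transformers defined below are StringMerger, StringParenStripper,
+StringSplitter, and StringParenWrapper.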
+""" + +import re +from abc import ABC, abstractmethod +from collections import defaultdict +from dataclasses import dataclass +from typing import ( + Any, + Callable, + ClassVar, + Collection, + Dict, + Final, + Iterable, + Iterator, + List, + Literal, + Optional, + Sequence, + Set, + Tuple, + TypeVar, + Union, +) + +from mypy_extensions import trait + +from black.comments import contains_pragma_comment +from black.lines import Line, append_leaves +from black.mode import Feature, Mode, Preview +from black.nodes import ( + CLOSING_BRACKETS, + OPENING_BRACKETS, + STANDALONE_COMMENT, + is_empty_lpar, + is_empty_par, + is_empty_rpar, + is_part_of_annotation, + parent_type, + replace_child, + syms, +) +from black.rusty import Err, Ok, Result +from black.strings import ( + assert_is_leaf_string, + count_chars_in_width, + get_string_prefix, + has_triple_quotes, + normalize_string_quotes, + str_width, +) +from blib2to3.pgen2 import token +from blib2to3.pytree import Leaf, Node + + +class CannotTransform(Exception): + """Base class for errors raised by Transformers.""" + + +# types +T = TypeVar("T") +LN = Union[Leaf, Node] +Transformer = Callable[[Line, Collection[Feature], Mode], Iterator[Line]] +Index = int +NodeType = int +ParserState = int +StringID = int +TResult = Result[T, CannotTransform] # (T)ransform Result +TMatchResult = TResult[List[Index]] + +SPLIT_SAFE_CHARS = frozenset(["\u3001", "\u3002", "\uff0c"]) # East Asian stops + + +def TErr(err_msg: str) -> Err[CannotTransform]: + """(T)ransform Err + + Convenience function used when working with the TResult type. + """ + cant_transform = CannotTransform(err_msg) + return Err(cant_transform) + + +def hug_power_op( + line: Line, features: Collection[Feature], mode: Mode +) -> Iterator[Line]: + """A transformer which normalizes spacing around power operators.""" + + # Performance optimization to avoid unnecessary Leaf clones and other ops. + for leaf in line.leaves: + if leaf.type == token.DOUBLESTAR: + break + else: + raise CannotTransform("No doublestar token was found in the line.") + + def is_simple_lookup(index: int, kind: Literal[1, -1]) -> bool: + # Brackets and parentheses indicate calls, subscripts, etc. ... + # basically stuff that doesn't count as "simple". Only a NAME lookup + # or dotted lookup (eg. NAME.NAME) is OK. + if Preview.is_simple_lookup_for_doublestar_expression not in mode: + return original_is_simple_lookup_func(line, index, kind) + + else: + if kind == -1: + return handle_is_simple_look_up_prev( + line, index, {token.RPAR, token.RSQB} + ) + else: + return handle_is_simple_lookup_forward( + line, index, {token.LPAR, token.LSQB} + ) + + def is_simple_operand(index: int, kind: Literal[1, -1]) -> bool: + # An operand is considered "simple" if's a NAME, a numeric CONSTANT, a simple + # lookup (see above), with or without a preceding unary operator. + start = line.leaves[index] + if start.type in {token.NAME, token.NUMBER}: + return is_simple_lookup(index, kind) + + if start.type in {token.PLUS, token.MINUS, token.TILDE}: + if line.leaves[index + 1].type in {token.NAME, token.NUMBER}: + # kind is always one as bases with a preceding unary op will be checked + # for simplicity starting from the next token (so it'll hit the check + # above). 
+ return is_simple_lookup(index + 1, kind=1) + + return False + + new_line = line.clone() + should_hug = False + for idx, leaf in enumerate(line.leaves): + new_leaf = leaf.clone() + if should_hug: + new_leaf.prefix = "" + should_hug = False + + should_hug = ( + (0 < idx < len(line.leaves) - 1) + and leaf.type == token.DOUBLESTAR + and is_simple_operand(idx - 1, kind=-1) + and line.leaves[idx - 1].value != "lambda" + and is_simple_operand(idx + 1, kind=1) + ) + if should_hug: + new_leaf.prefix = "" + + # We have to be careful to make a new line properly: + # - bracket related metadata must be maintained (handled by Line.append) + # - comments need to copied over, updating the leaf IDs they're attached to + new_line.append(new_leaf, preformatted=True) + for comment_leaf in line.comments_after(leaf): + new_line.append(comment_leaf, preformatted=True) + + yield new_line + + +def original_is_simple_lookup_func( + line: Line, index: int, step: Literal[1, -1] +) -> bool: + if step == -1: + disallowed = {token.RPAR, token.RSQB} + else: + disallowed = {token.LPAR, token.LSQB} + + while 0 <= index < len(line.leaves): + current = line.leaves[index] + if current.type in disallowed: + return False + if current.type not in {token.NAME, token.DOT} or current.value == "for": + # If the current token isn't disallowed, we'll assume this is + # simple as only the disallowed tokens are semantically + # attached to this lookup expression we're checking. Also, + # stop early if we hit the 'for' bit of a comprehension. + return True + + index += step + + return True + + +def handle_is_simple_look_up_prev(line: Line, index: int, disallowed: Set[int]) -> bool: + """ + Handling the determination of is_simple_lookup for the lines prior to the doublestar + token. This is required because of the need to isolate the chained expression + to determine the bracket or parenthesis belong to the single expression. + """ + contains_disallowed = False + chain = [] + + while 0 <= index < len(line.leaves): + current = line.leaves[index] + chain.append(current) + if not contains_disallowed and current.type in disallowed: + contains_disallowed = True + if not is_expression_chained(chain): + return not contains_disallowed + + index -= 1 + + return True + + +def handle_is_simple_lookup_forward( + line: Line, index: int, disallowed: Set[int] +) -> bool: + """ + Handling decision is_simple_lookup for the lines behind the doublestar token. + This function is simplified to keep consistent with the prior logic and the forward + case are more straightforward and do not need to care about chained expressions. + """ + while 0 <= index < len(line.leaves): + current = line.leaves[index] + if current.type in disallowed: + return False + if current.type not in {token.NAME, token.DOT} or ( + current.type == token.NAME and current.value == "for" + ): + # If the current token isn't disallowed, we'll assume this is simple as + # only the disallowed tokens are semantically attached to this lookup + # expression we're checking. Also, stop early if we hit the 'for' bit + # of a comprehension. + return True + + index += 1 + + return True + + +def is_expression_chained(chained_leaves: List[Leaf]) -> bool: + """ + Function to determine if the variable is a chained call. 
+ (e.g., foo.lookup, foo().lookup, (foo.lookup())) will be recognized as chained call) + """ + if len(chained_leaves) < 2: + return True + + current_leaf = chained_leaves[-1] + past_leaf = chained_leaves[-2] + + if past_leaf.type == token.NAME: + return current_leaf.type in {token.DOT} + elif past_leaf.type in {token.RPAR, token.RSQB}: + return current_leaf.type in {token.RSQB, token.RPAR} + elif past_leaf.type in {token.LPAR, token.LSQB}: + return current_leaf.type in {token.NAME, token.LPAR, token.LSQB} + else: + return False + + +class StringTransformer(ABC): + """ + An implementation of the Transformer protocol that relies on its + subclasses overriding the template methods `do_match(...)` and + `do_transform(...)`. + + This Transformer works exclusively on strings (for example, by merging + or splitting them). + + The following sections can be found among the docstrings of each concrete + StringTransformer subclass. + + Requirements: + Which requirements must be met of the given Line for this + StringTransformer to be applied? + + Transformations: + If the given Line meets all of the above requirements, which string + transformations can you expect to be applied to it by this + StringTransformer? + + Collaborations: + What contractual agreements does this StringTransformer have with other + StringTransfomers? Such collaborations should be eliminated/minimized + as much as possible. + """ + + __name__: Final = "StringTransformer" + + # Ideally this would be a dataclass, but unfortunately mypyc breaks when used with + # `abc.ABC`. + def __init__(self, line_length: int, normalize_strings: bool) -> None: + self.line_length = line_length + self.normalize_strings = normalize_strings + + @abstractmethod + def do_match(self, line: Line) -> TMatchResult: + """ + Returns: + * Ok(string_indices) such that for each index, `line.leaves[index]` + is our target string if a match was able to be made. For + transformers that don't result in more lines (e.g. StringMerger, + StringParenStripper), multiple matches and transforms are done at + once to reduce the complexity. + OR + * Err(CannotTransform), if no match could be made. + """ + + @abstractmethod + def do_transform( + self, line: Line, string_indices: List[int] + ) -> Iterator[TResult[Line]]: + """ + Yields: + * Ok(new_line) where new_line is the new transformed line. + OR + * Err(CannotTransform) if the transformation failed for some reason. The + `do_match(...)` template method should usually be used to reject + the form of the given Line, but in some cases it is difficult to + know whether or not a Line meets the StringTransformer's + requirements until the transformation is already midway. + + Side Effects: + This method should NOT mutate @line directly, but it MAY mutate the + Line's underlying Node structure. (WARNING: If the underlying Node + structure IS altered, then this method should NOT be allowed to + yield an CannotTransform after that point.) + """ + + def __call__( + self, line: Line, _features: Collection[Feature], _mode: Mode + ) -> Iterator[Line]: + """ + StringTransformer instances have a call signature that mirrors that of + the Transformer type. + + Raises: + CannotTransform(...) if the concrete StringTransformer class is unable + to transform @line. + """ + # Optimization to avoid calling `self.do_match(...)` when the line does + # not contain any string. 
+ if not any(leaf.type == token.STRING for leaf in line.leaves): + raise CannotTransform("There are no strings in this line.") + + match_result = self.do_match(line) + + if isinstance(match_result, Err): + cant_transform = match_result.err() + raise CannotTransform( + f"The string transformer {self.__class__.__name__} does not recognize" + " this line as one that it can transform." + ) from cant_transform + + string_indices = match_result.ok() + + for line_result in self.do_transform(line, string_indices): + if isinstance(line_result, Err): + cant_transform = line_result.err() + raise CannotTransform( + "StringTransformer failed while attempting to transform string." + ) from cant_transform + line = line_result.ok() + yield line + + +@dataclass +class CustomSplit: + """A custom (i.e. manual) string split. + + A single CustomSplit instance represents a single substring. + + Examples: + Consider the following string: + ``` + "Hi there friend." + " This is a custom" + f" string {split}." + ``` + + This string will correspond to the following three CustomSplit instances: + ``` + CustomSplit(False, 16) + CustomSplit(False, 17) + CustomSplit(True, 16) + ``` + """ + + has_prefix: bool + break_idx: int + + +@trait +class CustomSplitMapMixin: + """ + This mixin class is used to map merged strings to a sequence of + CustomSplits, which will then be used to re-split the strings iff none of + the resultant substrings go over the configured max line length. + """ + + _Key: ClassVar = Tuple[StringID, str] + _CUSTOM_SPLIT_MAP: ClassVar[Dict[_Key, Tuple[CustomSplit, ...]]] = defaultdict( + tuple + ) + + @staticmethod + def _get_key(string: str) -> "CustomSplitMapMixin._Key": + """ + Returns: + A unique identifier that is used internally to map @string to a + group of custom splits. + """ + return (id(string), string) + + def add_custom_splits( + self, string: str, custom_splits: Iterable[CustomSplit] + ) -> None: + """Custom Split Map Setter Method + + Side Effects: + Adds a mapping from @string to the custom splits @custom_splits. + """ + key = self._get_key(string) + self._CUSTOM_SPLIT_MAP[key] = tuple(custom_splits) + + def pop_custom_splits(self, string: str) -> List[CustomSplit]: + """Custom Split Map Getter Method + + Returns: + * A list of the custom splits that are mapped to @string, if any + exist. + OR + * [], otherwise. + + Side Effects: + Deletes the mapping between @string and its associated custom + splits (which are returned to the caller). + """ + key = self._get_key(string) + + custom_splits = self._CUSTOM_SPLIT_MAP[key] + del self._CUSTOM_SPLIT_MAP[key] + + return list(custom_splits) + + def has_custom_splits(self, string: str) -> bool: + """ + Returns: + True iff @string is associated with a set of custom splits. + """ + key = self._get_key(string) + return key in self._CUSTOM_SPLIT_MAP + + +class StringMerger(StringTransformer, CustomSplitMapMixin): + """StringTransformer that merges strings together. + + Requirements: + (A) The line contains adjacent strings such that ALL of the validation checks + listed in StringMerger._validate_msg(...)'s docstring pass. + OR + (B) The line contains a string which uses line continuation backslashes. + + Transformations: + Depending on which of the two requirements above where met, either: + + (A) The string group associated with the target string is merged. + OR + (B) All line-continuation backslashes are removed from the target string. + + Collaborations: + StringMerger provides custom split information to StringSplitter. 
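+
+    For example (illustrative): two adjacent literals such as "hello " "world"
+    on a single line would typically be merged into the single literal
+    "hello world", with the position of the original break recorded as a
+    custom split that StringSplitter may reuse later.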
+ """ + + def do_match(self, line: Line) -> TMatchResult: + LL = line.leaves + + is_valid_index = is_valid_index_factory(LL) + + string_indices = [] + idx = 0 + while is_valid_index(idx): + leaf = LL[idx] + if ( + leaf.type == token.STRING + and is_valid_index(idx + 1) + and LL[idx + 1].type == token.STRING + ): + # Let's check if the string group contains an inline comment + # If we have a comment inline, we don't merge the strings + contains_comment = False + i = idx + while is_valid_index(i): + if LL[i].type != token.STRING: + break + if line.comments_after(LL[i]): + contains_comment = True + break + i += 1 + + if not is_part_of_annotation(leaf) and not contains_comment: + string_indices.append(idx) + + # Advance to the next non-STRING leaf. + idx += 2 + while is_valid_index(idx) and LL[idx].type == token.STRING: + idx += 1 + + elif leaf.type == token.STRING and "\\\n" in leaf.value: + string_indices.append(idx) + # Advance to the next non-STRING leaf. + idx += 1 + while is_valid_index(idx) and LL[idx].type == token.STRING: + idx += 1 + + else: + idx += 1 + + if string_indices: + return Ok(string_indices) + else: + return TErr("This line has no strings that need merging.") + + def do_transform( + self, line: Line, string_indices: List[int] + ) -> Iterator[TResult[Line]]: + new_line = line + + rblc_result = self._remove_backslash_line_continuation_chars( + new_line, string_indices + ) + if isinstance(rblc_result, Ok): + new_line = rblc_result.ok() + + msg_result = self._merge_string_group(new_line, string_indices) + if isinstance(msg_result, Ok): + new_line = msg_result.ok() + + if isinstance(rblc_result, Err) and isinstance(msg_result, Err): + msg_cant_transform = msg_result.err() + rblc_cant_transform = rblc_result.err() + cant_transform = CannotTransform( + "StringMerger failed to merge any strings in this line." + ) + + # Chain the errors together using `__cause__`. + msg_cant_transform.__cause__ = rblc_cant_transform + cant_transform.__cause__ = msg_cant_transform + + yield Err(cant_transform) + else: + yield Ok(new_line) + + @staticmethod + def _remove_backslash_line_continuation_chars( + line: Line, string_indices: List[int] + ) -> TResult[Line]: + """ + Merge strings that were split across multiple lines using + line-continuation backslashes. + + Returns: + Ok(new_line), if @line contains backslash line-continuation + characters. + OR + Err(CannotTransform), otherwise. + """ + LL = line.leaves + + indices_to_transform = [] + for string_idx in string_indices: + string_leaf = LL[string_idx] + if ( + string_leaf.type == token.STRING + and "\\\n" in string_leaf.value + and not has_triple_quotes(string_leaf.value) + ): + indices_to_transform.append(string_idx) + + if not indices_to_transform: + return TErr( + "Found no string leaves that contain backslash line continuation" + " characters." + ) + + new_line = line.clone() + new_line.comments = line.comments.copy() + append_leaves(new_line, line, LL) + + for string_idx in indices_to_transform: + new_string_leaf = new_line.leaves[string_idx] + new_string_leaf.value = new_string_leaf.value.replace("\\\n", "") + + return Ok(new_line) + + def _merge_string_group( + self, line: Line, string_indices: List[int] + ) -> TResult[Line]: + """ + Merges string groups (i.e. set of adjacent strings). + + Each index from `string_indices` designates one string group's first + leaf in `line.leaves`. + + Returns: + Ok(new_line), if ALL of the validation checks found in + _validate_msg(...) pass. + OR + Err(CannotTransform), otherwise. 
+ """ + LL = line.leaves + + is_valid_index = is_valid_index_factory(LL) + + # A dict of {string_idx: tuple[num_of_strings, string_leaf]}. + merged_string_idx_dict: Dict[int, Tuple[int, Leaf]] = {} + for string_idx in string_indices: + vresult = self._validate_msg(line, string_idx) + if isinstance(vresult, Err): + continue + merged_string_idx_dict[string_idx] = self._merge_one_string_group( + LL, string_idx, is_valid_index + ) + + if not merged_string_idx_dict: + return TErr("No string group is merged") + + # Build the final line ('new_line') that this method will later return. + new_line = line.clone() + previous_merged_string_idx = -1 + previous_merged_num_of_strings = -1 + for i, leaf in enumerate(LL): + if i in merged_string_idx_dict: + previous_merged_string_idx = i + previous_merged_num_of_strings, string_leaf = merged_string_idx_dict[i] + new_line.append(string_leaf) + + if ( + previous_merged_string_idx + <= i + < previous_merged_string_idx + previous_merged_num_of_strings + ): + for comment_leaf in line.comments_after(LL[i]): + new_line.append(comment_leaf, preformatted=True) + continue + + append_leaves(new_line, line, [leaf]) + + return Ok(new_line) + + def _merge_one_string_group( + self, LL: List[Leaf], string_idx: int, is_valid_index: Callable[[int], bool] + ) -> Tuple[int, Leaf]: + """ + Merges one string group where the first string in the group is + `LL[string_idx]`. + + Returns: + A tuple of `(num_of_strings, leaf)` where `num_of_strings` is the + number of strings merged and `leaf` is the newly merged string + to be replaced in the new line. + """ + # If the string group is wrapped inside an Atom node, we must make sure + # to later replace that Atom with our new (merged) string leaf. + atom_node = LL[string_idx].parent + + # We will place BREAK_MARK in between every two substrings that we + # merge. We will then later go through our final result and use the + # various instances of BREAK_MARK we find to add the right values to + # the custom split map. + BREAK_MARK = "@@@@@ BLACK BREAKPOINT MARKER @@@@@" + + QUOTE = LL[string_idx].value[-1] + + def make_naked(string: str, string_prefix: str) -> str: + """Strip @string (i.e. make it a "naked" string) + + Pre-conditions: + * assert_is_leaf_string(@string) + + Returns: + A string that is identical to @string except that + @string_prefix has been stripped, the surrounding QUOTE + characters have been removed, and any remaining QUOTE + characters have been escaped. + """ + assert_is_leaf_string(string) + if "f" in string_prefix: + f_expressions = ( + string[span[0] + 1 : span[1] - 1] # +-1 to get rid of curly braces + for span in iter_fexpr_spans(string) + ) + debug_expressions_contain_visible_quotes = any( + re.search(r".*[\'\"].*(?= 0 + ), "Logic error while filling the custom string breakpoint cache." + + temp_string = temp_string[mark_idx + len(BREAK_MARK) :] + breakpoint_idx = mark_idx + (len(prefix) if has_prefix else 0) + 1 + custom_splits.append(CustomSplit(has_prefix, breakpoint_idx)) + + string_leaf = Leaf(token.STRING, S_leaf.value.replace(BREAK_MARK, "")) + + if atom_node is not None: + # If not all children of the atom node are merged (this can happen + # when there is a standalone comment in the middle) ... + if non_string_idx - string_idx < len(atom_node.children): + # We need to replace the old STRING leaves with the new string leaf. 
+ first_child_idx = LL[string_idx].remove() + for idx in range(string_idx + 1, non_string_idx): + LL[idx].remove() + if first_child_idx is not None: + atom_node.insert_child(first_child_idx, string_leaf) + else: + # Else replace the atom node with the new string leaf. + replace_child(atom_node, string_leaf) + + self.add_custom_splits(string_leaf.value, custom_splits) + return num_of_strings, string_leaf + + @staticmethod + def _validate_msg(line: Line, string_idx: int) -> TResult[None]: + """Validate (M)erge (S)tring (G)roup + + Transform-time string validation logic for _merge_string_group(...). + + Returns: + * Ok(None), if ALL validation checks (listed below) pass. + OR + * Err(CannotTransform), if any of the following are true: + - The target string group does not contain ANY stand-alone comments. + - The target string is not in a string group (i.e. it has no + adjacent strings). + - The string group has more than one inline comment. + - The string group has an inline comment that appears to be a pragma. + - The set of all string prefixes in the string group is of + length greater than one and is not equal to {"", "f"}. + - The string group consists of raw strings. + - The string group is stringified type annotations. We don't want to + process stringified type annotations since pyright doesn't support + them spanning multiple string values. (NOTE: mypy, pytype, pyre do + support them, so we can change if pyright also gains support in the + future. See https://github.com/microsoft/pyright/issues/4359.) + """ + # We first check for "inner" stand-alone comments (i.e. stand-alone + # comments that have a string leaf before them AND after them). + for inc in [1, -1]: + i = string_idx + found_sa_comment = False + is_valid_index = is_valid_index_factory(line.leaves) + while is_valid_index(i) and line.leaves[i].type in [ + token.STRING, + STANDALONE_COMMENT, + ]: + if line.leaves[i].type == STANDALONE_COMMENT: + found_sa_comment = True + elif found_sa_comment: + return TErr( + "StringMerger does NOT merge string groups which contain " + "stand-alone comments." + ) + + i += inc + + num_of_inline_string_comments = 0 + set_of_prefixes = set() + num_of_strings = 0 + for leaf in line.leaves[string_idx:]: + if leaf.type != token.STRING: + # If the string group is trailed by a comma, we count the + # comments trailing the comma to be one of the string group's + # comments. + if leaf.type == token.COMMA and id(leaf) in line.comments: + num_of_inline_string_comments += 1 + break + + if has_triple_quotes(leaf.value): + return TErr("StringMerger does NOT merge multiline strings.") + + num_of_strings += 1 + prefix = get_string_prefix(leaf.value).lower() + if "r" in prefix: + return TErr("StringMerger does NOT merge raw strings.") + + set_of_prefixes.add(prefix) + + if id(leaf) in line.comments: + num_of_inline_string_comments += 1 + if contains_pragma_comment(line.comments[id(leaf)]): + return TErr("Cannot merge strings which have pragma comments.") + + if num_of_strings < 2: + return TErr( + f"Not enough strings to merge (num_of_strings={num_of_strings})." + ) + + if num_of_inline_string_comments > 1: + return TErr( + f"Too many inline string comments ({num_of_inline_string_comments})." + ) + + if len(set_of_prefixes) > 1 and set_of_prefixes != {"", "f"}: + return TErr(f"Too many different prefixes ({set_of_prefixes}).") + + return Ok(None) + + +class StringParenStripper(StringTransformer): + """StringTransformer that strips surrounding parentheses from strings. 
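+
+    For example (illustrative): `x = ("some long string")` would typically be
+    rewritten as `x = "some long string"`, provided the checks below pass.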
+ + Requirements: + The line contains a string which is surrounded by parentheses and: + - The target string is NOT the only argument to a function call. + - The target string is NOT a "pointless" string. + - If the target string contains a PERCENT, the brackets are not + preceded or followed by an operator with higher precedence than + PERCENT. + + Transformations: + The parentheses mentioned in the 'Requirements' section are stripped. + + Collaborations: + StringParenStripper has its own inherent usefulness, but it is also + relied on to clean up the parentheses created by StringParenWrapper (in + the event that they are no longer needed). + """ + + def do_match(self, line: Line) -> TMatchResult: + LL = line.leaves + + is_valid_index = is_valid_index_factory(LL) + + string_indices = [] + + idx = -1 + while True: + idx += 1 + if idx >= len(LL): + break + leaf = LL[idx] + + # Should be a string... + if leaf.type != token.STRING: + continue + + # If this is a "pointless" string... + if ( + leaf.parent + and leaf.parent.parent + and leaf.parent.parent.type == syms.simple_stmt + ): + continue + + # Should be preceded by a non-empty LPAR... + if ( + not is_valid_index(idx - 1) + or LL[idx - 1].type != token.LPAR + or is_empty_lpar(LL[idx - 1]) + ): + continue + + # That LPAR should NOT be preceded by a function name or a closing + # bracket (which could be a function which returns a function or a + # list/dictionary that contains a function)... + if is_valid_index(idx - 2) and ( + LL[idx - 2].type == token.NAME or LL[idx - 2].type in CLOSING_BRACKETS + ): + continue + + string_idx = idx + + # Skip the string trailer, if one exists. + string_parser = StringParser() + next_idx = string_parser.parse(LL, string_idx) + + # if the leaves in the parsed string include a PERCENT, we need to + # make sure the initial LPAR is NOT preceded by an operator with + # higher or equal precedence to PERCENT + if is_valid_index(idx - 2): + # mypy can't quite follow unless we name this + before_lpar = LL[idx - 2] + if token.PERCENT in {leaf.type for leaf in LL[idx - 1 : next_idx]} and ( + ( + before_lpar.type + in { + token.STAR, + token.AT, + token.SLASH, + token.DOUBLESLASH, + token.PERCENT, + token.TILDE, + token.DOUBLESTAR, + token.AWAIT, + token.LSQB, + token.LPAR, + } + ) + or ( + # only unary PLUS/MINUS + before_lpar.parent + and before_lpar.parent.type == syms.factor + and (before_lpar.type in {token.PLUS, token.MINUS}) + ) + ): + continue + + # Should be followed by a non-empty RPAR... 
+ if ( + is_valid_index(next_idx) + and LL[next_idx].type == token.RPAR + and not is_empty_rpar(LL[next_idx]) + ): + # That RPAR should NOT be followed by anything with higher + # precedence than PERCENT + if is_valid_index(next_idx + 1) and LL[next_idx + 1].type in { + token.DOUBLESTAR, + token.LSQB, + token.LPAR, + token.DOT, + }: + continue + + string_indices.append(string_idx) + idx = string_idx + while idx < len(LL) - 1 and LL[idx + 1].type == token.STRING: + idx += 1 + + if string_indices: + return Ok(string_indices) + return TErr("This line has no strings wrapped in parens.") + + def do_transform( + self, line: Line, string_indices: List[int] + ) -> Iterator[TResult[Line]]: + LL = line.leaves + + string_and_rpar_indices: List[int] = [] + for string_idx in string_indices: + string_parser = StringParser() + rpar_idx = string_parser.parse(LL, string_idx) + + should_transform = True + for leaf in (LL[string_idx - 1], LL[rpar_idx]): + if line.comments_after(leaf): + # Should not strip parentheses which have comments attached + # to them. + should_transform = False + break + if should_transform: + string_and_rpar_indices.extend((string_idx, rpar_idx)) + + if string_and_rpar_indices: + yield Ok(self._transform_to_new_line(line, string_and_rpar_indices)) + else: + yield Err( + CannotTransform("All string groups have comments attached to them.") + ) + + def _transform_to_new_line( + self, line: Line, string_and_rpar_indices: List[int] + ) -> Line: + LL = line.leaves + + new_line = line.clone() + new_line.comments = line.comments.copy() + + previous_idx = -1 + # We need to sort the indices, since string_idx and its matching + # rpar_idx may not come in order, e.g. in + # `("outer" % ("inner".join(items)))`, the "inner" string's + # string_idx is smaller than "outer" string's rpar_idx. + for idx in sorted(string_and_rpar_indices): + leaf = LL[idx] + lpar_or_rpar_idx = idx - 1 if leaf.type == token.STRING else idx + append_leaves(new_line, line, LL[previous_idx + 1 : lpar_or_rpar_idx]) + if leaf.type == token.STRING: + string_leaf = Leaf(token.STRING, LL[idx].value) + LL[lpar_or_rpar_idx].remove() # Remove lpar. + replace_child(LL[idx], string_leaf) + new_line.append(string_leaf) + # replace comments + old_comments = new_line.comments.pop(id(LL[idx]), []) + new_line.comments.setdefault(id(string_leaf), []).extend(old_comments) + else: + LL[lpar_or_rpar_idx].remove() # This is a rpar. + + previous_idx = idx + + # Append the leaves after the last idx: + append_leaves(new_line, line, LL[idx + 1 :]) + + return new_line + + +class BaseStringSplitter(StringTransformer): + """ + Abstract class for StringTransformers which transform a Line's strings by splitting + them or placing them on their own lines where necessary to avoid going over + the configured line length. + + Requirements: + * The target string value is responsible for the line going over the + line length limit. It follows that after all of black's other line + split methods have been exhausted, this line (or one of the resulting + lines after all line splits are performed) would still be over the + line_length limit unless we split this string. + AND + + * The target string is NOT a "pointless" string (i.e. a string that has + no parent or siblings). + AND + + * The target string is not followed by an inline comment that appears + to be a pragma. + AND + + * The target string is not a multiline (i.e. triple-quote) string. 
+ """ + + STRING_OPERATORS: Final = [ + token.EQEQUAL, + token.GREATER, + token.GREATEREQUAL, + token.LESS, + token.LESSEQUAL, + token.NOTEQUAL, + token.PERCENT, + token.PLUS, + token.STAR, + ] + + @abstractmethod + def do_splitter_match(self, line: Line) -> TMatchResult: + """ + BaseStringSplitter asks its clients to override this method instead of + `StringTransformer.do_match(...)`. + + Follows the same protocol as `StringTransformer.do_match(...)`. + + Refer to `help(StringTransformer.do_match)` for more information. + """ + + def do_match(self, line: Line) -> TMatchResult: + match_result = self.do_splitter_match(line) + if isinstance(match_result, Err): + return match_result + + string_indices = match_result.ok() + assert len(string_indices) == 1, ( + f"{self.__class__.__name__} should only find one match at a time, found" + f" {len(string_indices)}" + ) + string_idx = string_indices[0] + vresult = self._validate(line, string_idx) + if isinstance(vresult, Err): + return vresult + + return match_result + + def _validate(self, line: Line, string_idx: int) -> TResult[None]: + """ + Checks that @line meets all of the requirements listed in this classes' + docstring. Refer to `help(BaseStringSplitter)` for a detailed + description of those requirements. + + Returns: + * Ok(None), if ALL of the requirements are met. + OR + * Err(CannotTransform), if ANY of the requirements are NOT met. + """ + LL = line.leaves + + string_leaf = LL[string_idx] + + max_string_length = self._get_max_string_length(line, string_idx) + if len(string_leaf.value) <= max_string_length: + return TErr( + "The string itself is not what is causing this line to be too long." + ) + + if not string_leaf.parent or [L.type for L in string_leaf.parent.children] == [ + token.STRING, + token.NEWLINE, + ]: + return TErr( + f"This string ({string_leaf.value}) appears to be pointless (i.e. has" + " no parent)." + ) + + if id(line.leaves[string_idx]) in line.comments and contains_pragma_comment( + line.comments[id(line.leaves[string_idx])] + ): + return TErr( + "Line appears to end with an inline pragma comment. Splitting the line" + " could modify the pragma's behavior." + ) + + if has_triple_quotes(string_leaf.value): + return TErr("We cannot split multiline strings.") + + return Ok(None) + + def _get_max_string_length(self, line: Line, string_idx: int) -> int: + """ + Calculates the max string length used when attempting to determine + whether or not the target string is responsible for causing the line to + go over the line length limit. + + WARNING: This method is tightly coupled to both StringSplitter and + (especially) StringParenWrapper. There is probably a better way to + accomplish what is being done here. + + Returns: + max_string_length: such that `line.leaves[string_idx].value > + max_string_length` implies that the target string IS responsible + for causing this line to exceed the line length limit. + """ + LL = line.leaves + + is_valid_index = is_valid_index_factory(LL) + + # We use the shorthand "WMA4" in comments to abbreviate "We must + # account for". When giving examples, we use STRING to mean some/any + # valid string. + # + # Finally, we use the following convenience variables: + # + # P: The leaf that is before the target string leaf. + # N: The leaf that is after the target string leaf. + # NN: The leaf that is after N. + + # WMA4 the whitespace at the beginning of the line. 
+ offset = line.depth * 4 + + if is_valid_index(string_idx - 1): + p_idx = string_idx - 1 + if ( + LL[string_idx - 1].type == token.LPAR + and LL[string_idx - 1].value == "" + and string_idx >= 2 + ): + # If the previous leaf is an empty LPAR placeholder, we should skip it. + p_idx -= 1 + + P = LL[p_idx] + if P.type in self.STRING_OPERATORS: + # WMA4 a space and a string operator (e.g. `+ STRING` or `== STRING`). + offset += len(str(P)) + 1 + + if P.type == token.COMMA: + # WMA4 a space, a comma, and a closing bracket [e.g. `), STRING`]. + offset += 3 + + if P.type in [token.COLON, token.EQUAL, token.PLUSEQUAL, token.NAME]: + # This conditional branch is meant to handle dictionary keys, + # variable assignments, 'return STRING' statement lines, and + # 'else STRING' ternary expression lines. + + # WMA4 a single space. + offset += 1 + + # WMA4 the lengths of any leaves that came before that space, + # but after any closing bracket before that space. + for leaf in reversed(LL[: p_idx + 1]): + offset += len(str(leaf)) + if leaf.type in CLOSING_BRACKETS: + break + + if is_valid_index(string_idx + 1): + N = LL[string_idx + 1] + if N.type == token.RPAR and N.value == "" and len(LL) > string_idx + 2: + # If the next leaf is an empty RPAR placeholder, we should skip it. + N = LL[string_idx + 2] + + if N.type == token.COMMA: + # WMA4 a single comma at the end of the string (e.g `STRING,`). + offset += 1 + + if is_valid_index(string_idx + 2): + NN = LL[string_idx + 2] + + if N.type == token.DOT and NN.type == token.NAME: + # This conditional branch is meant to handle method calls invoked + # off of a string literal up to and including the LPAR character. + + # WMA4 the '.' character. + offset += 1 + + if ( + is_valid_index(string_idx + 3) + and LL[string_idx + 3].type == token.LPAR + ): + # WMA4 the left parenthesis character. + offset += 1 + + # WMA4 the length of the method's name. + offset += len(NN.value) + + has_comments = False + for comment_leaf in line.comments_after(LL[string_idx]): + if not has_comments: + has_comments = True + # WMA4 two spaces before the '#' character. + offset += 2 + + # WMA4 the length of the inline comment. + offset += len(comment_leaf.value) + + max_string_length = count_chars_in_width(str(line), self.line_length - offset) + return max_string_length + + @staticmethod + def _prefer_paren_wrap_match(LL: List[Leaf]) -> Optional[int]: + """ + Returns: + string_idx such that @LL[string_idx] is equal to our target (i.e. + matched) string, if this line matches the "prefer paren wrap" statement + requirements listed in the 'Requirements' section of the StringParenWrapper + class's docstring. + OR + None, otherwise. + """ + # The line must start with a string. + if LL[0].type != token.STRING: + return None + + matching_nodes = [ + syms.listmaker, + syms.dictsetmaker, + syms.testlist_gexp, + ] + # If the string is an immediate child of a list/set/tuple literal... + if ( + parent_type(LL[0]) in matching_nodes + or parent_type(LL[0].parent) in matching_nodes + ): + # And the string is surrounded by commas (or is the first/last child)... + prev_sibling = LL[0].prev_sibling + next_sibling = LL[0].next_sibling + if ( + not prev_sibling + and not next_sibling + and parent_type(LL[0]) == syms.atom + ): + # If it's an atom string, we need to check the parent atom's siblings. + parent = LL[0].parent + assert parent is not None # For type checkers. 
+ prev_sibling = parent.prev_sibling + next_sibling = parent.next_sibling + if (not prev_sibling or prev_sibling.type == token.COMMA) and ( + not next_sibling or next_sibling.type == token.COMMA + ): + return 0 + + return None + + +def iter_fexpr_spans(s: str) -> Iterator[Tuple[int, int]]: + """ + Yields spans corresponding to expressions in a given f-string. + Spans are half-open ranges (left inclusive, right exclusive). + Assumes the input string is a valid f-string, but will not crash if the input + string is invalid. + """ + stack: List[int] = [] # our curly paren stack + i = 0 + while i < len(s): + if s[i] == "{": + # if we're in a string part of the f-string, ignore escaped curly braces + if not stack and i + 1 < len(s) and s[i + 1] == "{": + i += 2 + continue + stack.append(i) + i += 1 + continue + + if s[i] == "}": + if not stack: + i += 1 + continue + j = stack.pop() + # we've made it back out of the expression! yield the span + if not stack: + yield (j, i + 1) + i += 1 + continue + + # if we're in an expression part of the f-string, fast-forward through strings + # note that backslashes are not legal in the expression portion of f-strings + if stack: + delim = None + if s[i : i + 3] in ("'''", '"""'): + delim = s[i : i + 3] + elif s[i] in ("'", '"'): + delim = s[i] + if delim: + i += len(delim) + while i < len(s) and s[i : i + len(delim)] != delim: + i += 1 + i += len(delim) + continue + i += 1 + + +def fstring_contains_expr(s: str) -> bool: + return any(iter_fexpr_spans(s)) + + +def _toggle_fexpr_quotes(fstring: str, old_quote: str) -> str: + """ + Toggles quotes used in f-string expressions that are `old_quote`. + + f-string expressions can't contain backslashes, so we need to toggle the + quotes if the f-string itself will end up using the same quote. We can + simply toggle without escaping because, quotes can't be reused in f-string + expressions. They will fail to parse. + + NOTE: If PEP 701 is accepted, above statement will no longer be true. + Though if quotes can be reused, we can simply reuse them without updates or + escaping, once Black figures out how to parse the new grammar. + """ + new_quote = "'" if old_quote == '"' else '"' + parts = [] + previous_index = 0 + for start, end in iter_fexpr_spans(fstring): + parts.append(fstring[previous_index:start]) + parts.append(fstring[start:end].replace(old_quote, new_quote)) + previous_index = end + parts.append(fstring[previous_index:]) + return "".join(parts) + + +class StringSplitter(BaseStringSplitter, CustomSplitMapMixin): + """ + StringTransformer that splits "atom" strings (i.e. strings which exist on + lines by themselves). + + Requirements: + * The line consists ONLY of a single string (possibly prefixed by a + string operator [e.g. '+' or '==']), MAYBE a string trailer, and MAYBE + a trailing comma. + AND + * All of the requirements listed in BaseStringSplitter's docstring. + + Transformations: + The string mentioned in the 'Requirements' section is split into as + many substrings as necessary to adhere to the configured line length. + + In the final set of substrings, no substring should be smaller than + MIN_SUBSTR_SIZE characters. + + The string will ONLY be split on spaces (i.e. each new substring should + start with a space). Note that the string will NOT be split on a space + which is escaped with a backslash. + + If the string is an f-string, it will NOT be split in the middle of an + f-expression (e.g. in f"FooBar: {foo() if x else bar()}", {foo() if x + else bar()} is an f-expression). 
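+
+        As an illustrative sketch: with a sufficiently small line length, a
+        literal like "one two three four" could be split into "one two" and
+        " three four", where the second substring starts with the space the
+        split occurred on and both pieces respect MIN_SUBSTR_SIZE.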
+ + If the string that is being split has an associated set of custom split + records and those custom splits will NOT result in any line going over + the configured line length, those custom splits are used. Otherwise the + string is split as late as possible (from left-to-right) while still + adhering to the transformation rules listed above. + + Collaborations: + StringSplitter relies on StringMerger to construct the appropriate + CustomSplit objects and add them to the custom split map. + """ + + MIN_SUBSTR_SIZE: Final = 6 + + def do_splitter_match(self, line: Line) -> TMatchResult: + LL = line.leaves + + if self._prefer_paren_wrap_match(LL) is not None: + return TErr("Line needs to be wrapped in parens first.") + + is_valid_index = is_valid_index_factory(LL) + + idx = 0 + + # The first two leaves MAY be the 'not in' keywords... + if ( + is_valid_index(idx) + and is_valid_index(idx + 1) + and [LL[idx].type, LL[idx + 1].type] == [token.NAME, token.NAME] + and str(LL[idx]) + str(LL[idx + 1]) == "not in" + ): + idx += 2 + # Else the first leaf MAY be a string operator symbol or the 'in' keyword... + elif is_valid_index(idx) and ( + LL[idx].type in self.STRING_OPERATORS + or LL[idx].type == token.NAME + and str(LL[idx]) == "in" + ): + idx += 1 + + # The next/first leaf MAY be an empty LPAR... + if is_valid_index(idx) and is_empty_lpar(LL[idx]): + idx += 1 + + # The next/first leaf MUST be a string... + if not is_valid_index(idx) or LL[idx].type != token.STRING: + return TErr("Line does not start with a string.") + + string_idx = idx + + # Skip the string trailer, if one exists. + string_parser = StringParser() + idx = string_parser.parse(LL, string_idx) + + # That string MAY be followed by an empty RPAR... + if is_valid_index(idx) and is_empty_rpar(LL[idx]): + idx += 1 + + # That string / empty RPAR leaf MAY be followed by a comma... + if is_valid_index(idx) and LL[idx].type == token.COMMA: + idx += 1 + + # But no more leaves are allowed... + if is_valid_index(idx): + return TErr("This line does not end with a string.") + + return Ok([string_idx]) + + def do_transform( + self, line: Line, string_indices: List[int] + ) -> Iterator[TResult[Line]]: + LL = line.leaves + assert len(string_indices) == 1, ( + f"{self.__class__.__name__} should only find one match at a time, found" + f" {len(string_indices)}" + ) + string_idx = string_indices[0] + + QUOTE = LL[string_idx].value[-1] + + is_valid_index = is_valid_index_factory(LL) + insert_str_child = insert_str_child_factory(LL[string_idx]) + + prefix = get_string_prefix(LL[string_idx].value).lower() + + # We MAY choose to drop the 'f' prefix from substrings that don't + # contain any f-expressions, but ONLY if the original f-string + # contains at least one f-expression. Otherwise, we will alter the AST + # of the program. + drop_pointless_f_prefix = ("f" in prefix) and fstring_contains_expr( + LL[string_idx].value + ) + + first_string_line = True + + string_op_leaves = self._get_string_operator_leaves(LL) + string_op_leaves_length = ( + sum(len(str(prefix_leaf)) for prefix_leaf in string_op_leaves) + 1 + if string_op_leaves + else 0 + ) + + def maybe_append_string_operators(new_line: Line) -> None: + """ + Side Effects: + If @line starts with a string operator and this is the first + line we are constructing, this function appends the string + operator to @new_line and replaces the old string operator leaf + in the node structure. Otherwise this function does nothing. 
+ """ + maybe_prefix_leaves = string_op_leaves if first_string_line else [] + for i, prefix_leaf in enumerate(maybe_prefix_leaves): + replace_child(LL[i], prefix_leaf) + new_line.append(prefix_leaf) + + ends_with_comma = ( + is_valid_index(string_idx + 1) and LL[string_idx + 1].type == token.COMMA + ) + + def max_last_string_column() -> int: + """ + Returns: + The max allowed width of the string value used for the last + line we will construct. Note that this value means the width + rather than the number of characters (e.g., many East Asian + characters expand to two columns). + """ + result = self.line_length + result -= line.depth * 4 + result -= 1 if ends_with_comma else 0 + result -= string_op_leaves_length + return result + + # --- Calculate Max Break Width (for string value) + # We start with the line length limit + max_break_width = self.line_length + # The last index of a string of length N is N-1. + max_break_width -= 1 + # Leading whitespace is not present in the string value (e.g. Leaf.value). + max_break_width -= line.depth * 4 + if max_break_width < 0: + yield TErr( + f"Unable to split {LL[string_idx].value} at such high of a line depth:" + f" {line.depth}" + ) + return + + # Check if StringMerger registered any custom splits. + custom_splits = self.pop_custom_splits(LL[string_idx].value) + # We use them ONLY if none of them would produce lines that exceed the + # line limit. + use_custom_breakpoints = bool( + custom_splits + and all(csplit.break_idx <= max_break_width for csplit in custom_splits) + ) + + # Temporary storage for the remaining chunk of the string line that + # can't fit onto the line currently being constructed. + rest_value = LL[string_idx].value + + def more_splits_should_be_made() -> bool: + """ + Returns: + True iff `rest_value` (the remaining string value from the last + split), should be split again. + """ + if use_custom_breakpoints: + return len(custom_splits) > 1 + else: + return str_width(rest_value) > max_last_string_column() + + string_line_results: List[Ok[Line]] = [] + while more_splits_should_be_made(): + if use_custom_breakpoints: + # Custom User Split (manual) + csplit = custom_splits.pop(0) + break_idx = csplit.break_idx + else: + # Algorithmic Split (automatic) + max_bidx = ( + count_chars_in_width(rest_value, max_break_width) + - string_op_leaves_length + ) + maybe_break_idx = self._get_break_idx(rest_value, max_bidx) + if maybe_break_idx is None: + # If we are unable to algorithmically determine a good split + # and this string has custom splits registered to it, we + # fall back to using them--which means we have to start + # over from the beginning. + if custom_splits: + rest_value = LL[string_idx].value + string_line_results = [] + first_string_line = True + use_custom_breakpoints = True + continue + + # Otherwise, we stop splitting here. + break + + break_idx = maybe_break_idx + + # --- Construct `next_value` + next_value = rest_value[:break_idx] + QUOTE + + # HACK: The following 'if' statement is a hack to fix the custom + # breakpoint index in the case of either: (a) substrings that were + # f-strings but will have the 'f' prefix removed OR (b) substrings + # that were not f-strings but will now become f-strings because of + # redundant use of the 'f' prefix (i.e. none of the substrings + # contain f-expressions but one or more of them had the 'f' prefix + # anyway; in which case, we will prepend 'f' to _all_ substrings). + # + # There is probably a better way to accomplish what is being done + # here... 
+ # + # If this substring is an f-string, we _could_ remove the 'f' + # prefix, and the current custom split did NOT originally use a + # prefix... + if ( + use_custom_breakpoints + and not csplit.has_prefix + and ( + # `next_value == prefix + QUOTE` happens when the custom + # split is an empty string. + next_value == prefix + QUOTE + or next_value != self._normalize_f_string(next_value, prefix) + ) + ): + # Then `csplit.break_idx` will be off by one after removing + # the 'f' prefix. + break_idx += 1 + next_value = rest_value[:break_idx] + QUOTE + + if drop_pointless_f_prefix: + next_value = self._normalize_f_string(next_value, prefix) + + # --- Construct `next_leaf` + next_leaf = Leaf(token.STRING, next_value) + insert_str_child(next_leaf) + self._maybe_normalize_string_quotes(next_leaf) + + # --- Construct `next_line` + next_line = line.clone() + maybe_append_string_operators(next_line) + next_line.append(next_leaf) + string_line_results.append(Ok(next_line)) + + rest_value = prefix + QUOTE + rest_value[break_idx:] + first_string_line = False + + yield from string_line_results + + if drop_pointless_f_prefix: + rest_value = self._normalize_f_string(rest_value, prefix) + + rest_leaf = Leaf(token.STRING, rest_value) + insert_str_child(rest_leaf) + + # NOTE: I could not find a test case that verifies that the following + # line is actually necessary, but it seems to be. Otherwise we risk + # not normalizing the last substring, right? + self._maybe_normalize_string_quotes(rest_leaf) + + last_line = line.clone() + maybe_append_string_operators(last_line) + + # If there are any leaves to the right of the target string... + if is_valid_index(string_idx + 1): + # We use `temp_value` here to determine how long the last line + # would be if we were to append all the leaves to the right of the + # target string to the last string line. + temp_value = rest_value + for leaf in LL[string_idx + 1 :]: + temp_value += str(leaf) + if leaf.type == token.LPAR: + break + + # Try to fit them all on the same line with the last substring... + if ( + str_width(temp_value) <= max_last_string_column() + or LL[string_idx + 1].type == token.COMMA + ): + last_line.append(rest_leaf) + append_leaves(last_line, line, LL[string_idx + 1 :]) + yield Ok(last_line) + # Otherwise, place the last substring on one line and everything + # else on a line below that... + else: + last_line.append(rest_leaf) + yield Ok(last_line) + + non_string_line = line.clone() + append_leaves(non_string_line, line, LL[string_idx + 1 :]) + yield Ok(non_string_line) + # Else the target string was the last leaf... + else: + last_line.append(rest_leaf) + last_line.comments = line.comments.copy() + yield Ok(last_line) + + def _iter_nameescape_slices(self, string: str) -> Iterator[Tuple[Index, Index]]: + """ + Yields: + All ranges of @string which, if @string were to be split there, + would result in the splitting of an \\N{...} expression (which is NOT + allowed). 
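+
+        For example (illustrative): in the literal "\\N{GREEK SMALL LETTER ALPHA}",
+        the yielded range runs from the backslash through the closing brace, so
+        no split point may fall inside that escape sequence.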
+ """ + # True - the previous backslash was unescaped + # False - the previous backslash was escaped *or* there was no backslash + previous_was_unescaped_backslash = False + it = iter(enumerate(string)) + for idx, c in it: + if c == "\\": + previous_was_unescaped_backslash = not previous_was_unescaped_backslash + continue + if not previous_was_unescaped_backslash or c != "N": + previous_was_unescaped_backslash = False + continue + previous_was_unescaped_backslash = False + + begin = idx - 1 # the position of backslash before \N{...} + for idx, c in it: + if c == "}": + end = idx + break + else: + # malformed nameescape expression? + # should have been detected by AST parsing earlier... + raise RuntimeError(f"{self.__class__.__name__} LOGIC ERROR!") + yield begin, end + + def _iter_fexpr_slices(self, string: str) -> Iterator[Tuple[Index, Index]]: + """ + Yields: + All ranges of @string which, if @string were to be split there, + would result in the splitting of an f-expression (which is NOT + allowed). + """ + if "f" not in get_string_prefix(string).lower(): + return + yield from iter_fexpr_spans(string) + + def _get_illegal_split_indices(self, string: str) -> Set[Index]: + illegal_indices: Set[Index] = set() + iterators = [ + self._iter_fexpr_slices(string), + self._iter_nameescape_slices(string), + ] + for it in iterators: + for begin, end in it: + illegal_indices.update(range(begin, end + 1)) + return illegal_indices + + def _get_break_idx(self, string: str, max_break_idx: int) -> Optional[int]: + """ + This method contains the algorithm that StringSplitter uses to + determine which character to split each string at. + + Args: + @string: The substring that we are attempting to split. + @max_break_idx: The ideal break index. We will return this value if it + meets all the necessary conditions. In the likely event that it + doesn't we will try to find the closest index BELOW @max_break_idx + that does. If that fails, we will expand our search by also + considering all valid indices ABOVE @max_break_idx. + + Pre-Conditions: + * assert_is_leaf_string(@string) + * 0 <= @max_break_idx < len(@string) + + Returns: + break_idx, if an index is able to be found that meets all of the + conditions listed in the 'Transformations' section of this classes' + docstring. + OR + None, otherwise. + """ + is_valid_index = is_valid_index_factory(string) + + assert is_valid_index(max_break_idx) + assert_is_leaf_string(string) + + _illegal_split_indices = self._get_illegal_split_indices(string) + + def breaks_unsplittable_expression(i: Index) -> bool: + """ + Returns: + True iff returning @i would result in the splitting of an + unsplittable expression (which is NOT allowed). + """ + return i in _illegal_split_indices + + def passes_all_checks(i: Index) -> bool: + """ + Returns: + True iff ALL of the conditions listed in the 'Transformations' + section of this classes' docstring would be met by returning @i. + """ + is_space = string[i] == " " + is_split_safe = is_valid_index(i - 1) and string[i - 1] in SPLIT_SAFE_CHARS + + is_not_escaped = True + j = i - 1 + while is_valid_index(j) and string[j] == "\\": + is_not_escaped = not is_not_escaped + j -= 1 + + is_big_enough = ( + len(string[i:]) >= self.MIN_SUBSTR_SIZE + and len(string[:i]) >= self.MIN_SUBSTR_SIZE + ) + return ( + (is_space or is_split_safe) + and is_not_escaped + and is_big_enough + and not breaks_unsplittable_expression(i) + ) + + # First, we check all indices BELOW @max_break_idx. 
+        break_idx = max_break_idx
+        while is_valid_index(break_idx - 1) and not passes_all_checks(break_idx):
+            break_idx -= 1
+
+        if not passes_all_checks(break_idx):
+            # If that fails, we check all indices ABOVE @max_break_idx.
+            #
+            # If we are able to find a valid index here, the next line is going
+            # to be longer than the specified line length, but it's probably
+            # better than doing nothing at all.
+            break_idx = max_break_idx + 1
+            while is_valid_index(break_idx + 1) and not passes_all_checks(break_idx):
+                break_idx += 1
+
+            if not is_valid_index(break_idx) or not passes_all_checks(break_idx):
+                return None
+
+        return break_idx
+
+    def _maybe_normalize_string_quotes(self, leaf: Leaf) -> None:
+        if self.normalize_strings:
+            leaf.value = normalize_string_quotes(leaf.value)
+
+    def _normalize_f_string(self, string: str, prefix: str) -> str:
+        """
+        Pre-Conditions:
+            * assert_is_leaf_string(@string)
+
+        Returns:
+            * If @string is an f-string that contains no f-expressions, we
+              return a string identical to @string except that the 'f' prefix
+              has been stripped and all double braces (i.e. '{{' or '}}') have
+              been normalized (i.e. turned into '{' or '}').
+              OR
+            * Otherwise, we return @string.
+        """
+        assert_is_leaf_string(string)
+
+        if "f" in prefix and not fstring_contains_expr(string):
+            new_prefix = prefix.replace("f", "")
+
+            temp = string[len(prefix) :]
+            temp = re.sub(r"\{\{", "{", temp)
+            temp = re.sub(r"\}\}", "}", temp)
+            new_string = temp
+
+            return f"{new_prefix}{new_string}"
+        else:
+            return string
+
+    def _get_string_operator_leaves(self, leaves: Iterable[Leaf]) -> List[Leaf]:
+        LL = list(leaves)
+
+        string_op_leaves = []
+        i = 0
+        while LL[i].type in self.STRING_OPERATORS + [token.NAME]:
+            prefix_leaf = Leaf(LL[i].type, str(LL[i]).strip())
+            string_op_leaves.append(prefix_leaf)
+            i += 1
+        return string_op_leaves
+
+
+class StringParenWrapper(BaseStringSplitter, CustomSplitMapMixin):
+    """
+    StringTransformer that wraps strings in parens and then splits at the LPAR.
+
+    Requirements:
+        All of the requirements listed in BaseStringSplitter's docstring in
+        addition to the requirements listed below:
+
+        * The line is a return/yield statement, which returns/yields a string.
+          OR
+        * The line is part of a ternary expression (e.g. `x = y if cond else
+          z`) such that the line starts with `else <string>`, where <string> is
+          some string.
+          OR
+        * The line is an assert statement, which ends with a string.
+          OR
+        * The line is an assignment statement (e.g. `x = <string>` or `x +=
+          <string>`) such that the variable is being assigned the value of some
+          string.
+          OR
+        * The line is a dictionary key assignment where some valid key is being
+          assigned the value of some string.
+          OR
+        * The line is a lambda expression and the value is a string.
+          OR
+        * The line starts with an "atom" string that prefers to be wrapped in
+          parens. It's preferred to be wrapped when it is an immediate child of
+          a list/set/tuple literal, AND the string is surrounded by commas (or
+          is the first/last child).
+
+    Transformations:
+        The chosen string is wrapped in parentheses and then split at the LPAR.
+
+        We then have one line which ends with an LPAR and another line that
+        starts with the chosen string. The latter line is then split again at
+        the RPAR. This results in the RPAR (and possibly a trailing comma)
+        being placed on its own line.
+
+        NOTE: If any leaves exist to the right of the chosen string (except
+        for a trailing comma, which would be placed after the RPAR), those
+        leaves are placed inside the parentheses.
In effect, the chosen + string is not necessarily being "wrapped" by parentheses. We can, + however, count on the LPAR being placed directly before the chosen + string. + + In other words, StringParenWrapper creates "atom" strings. These + can then be split again by StringSplitter, if necessary. + + Collaborations: + In the event that a string line split by StringParenWrapper is + changed such that it no longer needs to be given its own line, + StringParenWrapper relies on StringParenStripper to clean up the + parentheses it created. + + For "atom" strings that prefers to be wrapped in parens, it requires + StringSplitter to hold the split until the string is wrapped in parens. + """ + + def do_splitter_match(self, line: Line) -> TMatchResult: + LL = line.leaves + + if line.leaves[-1].type in OPENING_BRACKETS: + return TErr( + "Cannot wrap parens around a line that ends in an opening bracket." + ) + + string_idx = ( + self._return_match(LL) + or self._else_match(LL) + or self._assert_match(LL) + or self._assign_match(LL) + or self._dict_or_lambda_match(LL) + or self._prefer_paren_wrap_match(LL) + ) + + if string_idx is not None: + string_value = line.leaves[string_idx].value + # If the string has neither spaces nor East Asian stops... + if not any( + char == " " or char in SPLIT_SAFE_CHARS for char in string_value + ): + # And will still violate the line length limit when split... + max_string_width = self.line_length - ((line.depth + 1) * 4) + if str_width(string_value) > max_string_width: + # And has no associated custom splits... + if not self.has_custom_splits(string_value): + # Then we should NOT put this string on its own line. + return TErr( + "We do not wrap long strings in parentheses when the" + " resultant line would still be over the specified line" + " length and can't be split further by StringSplitter." + ) + return Ok([string_idx]) + + return TErr("This line does not contain any non-atomic strings.") + + @staticmethod + def _return_match(LL: List[Leaf]) -> Optional[int]: + """ + Returns: + string_idx such that @LL[string_idx] is equal to our target (i.e. + matched) string, if this line matches the return/yield statement + requirements listed in the 'Requirements' section of this classes' + docstring. + OR + None, otherwise. + """ + # If this line is a part of a return/yield statement and the first leaf + # contains either the "return" or "yield" keywords... + if parent_type(LL[0]) in [syms.return_stmt, syms.yield_expr] and LL[ + 0 + ].value in ["return", "yield"]: + is_valid_index = is_valid_index_factory(LL) + + idx = 2 if is_valid_index(1) and is_empty_par(LL[1]) else 1 + # The next visible leaf MUST contain a string... + if is_valid_index(idx) and LL[idx].type == token.STRING: + return idx + + return None + + @staticmethod + def _else_match(LL: List[Leaf]) -> Optional[int]: + """ + Returns: + string_idx such that @LL[string_idx] is equal to our target (i.e. + matched) string, if this line matches the ternary expression + requirements listed in the 'Requirements' section of this classes' + docstring. + OR + None, otherwise. + """ + # If this line is a part of a ternary expression and the first leaf + # contains the "else" keyword... + if ( + parent_type(LL[0]) == syms.test + and LL[0].type == token.NAME + and LL[0].value == "else" + ): + is_valid_index = is_valid_index_factory(LL) + + idx = 2 if is_valid_index(1) and is_empty_par(LL[1]) else 1 + # The next visible leaf MUST contain a string... 
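+            # Illustration (hypothetical input, not from the original code):
+            # when `x = y if cond else "some very long string"` is split so
+            # that one line begins with `else`, LL[0] is that NAME leaf and
+            # the string leaf sits at idx 1 (or 2 when an empty paren leaf
+            # comes in between), which is what the check below verifies.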
+ if is_valid_index(idx) and LL[idx].type == token.STRING: + return idx + + return None + + @staticmethod + def _assert_match(LL: List[Leaf]) -> Optional[int]: + """ + Returns: + string_idx such that @LL[string_idx] is equal to our target (i.e. + matched) string, if this line matches the assert statement + requirements listed in the 'Requirements' section of this classes' + docstring. + OR + None, otherwise. + """ + # If this line is a part of an assert statement and the first leaf + # contains the "assert" keyword... + if parent_type(LL[0]) == syms.assert_stmt and LL[0].value == "assert": + is_valid_index = is_valid_index_factory(LL) + + for i, leaf in enumerate(LL): + # We MUST find a comma... + if leaf.type == token.COMMA: + idx = i + 2 if is_empty_par(LL[i + 1]) else i + 1 + + # That comma MUST be followed by a string... + if is_valid_index(idx) and LL[idx].type == token.STRING: + string_idx = idx + + # Skip the string trailer, if one exists. + string_parser = StringParser() + idx = string_parser.parse(LL, string_idx) + + # But no more leaves are allowed... + if not is_valid_index(idx): + return string_idx + + return None + + @staticmethod + def _assign_match(LL: List[Leaf]) -> Optional[int]: + """ + Returns: + string_idx such that @LL[string_idx] is equal to our target (i.e. + matched) string, if this line matches the assignment statement + requirements listed in the 'Requirements' section of this classes' + docstring. + OR + None, otherwise. + """ + # If this line is a part of an expression statement or is a function + # argument AND the first leaf contains a variable name... + if ( + parent_type(LL[0]) in [syms.expr_stmt, syms.argument, syms.power] + and LL[0].type == token.NAME + ): + is_valid_index = is_valid_index_factory(LL) + + for i, leaf in enumerate(LL): + # We MUST find either an '=' or '+=' symbol... + if leaf.type in [token.EQUAL, token.PLUSEQUAL]: + idx = i + 2 if is_empty_par(LL[i + 1]) else i + 1 + + # That symbol MUST be followed by a string... + if is_valid_index(idx) and LL[idx].type == token.STRING: + string_idx = idx + + # Skip the string trailer, if one exists. + string_parser = StringParser() + idx = string_parser.parse(LL, string_idx) + + # The next leaf MAY be a comma iff this line is a part + # of a function argument... + if ( + parent_type(LL[0]) == syms.argument + and is_valid_index(idx) + and LL[idx].type == token.COMMA + ): + idx += 1 + + # But no more leaves are allowed... + if not is_valid_index(idx): + return string_idx + + return None + + @staticmethod + def _dict_or_lambda_match(LL: List[Leaf]) -> Optional[int]: + """ + Returns: + string_idx such that @LL[string_idx] is equal to our target (i.e. + matched) string, if this line matches the dictionary key assignment + statement or lambda expression requirements listed in the + 'Requirements' section of this classes' docstring. + OR + None, otherwise. + """ + # If this line is a part of a dictionary key assignment or lambda expression... + parent_types = [parent_type(LL[0]), parent_type(LL[0].parent)] + if syms.dictsetmaker in parent_types or syms.lambdef in parent_types: + is_valid_index = is_valid_index_factory(LL) + + for i, leaf in enumerate(LL): + # We MUST find a colon, it can either be dict's or lambda's colon... + if leaf.type == token.COLON and i < len(LL) - 1: + idx = i + 2 if is_empty_par(LL[i + 1]) else i + 1 + + # That colon MUST be followed by a string... + if is_valid_index(idx) and LL[idx].type == token.STRING: + string_idx = idx + + # Skip the string trailer, if one exists. 
+ string_parser = StringParser() + idx = string_parser.parse(LL, string_idx) + + # That string MAY be followed by a comma... + if is_valid_index(idx) and LL[idx].type == token.COMMA: + idx += 1 + + # But no more leaves are allowed... + if not is_valid_index(idx): + return string_idx + + return None + + def do_transform( + self, line: Line, string_indices: List[int] + ) -> Iterator[TResult[Line]]: + LL = line.leaves + assert len(string_indices) == 1, ( + f"{self.__class__.__name__} should only find one match at a time, found" + f" {len(string_indices)}" + ) + string_idx = string_indices[0] + + is_valid_index = is_valid_index_factory(LL) + insert_str_child = insert_str_child_factory(LL[string_idx]) + + comma_idx = -1 + ends_with_comma = False + if LL[comma_idx].type == token.COMMA: + ends_with_comma = True + + leaves_to_steal_comments_from = [LL[string_idx]] + if ends_with_comma: + leaves_to_steal_comments_from.append(LL[comma_idx]) + + # --- First Line + first_line = line.clone() + left_leaves = LL[:string_idx] + + # We have to remember to account for (possibly invisible) LPAR and RPAR + # leaves that already wrapped the target string. If these leaves do + # exist, we will replace them with our own LPAR and RPAR leaves. + old_parens_exist = False + if left_leaves and left_leaves[-1].type == token.LPAR: + old_parens_exist = True + leaves_to_steal_comments_from.append(left_leaves[-1]) + left_leaves.pop() + + append_leaves(first_line, line, left_leaves) + + lpar_leaf = Leaf(token.LPAR, "(") + if old_parens_exist: + replace_child(LL[string_idx - 1], lpar_leaf) + else: + insert_str_child(lpar_leaf) + first_line.append(lpar_leaf) + + # We throw inline comments that were originally to the right of the + # target string to the top line. They will now be shown to the right of + # the LPAR. + for leaf in leaves_to_steal_comments_from: + for comment_leaf in line.comments_after(leaf): + first_line.append(comment_leaf, preformatted=True) + + yield Ok(first_line) + + # --- Middle (String) Line + # We only need to yield one (possibly too long) string line, since the + # `StringSplitter` will break it down further if necessary. + string_value = LL[string_idx].value + string_line = Line( + mode=line.mode, + depth=line.depth + 1, + inside_brackets=True, + should_split_rhs=line.should_split_rhs, + magic_trailing_comma=line.magic_trailing_comma, + ) + string_leaf = Leaf(token.STRING, string_value) + insert_str_child(string_leaf) + string_line.append(string_leaf) + + old_rpar_leaf = None + if is_valid_index(string_idx + 1): + right_leaves = LL[string_idx + 1 :] + if ends_with_comma: + right_leaves.pop() + + if old_parens_exist: + assert right_leaves and right_leaves[-1].type == token.RPAR, ( + "Apparently, old parentheses do NOT exist?!" 
+ f" (left_leaves={left_leaves}, right_leaves={right_leaves})" + ) + old_rpar_leaf = right_leaves.pop() + elif right_leaves and right_leaves[-1].type == token.RPAR: + # Special case for lambda expressions as dict's value, e.g.: + # my_dict = { + # "key": lambda x: f"formatted: {x}, + # } + # After wrapping the dict's value with parentheses, the string is + # followed by a RPAR but its opening bracket is lambda's, not + # the string's: + # "key": (lambda x: f"formatted: {x}), + opening_bracket = right_leaves[-1].opening_bracket + if opening_bracket is not None and opening_bracket in left_leaves: + index = left_leaves.index(opening_bracket) + if ( + 0 < index < len(left_leaves) - 1 + and left_leaves[index - 1].type == token.COLON + and left_leaves[index + 1].value == "lambda" + ): + right_leaves.pop() + + append_leaves(string_line, line, right_leaves) + + yield Ok(string_line) + + # --- Last Line + last_line = line.clone() + last_line.bracket_tracker = first_line.bracket_tracker + + new_rpar_leaf = Leaf(token.RPAR, ")") + if old_rpar_leaf is not None: + replace_child(old_rpar_leaf, new_rpar_leaf) + else: + insert_str_child(new_rpar_leaf) + last_line.append(new_rpar_leaf) + + # If the target string ended with a comma, we place this comma to the + # right of the RPAR on the last line. + if ends_with_comma: + comma_leaf = Leaf(token.COMMA, ",") + replace_child(LL[comma_idx], comma_leaf) + last_line.append(comma_leaf) + + yield Ok(last_line) + + +class StringParser: + """ + A state machine that aids in parsing a string's "trailer", which can be + either non-existent, an old-style formatting sequence (e.g. `% varX` or `% + (varX, varY)`), or a method-call / attribute access (e.g. `.format(varX, + varY)`). + + NOTE: A new StringParser object MUST be instantiated for each string + trailer we need to parse. + + Examples: + We shall assume that `line` equals the `Line` object that corresponds + to the following line of python code: + ``` + x = "Some {}.".format("String") + some_other_string + ``` + + Furthermore, we will assume that `string_idx` is some index such that: + ``` + assert line.leaves[string_idx].value == "Some {}." + ``` + + The following code snippet then holds: + ``` + string_parser = StringParser() + idx = string_parser.parse(line.leaves, string_idx) + assert line.leaves[idx].type == token.PLUS + ``` + """ + + DEFAULT_TOKEN: Final = 20210605 + + # String Parser States + START: Final = 1 + DOT: Final = 2 + NAME: Final = 3 + PERCENT: Final = 4 + SINGLE_FMT_ARG: Final = 5 + LPAR: Final = 6 + RPAR: Final = 7 + DONE: Final = 8 + + # Lookup Table for Next State + _goto: Final[Dict[Tuple[ParserState, NodeType], ParserState]] = { + # A string trailer may start with '.' OR '%'. + (START, token.DOT): DOT, + (START, token.PERCENT): PERCENT, + (START, DEFAULT_TOKEN): DONE, + # A '.' MUST be followed by an attribute or method name. + (DOT, token.NAME): NAME, + # A method name MUST be followed by an '(', whereas an attribute name + # is the last symbol in the string trailer. + (NAME, token.LPAR): LPAR, + (NAME, DEFAULT_TOKEN): DONE, + # A '%' symbol can be followed by an '(' or a single argument (e.g. a + # string or variable name). + (PERCENT, token.LPAR): LPAR, + (PERCENT, DEFAULT_TOKEN): SINGLE_FMT_ARG, + # If a '%' symbol is followed by a single argument, that argument is + # the last leaf in the string trailer. + (SINGLE_FMT_ARG, DEFAULT_TOKEN): DONE, + # If present, a ')' symbol is the last symbol in a string trailer. 
+ # (NOTE: LPARS and nested RPARS are not included in this lookup table, + # since they are treated as a special case by the parsing logic in this + # classes' implementation.) + (RPAR, DEFAULT_TOKEN): DONE, + } + + def __init__(self) -> None: + self._state = self.START + self._unmatched_lpars = 0 + + def parse(self, leaves: List[Leaf], string_idx: int) -> int: + """ + Pre-conditions: + * @leaves[@string_idx].type == token.STRING + + Returns: + The index directly after the last leaf which is a part of the string + trailer, if a "trailer" exists. + OR + @string_idx + 1, if no string "trailer" exists. + """ + assert leaves[string_idx].type == token.STRING + + idx = string_idx + 1 + while idx < len(leaves) and self._next_state(leaves[idx]): + idx += 1 + return idx + + def _next_state(self, leaf: Leaf) -> bool: + """ + Pre-conditions: + * On the first call to this function, @leaf MUST be the leaf that + was directly after the string leaf in question (e.g. if our target + string is `line.leaves[i]` then the first call to this method must + be `line.leaves[i + 1]`). + * On the next call to this function, the leaf parameter passed in + MUST be the leaf directly following @leaf. + + Returns: + True iff @leaf is a part of the string's trailer. + """ + # We ignore empty LPAR or RPAR leaves. + if is_empty_par(leaf): + return True + + next_token = leaf.type + if next_token == token.LPAR: + self._unmatched_lpars += 1 + + current_state = self._state + + # The LPAR parser state is a special case. We will return True until we + # find the matching RPAR token. + if current_state == self.LPAR: + if next_token == token.RPAR: + self._unmatched_lpars -= 1 + if self._unmatched_lpars == 0: + self._state = self.RPAR + # Otherwise, we use a lookup table to determine the next state. + else: + # If the lookup table matches the current state to the next + # token, we use the lookup table. + if (current_state, next_token) in self._goto: + self._state = self._goto[current_state, next_token] + else: + # Otherwise, we check if a the current state was assigned a + # default. + if (current_state, self.DEFAULT_TOKEN) in self._goto: + self._state = self._goto[current_state, self.DEFAULT_TOKEN] + # If no default has been assigned, then this parser has a logic + # error. + else: + raise RuntimeError(f"{self.__class__.__name__} LOGIC ERROR!") + + if self._state == self.DONE: + return False + + return True + + +def insert_str_child_factory(string_leaf: Leaf) -> Callable[[LN], None]: + """ + Factory for a convenience function that is used to orphan @string_leaf + and then insert multiple new leaves into the same part of the node + structure that @string_leaf had originally occupied. + + Examples: + Let `string_leaf = Leaf(token.STRING, '"foo"')` and `N = + string_leaf.parent`. Assume the node `N` has the following + original structure: + + Node( + expr_stmt, [ + Leaf(NAME, 'x'), + Leaf(EQUAL, '='), + Leaf(STRING, '"foo"'), + ] + ) + + We then run the code snippet shown below. 
+ ``` + insert_str_child = insert_str_child_factory(string_leaf) + + lpar = Leaf(token.LPAR, '(') + insert_str_child(lpar) + + bar = Leaf(token.STRING, '"bar"') + insert_str_child(bar) + + rpar = Leaf(token.RPAR, ')') + insert_str_child(rpar) + ``` + + After which point, it follows that `string_leaf.parent is None` and + the node `N` now has the following structure: + + Node( + expr_stmt, [ + Leaf(NAME, 'x'), + Leaf(EQUAL, '='), + Leaf(LPAR, '('), + Leaf(STRING, '"bar"'), + Leaf(RPAR, ')'), + ] + ) + """ + string_parent = string_leaf.parent + string_child_idx = string_leaf.remove() + + def insert_str_child(child: LN) -> None: + nonlocal string_child_idx + + assert string_parent is not None + assert string_child_idx is not None + + string_parent.insert_child(string_child_idx, child) + string_child_idx += 1 + + return insert_str_child + + +def is_valid_index_factory(seq: Sequence[Any]) -> Callable[[int], bool]: + """ + Examples: + ``` + my_list = [1, 2, 3] + + is_valid_index = is_valid_index_factory(my_list) + + assert is_valid_index(0) + assert is_valid_index(2) + + assert not is_valid_index(3) + assert not is_valid_index(-1) + ``` + """ + + def is_valid_index(idx: int) -> bool: + """ + Returns: + True iff @idx is positive AND seq[@idx] does NOT raise an + IndexError. + """ + return 0 <= idx < len(seq) + + return is_valid_index diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blackd/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/blackd/__init__.py new file mode 100644 index 000000000..7041671f5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blackd/__init__.py @@ -0,0 +1,259 @@ +import asyncio +import logging +from concurrent.futures import Executor, ProcessPoolExecutor +from datetime import datetime, timezone +from functools import partial +from multiprocessing import freeze_support +from typing import Set, Tuple + +try: + from aiohttp import web + from multidict import MultiMapping + + from .middlewares import cors +except ImportError as ie: + raise ImportError( + f"aiohttp dependency is not installed: {ie}. 
" + + "Please re-install black with the '[d]' extra install " + + "to obtain aiohttp_cors: `pip install black[d]`" + ) from None + +import click + +import black +from _black_version import version as __version__ +from black.concurrency import maybe_install_uvloop + +# This is used internally by tests to shut down the server prematurely +_stop_signal = asyncio.Event() + +# Request headers +PROTOCOL_VERSION_HEADER = "X-Protocol-Version" +LINE_LENGTH_HEADER = "X-Line-Length" +PYTHON_VARIANT_HEADER = "X-Python-Variant" +SKIP_SOURCE_FIRST_LINE = "X-Skip-Source-First-Line" +SKIP_STRING_NORMALIZATION_HEADER = "X-Skip-String-Normalization" +SKIP_MAGIC_TRAILING_COMMA = "X-Skip-Magic-Trailing-Comma" +PREVIEW = "X-Preview" +UNSTABLE = "X-Unstable" +ENABLE_UNSTABLE_FEATURE = "X-Enable-Unstable-Feature" +FAST_OR_SAFE_HEADER = "X-Fast-Or-Safe" +DIFF_HEADER = "X-Diff" + +BLACK_HEADERS = [ + PROTOCOL_VERSION_HEADER, + LINE_LENGTH_HEADER, + PYTHON_VARIANT_HEADER, + SKIP_SOURCE_FIRST_LINE, + SKIP_STRING_NORMALIZATION_HEADER, + SKIP_MAGIC_TRAILING_COMMA, + PREVIEW, + UNSTABLE, + ENABLE_UNSTABLE_FEATURE, + FAST_OR_SAFE_HEADER, + DIFF_HEADER, +] + +# Response headers +BLACK_VERSION_HEADER = "X-Black-Version" + + +class HeaderError(Exception): + pass + + +class InvalidVariantHeader(Exception): + pass + + +@click.command(context_settings={"help_option_names": ["-h", "--help"]}) +@click.option( + "--bind-host", + type=str, + help="Address to bind the server to.", + default="localhost", + show_default=True, +) +@click.option( + "--bind-port", type=int, help="Port to listen on", default=45484, show_default=True +) +@click.version_option(version=black.__version__) +def main(bind_host: str, bind_port: int) -> None: + logging.basicConfig(level=logging.INFO) + app = make_app() + ver = black.__version__ + black.out(f"blackd version {ver} listening on {bind_host} port {bind_port}") + web.run_app(app, host=bind_host, port=bind_port, handle_signals=True, print=None) + + +def make_app() -> web.Application: + app = web.Application( + middlewares=[cors(allow_headers=(*BLACK_HEADERS, "Content-Type"))] + ) + executor = ProcessPoolExecutor() + app.add_routes([web.post("/", partial(handle, executor=executor))]) + return app + + +async def handle(request: web.Request, executor: Executor) -> web.Response: + headers = {BLACK_VERSION_HEADER: __version__} + try: + if request.headers.get(PROTOCOL_VERSION_HEADER, "1") != "1": + return web.Response( + status=501, text="This server only supports protocol version 1" + ) + + fast = False + if request.headers.get(FAST_OR_SAFE_HEADER, "safe") == "fast": + fast = True + try: + mode = parse_mode(request.headers) + except HeaderError as e: + return web.Response(status=400, text=e.args[0]) + req_bytes = await request.content.read() + charset = request.charset if request.charset is not None else "utf8" + req_str = req_bytes.decode(charset) + then = datetime.now(timezone.utc) + + header = "" + if mode.skip_source_first_line: + first_newline_position: int = req_str.find("\n") + 1 + header = req_str[:first_newline_position] + req_str = req_str[first_newline_position:] + + loop = asyncio.get_event_loop() + formatted_str = await loop.run_in_executor( + executor, partial(black.format_file_contents, req_str, fast=fast, mode=mode) + ) + + # Preserve CRLF line endings + nl = req_str.find("\n") + if nl > 0 and req_str[nl - 1] == "\r": + formatted_str = formatted_str.replace("\n", "\r\n") + # If, after swapping line endings, nothing changed, then say so + if formatted_str == req_str: + raise 
black.NothingChanged + + # Put the source first line back + req_str = header + req_str + formatted_str = header + formatted_str + + # Only output the diff in the HTTP response + only_diff = bool(request.headers.get(DIFF_HEADER, False)) + if only_diff: + now = datetime.now(timezone.utc) + src_name = f"In\t{then}" + dst_name = f"Out\t{now}" + loop = asyncio.get_event_loop() + formatted_str = await loop.run_in_executor( + executor, + partial(black.diff, req_str, formatted_str, src_name, dst_name), + ) + + return web.Response( + content_type=request.content_type, + charset=charset, + headers=headers, + text=formatted_str, + ) + except black.NothingChanged: + return web.Response(status=204, headers=headers) + except black.InvalidInput as e: + return web.Response(status=400, headers=headers, text=str(e)) + except Exception as e: + logging.exception("Exception during handling a request") + return web.Response(status=500, headers=headers, text=str(e)) + + +def parse_mode(headers: MultiMapping[str]) -> black.Mode: + try: + line_length = int(headers.get(LINE_LENGTH_HEADER, black.DEFAULT_LINE_LENGTH)) + except ValueError: + raise HeaderError("Invalid line length header value") from None + + if PYTHON_VARIANT_HEADER in headers: + value = headers[PYTHON_VARIANT_HEADER] + try: + pyi, versions = parse_python_variant_header(value) + except InvalidVariantHeader as e: + raise HeaderError( + f"Invalid value for {PYTHON_VARIANT_HEADER}: {e.args[0]}", + ) from None + else: + pyi = False + versions = set() + + skip_string_normalization = bool( + headers.get(SKIP_STRING_NORMALIZATION_HEADER, False) + ) + skip_magic_trailing_comma = bool(headers.get(SKIP_MAGIC_TRAILING_COMMA, False)) + skip_source_first_line = bool(headers.get(SKIP_SOURCE_FIRST_LINE, False)) + + preview = bool(headers.get(PREVIEW, False)) + unstable = bool(headers.get(UNSTABLE, False)) + enable_features: Set[black.Preview] = set() + enable_unstable_features = headers.get(ENABLE_UNSTABLE_FEATURE, "").split(",") + for piece in enable_unstable_features: + piece = piece.strip() + if piece: + try: + enable_features.add(black.Preview[piece]) + except KeyError: + raise HeaderError( + f"Invalid value for {ENABLE_UNSTABLE_FEATURE}: {piece}", + ) from None + + return black.FileMode( + target_versions=versions, + is_pyi=pyi, + line_length=line_length, + skip_source_first_line=skip_source_first_line, + string_normalization=not skip_string_normalization, + magic_trailing_comma=not skip_magic_trailing_comma, + preview=preview, + unstable=unstable, + enabled_features=enable_features, + ) + + +def parse_python_variant_header(value: str) -> Tuple[bool, Set[black.TargetVersion]]: + if value == "pyi": + return True, set() + else: + versions = set() + for version in value.split(","): + if version.startswith("py"): + version = version[len("py") :] + if "." in version: + major_str, *rest = version.split(".") + else: + major_str = version[0] + rest = [version[1:]] if len(version) > 1 else [] + try: + major = int(major_str) + if major not in (2, 3): + raise InvalidVariantHeader("major version must be 2 or 3") + if len(rest) > 0: + minor = int(rest[0]) + if major == 2: + raise InvalidVariantHeader("Python 2 is not supported") + else: + # Default to lowest supported minor version. 
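+                        # (Illustration: a bare "py3" header carries no minor
+                        # part, so it falls through to this default and, if
+                        # black's TargetVersion defines PY33, is treated as
+                        # Python 3.3.)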
+ minor = 7 if major == 2 else 3 + version_str = f"PY{major}{minor}" + if major == 3 and not hasattr(black.TargetVersion, version_str): + raise InvalidVariantHeader(f"3.{minor} is not supported") + versions.add(black.TargetVersion[version_str]) + except (KeyError, ValueError): + raise InvalidVariantHeader("expected e.g. '3.7', 'py3.5'") from None + return False, versions + + +def patched_main() -> None: + maybe_install_uvloop() + freeze_support() + main() + + +if __name__ == "__main__": + patched_main() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blackd/__main__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/blackd/__main__.py new file mode 100644 index 000000000..b5a4b1374 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blackd/__main__.py @@ -0,0 +1,3 @@ +import blackd + +blackd.patched_main() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blackd/middlewares.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/blackd/middlewares.py new file mode 100644 index 000000000..370e0ae22 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blackd/middlewares.py @@ -0,0 +1,45 @@ +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Iterable, TypeVar + +from aiohttp.web_request import Request +from aiohttp.web_response import StreamResponse + +if TYPE_CHECKING: + F = TypeVar("F", bound=Callable[..., Any]) + middleware: Callable[[F], F] +else: + try: + from aiohttp.web_middlewares import middleware + except ImportError: + # @middleware is deprecated and its behaviour is the default since aiohttp 4.0 + # so if it doesn't exist anymore, define a no-op for forward compatibility. + middleware = lambda x: x # noqa: E731 + +Handler = Callable[[Request], Awaitable[StreamResponse]] +Middleware = Callable[[Request, Handler], Awaitable[StreamResponse]] + + +def cors(allow_headers: Iterable[str]) -> Middleware: + @middleware + async def impl(request: Request, handler: Handler) -> StreamResponse: + is_options = request.method == "OPTIONS" + is_preflight = is_options and "Access-Control-Request-Method" in request.headers + if is_preflight: + resp = StreamResponse() + else: + resp = await handler(request) + + origin = request.headers.get("Origin") + if not origin: + return resp + + resp.headers["Access-Control-Allow-Origin"] = "*" + resp.headers["Access-Control-Expose-Headers"] = "*" + if is_options: + resp.headers["Access-Control-Allow-Headers"] = ", ".join(allow_headers) + resp.headers["Access-Control-Allow-Methods"] = ", ".join( + ("OPTIONS", "POST") + ) + + return resp + + return impl diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/Grammar.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/Grammar.txt new file mode 100644 index 000000000..c8800e219 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/Grammar.txt @@ -0,0 +1,261 @@ +# Grammar for 2to3. This grammar supports Python 2.x and 3.x. + +# NOTE WELL: You should also follow all the steps listed at +# https://devguide.python.org/grammar/ + +# Start symbols for the grammar: +# file_input is a module or sequence of commands read from an input file; +# single_input is a single interactive statement; +# eval_input is the input for the eval() and input() functions. +# NB: compound_stmt in single_input is followed by extra NEWLINE! 
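+# (Illustrative note, not part of the upstream grammar file: interactively, a
+#  compound statement such as an `if x:` block is only complete once the extra
+#  blank NEWLINE arrives, which is why single_input spells that case out.)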
+file_input: (NEWLINE | stmt)* ENDMARKER +single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE +eval_input: testlist NEWLINE* ENDMARKER + +typevar: NAME [':' expr] ['=' expr] +paramspec: '**' NAME ['=' expr] +typevartuple: '*' NAME ['=' (expr|star_expr)] +typeparam: typevar | paramspec | typevartuple +typeparams: '[' typeparam (',' typeparam)* [','] ']' + +decorator: '@' namedexpr_test NEWLINE +decorators: decorator+ +decorated: decorators (classdef | funcdef | async_funcdef) +async_funcdef: ASYNC funcdef +funcdef: 'def' NAME [typeparams] parameters ['->' test] ':' suite +parameters: '(' [typedargslist] ')' + +# The following definition for typedarglist is equivalent to this set of rules: +# +# arguments = argument (',' argument)* +# argument = tfpdef ['=' test] +# kwargs = '**' tname [','] +# args = '*' [tname_star] +# kwonly_kwargs = (',' argument)* [',' [kwargs]] +# args_kwonly_kwargs = args kwonly_kwargs | kwargs +# poskeyword_args_kwonly_kwargs = arguments [',' [args_kwonly_kwargs]] +# typedargslist_no_posonly = poskeyword_args_kwonly_kwargs | args_kwonly_kwargs +# typedarglist = arguments ',' '/' [',' [typedargslist_no_posonly]])|(typedargslist_no_posonly)" +# +# It needs to be fully expanded to allow our LL(1) parser to work on it. + +typedargslist: tfpdef ['=' test] (',' tfpdef ['=' test])* ',' '/' [ + ',' [((tfpdef ['=' test] ',')* ('*' [tname_star] (',' tname ['=' test])* + [',' ['**' tname [',']]] | '**' tname [',']) + | tfpdef ['=' test] (',' tfpdef ['=' test])* [','])] + ] | ((tfpdef ['=' test] ',')* ('*' [tname_star] (',' tname ['=' test])* + [',' ['**' tname [',']]] | '**' tname [',']) + | tfpdef ['=' test] (',' tfpdef ['=' test])* [',']) + +tname: NAME [':' test] +tname_star: NAME [':' (test|star_expr)] +tfpdef: tname | '(' tfplist ')' +tfplist: tfpdef (',' tfpdef)* [','] + +# The following definition for varargslist is equivalent to this set of rules: +# +# arguments = argument (',' argument )* +# argument = vfpdef ['=' test] +# kwargs = '**' vname [','] +# args = '*' [vname] +# kwonly_kwargs = (',' argument )* [',' [kwargs]] +# args_kwonly_kwargs = args kwonly_kwargs | kwargs +# poskeyword_args_kwonly_kwargs = arguments [',' [args_kwonly_kwargs]] +# vararglist_no_posonly = poskeyword_args_kwonly_kwargs | args_kwonly_kwargs +# varargslist = arguments ',' '/' [','[(vararglist_no_posonly)]] | (vararglist_no_posonly) +# +# It needs to be fully expanded to allow our LL(1) parser to work on it. 
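+# (Illustrative note, not part of the upstream grammar file: a lambda
+#  parameter list such as `lambda x, /, y=1, *args, z=2, **kw: 0` exercises
+#  the positional-only marker, defaults, star-args and double-star branches
+#  that the expansion below spells out.)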
+ +varargslist: vfpdef ['=' test ](',' vfpdef ['=' test])* ',' '/' [',' [ + ((vfpdef ['=' test] ',')* ('*' [vname] (',' vname ['=' test])* + [',' ['**' vname [',']]] | '**' vname [',']) + | vfpdef ['=' test] (',' vfpdef ['=' test])* [',']) + ]] | ((vfpdef ['=' test] ',')* + ('*' [vname] (',' vname ['=' test])* [',' ['**' vname [',']]]| '**' vname [',']) + | vfpdef ['=' test] (',' vfpdef ['=' test])* [',']) + +vname: NAME +vfpdef: vname | '(' vfplist ')' +vfplist: vfpdef (',' vfpdef)* [','] + +stmt: simple_stmt | compound_stmt +simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE +small_stmt: (type_stmt | expr_stmt | del_stmt | pass_stmt | flow_stmt | + import_stmt | global_stmt | assert_stmt) +expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) | + ('=' (yield_expr|testlist_star_expr))*) +annassign: ':' test ['=' (yield_expr|testlist_star_expr)] +testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] +augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' | + '<<=' | '>>=' | '**=' | '//=') +# For normal and annotated assignments, additional restrictions enforced by the interpreter +del_stmt: 'del' exprlist +pass_stmt: 'pass' +flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt +break_stmt: 'break' +continue_stmt: 'continue' +return_stmt: 'return' [testlist_star_expr] +yield_stmt: yield_expr +raise_stmt: 'raise' [test ['from' test | ',' test [',' test]]] +import_stmt: import_name | import_from +import_name: 'import' dotted_as_names +import_from: ('from' ('.'* dotted_name | '.'+) + 'import' ('*' | '(' import_as_names ')' | import_as_names)) +import_as_name: NAME ['as' NAME] +dotted_as_name: dotted_name ['as' NAME] +import_as_names: import_as_name (',' import_as_name)* [','] +dotted_as_names: dotted_as_name (',' dotted_as_name)* +dotted_name: NAME ('.' NAME)* +global_stmt: ('global' | 'nonlocal') NAME (',' NAME)* +assert_stmt: 'assert' test [',' test] +type_stmt: "type" NAME [typeparams] '=' test + +compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt | match_stmt +async_stmt: ASYNC (funcdef | with_stmt | for_stmt) +if_stmt: 'if' namedexpr_test ':' suite ('elif' namedexpr_test ':' suite)* ['else' ':' suite] +while_stmt: 'while' namedexpr_test ':' suite ['else' ':' suite] +for_stmt: 'for' exprlist 'in' testlist_star_expr ':' suite ['else' ':' suite] +try_stmt: ('try' ':' suite + ((except_clause ':' suite)+ + ['else' ':' suite] + ['finally' ':' suite] | + 'finally' ':' suite)) +with_stmt: 'with' asexpr_test (',' asexpr_test)* ':' suite + +# NB compile.c makes sure that the default except clause is last +except_clause: 'except' ['*'] [test [(',' | 'as') test]] +suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT + +# Backward compatibility cruft to support: +# [ x for x in lambda: True, lambda: False if x() ] +# even while also allowing: +# lambda x: 5 if x else 2 +# (But not a mix of the two) +testlist_safe: old_test [(',' old_test)+ [',']] +old_test: or_test | old_lambdef +old_lambdef: 'lambda' [varargslist] ':' old_test + +namedexpr_test: asexpr_test [':=' asexpr_test] + +# This is actually not a real rule, though since the parser is very +# limited in terms of the strategy about match/case rules, we are inserting +# a virtual case ( as ) as a valid expression. Unless a better +# approach is thought, the only side effect of this seem to be just allowing +# more stuff to be parser (which would fail on the ast). 
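+# (Illustrative note, not part of the upstream grammar file: the asexpr_test
+#  rule below is what lets `with open(f) as fh, open(g) as gh:` parse, since
+#  with_stmt above is defined in terms of asexpr_test.)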
+asexpr_test: test ['as' test] + +test: or_test ['if' or_test 'else' test] | lambdef +or_test: and_test ('or' and_test)* +and_test: not_test ('and' not_test)* +not_test: 'not' not_test | comparison +comparison: expr (comp_op expr)* +comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' +star_expr: '*' expr +expr: xor_expr ('|' xor_expr)* +xor_expr: and_expr ('^' and_expr)* +and_expr: shift_expr ('&' shift_expr)* +shift_expr: arith_expr (('<<'|'>>') arith_expr)* +arith_expr: term (('+'|'-') term)* +term: factor (('*'|'@'|'/'|'%'|'//') factor)* +factor: ('+'|'-'|'~') factor | power +power: [AWAIT] atom trailer* ['**' factor] +atom: ('(' [yield_expr|testlist_gexp] ')' | + '[' [listmaker] ']' | + '{' [dictsetmaker] '}' | + '`' testlist1 '`' | + NAME | NUMBER | (STRING | fstring)+ | '.' '.' '.') +listmaker: (namedexpr_test|star_expr) ( old_comp_for | (',' (namedexpr_test|star_expr))* [','] ) +testlist_gexp: (namedexpr_test|star_expr) ( old_comp_for | (',' (namedexpr_test|star_expr))* [','] ) +lambdef: 'lambda' [varargslist] ':' test +trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME +subscriptlist: (subscript|star_expr) (',' (subscript|star_expr))* [','] +subscript: test [':=' test] | [test] ':' [test] [sliceop] +sliceop: ':' [test] +exprlist: (expr|star_expr) (',' (expr|star_expr))* [','] +testlist: test (',' test)* [','] +dictsetmaker: ( ((test ':' asexpr_test | '**' expr) + (comp_for | (',' (test ':' asexpr_test | '**' expr))* [','])) | + ((test [':=' test] | star_expr) + (comp_for | (',' (test [':=' test] | star_expr))* [','])) ) + +classdef: 'class' NAME [typeparams] ['(' [arglist] ')'] ':' suite + +arglist: argument (',' argument)* [','] + +# "test '=' test" is really "keyword '=' test", but we have no such token. +# These need to be in a single rule to avoid grammar that is ambiguous +# to our LL(1) parser. Even though 'test' includes '*expr' in star_expr, +# we explicitly match '*' here, too, to give it proper precedence. +# Illegal combinations and orderings are blocked in ast.c: +# multiple (test comp_for) arguments are blocked; keyword unpackings +# that precede iterable unpackings are blocked; etc. +argument: ( test [comp_for] | + test ':=' test [comp_for] | + test 'as' test | + test '=' asexpr_test | + '**' test | + '*' test ) + +comp_iter: comp_for | comp_if +comp_for: [ASYNC] 'for' exprlist 'in' or_test [comp_iter] +comp_if: 'if' old_test [comp_iter] + +# As noted above, testlist_safe extends the syntax allowed in list +# comprehensions and generators. We can't use it indiscriminately in all +# derivations using a comp_for-like pattern because the testlist_safe derivation +# contains comma which clashes with trailing comma in arglist. +# +# This was an issue because the parser would not follow the correct derivation +# when parsing syntactically valid Python code. Since testlist_safe was created +# specifically to handle list comprehensions and generator expressions enclosed +# with parentheses, it's safe to only use it in those. That avoids the issue; we +# can parse code like set(x for x in [],). +# +# The syntax supported by this set of rules is not a valid Python 3 syntax, +# hence the prefix "old". 
+# +# See https://bugs.python.org/issue27494 +old_comp_iter: old_comp_for | old_comp_if +old_comp_for: [ASYNC] 'for' exprlist 'in' testlist_safe [old_comp_iter] +old_comp_if: 'if' old_test [old_comp_iter] + +testlist1: test (',' test)* + +# not used in grammar, but may appear in "node" passed from Parser to Compiler +encoding_decl: NAME + +yield_expr: 'yield' [yield_arg] +yield_arg: 'from' test | testlist_star_expr + + +# 3.10 match statement definition + +# PS: normally the grammar is much much more restricted, but +# at this moment for not trying to bother much with encoding the +# exact same DSL in a LL(1) parser, we will just accept an expression +# and let the ast.parse() step of the safe mode to reject invalid +# grammar. + +# The reason why it is more restricted is that, patterns are some +# sort of a DSL (more advanced than our LHS on assignments, but +# still in a very limited python subset). They are not really +# expressions, but who cares. If we can parse them, that is enough +# to reformat them. + +match_stmt: "match" subject_expr ':' NEWLINE INDENT case_block+ DEDENT + +# This is more permissive than the actual version. For example it +# accepts `match *something:`, even though single-item starred expressions +# are forbidden. +subject_expr: (namedexpr_test|star_expr) (',' (namedexpr_test|star_expr))* [','] + +# cases +case_block: "case" patterns [guard] ':' suite +guard: 'if' namedexpr_test +patterns: pattern (',' pattern)* [','] +pattern: (expr|star_expr) ['as' expr] + +fstring: FSTRING_START fstring_middle* FSTRING_END +fstring_middle: fstring_replacement_field | FSTRING_MIDDLE +fstring_replacement_field: '{' (yield_expr | testlist_star_expr) ['='] [ "!" NAME ] [ ':' fstring_format_spec* ] '}' +fstring_format_spec: FSTRING_MIDDLE | fstring_replacement_field diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/LICENSE b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/LICENSE new file mode 100644 index 000000000..ef8df0698 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/LICENSE @@ -0,0 +1,254 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see https://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see https://opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? 
(1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018 Python Software Foundation; All +Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. 
This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. 
Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. 
+ + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/PatternGrammar.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/PatternGrammar.txt new file mode 100644 index 000000000..36bf81482 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/PatternGrammar.txt @@ -0,0 +1,28 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +# A grammar to describe tree matching patterns. +# Not shown here: +# - 'TOKEN' stands for any token (leaf node) +# - 'any' stands for any node (leaf or interior) +# With 'any' we can still specify the sub-structure. + +# The start symbol is 'Matcher'. + +Matcher: Alternatives ENDMARKER + +Alternatives: Alternative ('|' Alternative)* + +Alternative: (Unit | NegatedUnit)+ + +Unit: [NAME '='] ( STRING [Repeater] + | NAME [Details] [Repeater] + | '(' Alternatives ')' [Repeater] + | '[' Alternatives ']' + ) + +NegatedUnit: 'not' (STRING | NAME [Details] | '(' Alternatives ')') + +Repeater: '*' | '+' | '{' NUMBER [',' NUMBER] '}' + +Details: '<' Alternatives '>' diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/README b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/README new file mode 100644 index 000000000..38b04158d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/README @@ -0,0 +1,24 @@ +A subset of lib2to3 taken from Python 3.7.0b2. 
Commit hash: +9c17e3a1987004b8bcfbe423953aad84493a7984 + +Reasons for forking: + +- consistent handling of f-strings for users of Python < 3.6.2 +- backport of BPO-33064 that fixes parsing files with trailing commas after \*args and + \*\*kwargs +- backport of GH-6143 that restores the ability to reformat legacy usage of `async` +- support all types of string literals +- better ability to debug (better reprs) +- INDENT and DEDENT don't hold whitespace and comment prefixes +- ability to Cythonize + +Change Log: + +- Changes default logger used by Driver +- Backported the following upstream parser changes: + - "bpo-42381: Allow walrus in set literals and set comprehensions (GH-23332)" + https://github.com/python/cpython/commit/cae60187cf7a7b26281d012e1952fafe4e2e97e9 + - "bpo-42316: Allow unparenthesized walrus operator in indexes (GH-23317)" + https://github.com/python/cpython/commit/b0aba1fcdc3da952698d99aec2334faa79a8b68c +- Tweaks to help mypyc compile faster code (including inlining type information, + "Final-ing", etc.) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/__init__.py new file mode 100644 index 000000000..1bb8bf6d7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/__init__.py @@ -0,0 +1 @@ +# empty diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/__init__.py new file mode 100644 index 000000000..af3904845 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/__init__.py @@ -0,0 +1,4 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""The pgen2 package.""" diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/conv.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/conv.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..b331606ee Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/conv.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/conv.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/conv.py new file mode 100644 index 000000000..04eccfa1d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/conv.py @@ -0,0 +1,256 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +# mypy: ignore-errors + +"""Convert graminit.[ch] spit out by pgen to Python code. + +Pgen is the Python parser generator. It is useful to quickly create a +parser from a grammar file in Python's grammar notation. But I don't +want my parsers to be written in C (yet), so I'm translating the +parsing tables to Python data structures and writing a Python parse +engine. + +Note that the token numbers are constants determined by the standard +Python tokenizer. The standard token module defines these numbers and +their names (the names are not used much). The token numbers are +hardcoded into the Python tokenizer and into pgen. A Python +implementation of the Python tokenizer is also available, in the +standard tokenize module. + +On the other hand, symbol numbers (representing the grammar's +non-terminals) are assigned by pgen based on the actual grammar +input. 
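+
+For illustration (the concrete numbers depend on the grammar being fed to
+pgen): token numbers stay below 256, while the nonterminal symbol numbers
+are assigned from 256 upwards.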
+ +Note: this module is pretty much obsolete; the pgen module generates +equivalent grammar tables directly from the Grammar.txt input file +without having to invoke the Python pgen C program. + +""" + +# Python imports +import re + +# Local imports +from pgen2 import grammar, token + + +class Converter(grammar.Grammar): + """Grammar subclass that reads classic pgen output files. + + The run() method reads the tables as produced by the pgen parser + generator, typically contained in two C files, graminit.h and + graminit.c. The other methods are for internal use only. + + See the base class for more documentation. + + """ + + def run(self, graminit_h, graminit_c): + """Load the grammar tables from the text files written by pgen.""" + self.parse_graminit_h(graminit_h) + self.parse_graminit_c(graminit_c) + self.finish_off() + + def parse_graminit_h(self, filename): + """Parse the .h file written by pgen. (Internal) + + This file is a sequence of #define statements defining the + nonterminals of the grammar as numbers. We build two tables + mapping the numbers to names and back. + + """ + try: + f = open(filename) + except OSError as err: + print(f"Can't open {filename}: {err}") + return False + self.symbol2number = {} + self.number2symbol = {} + lineno = 0 + for line in f: + lineno += 1 + mo = re.match(r"^#define\s+(\w+)\s+(\d+)$", line) + if not mo and line.strip(): + print(f"{filename}({lineno}): can't parse {line.strip()}") + else: + symbol, number = mo.groups() + number = int(number) + assert symbol not in self.symbol2number + assert number not in self.number2symbol + self.symbol2number[symbol] = number + self.number2symbol[number] = symbol + return True + + def parse_graminit_c(self, filename): + """Parse the .c file written by pgen. (Internal) + + The file looks as follows. The first two lines are always this: + + #include "pgenheaders.h" + #include "grammar.h" + + After that come four blocks: + + 1) one or more state definitions + 2) a table defining dfas + 3) a table defining labels + 4) a struct defining the grammar + + A state definition has the following form: + - one or more arc arrays, each of the form: + static arc arcs__[] = { + {, }, + ... + }; + - followed by a state array, of the form: + static state states_[] = { + {, arcs__}, + ... + }; + + """ + try: + f = open(filename) + except OSError as err: + print(f"Can't open {filename}: {err}") + return False + # The code below essentially uses f's iterator-ness! 
+ lineno = 0 + + # Expect the two #include lines + lineno, line = lineno + 1, next(f) + assert line == '#include "pgenheaders.h"\n', (lineno, line) + lineno, line = lineno + 1, next(f) + assert line == '#include "grammar.h"\n', (lineno, line) + + # Parse the state definitions + lineno, line = lineno + 1, next(f) + allarcs = {} + states = [] + while line.startswith("static arc "): + while line.startswith("static arc "): + mo = re.match(r"static arc arcs_(\d+)_(\d+)\[(\d+)\] = {$", line) + assert mo, (lineno, line) + n, m, k = list(map(int, mo.groups())) + arcs = [] + for _ in range(k): + lineno, line = lineno + 1, next(f) + mo = re.match(r"\s+{(\d+), (\d+)},$", line) + assert mo, (lineno, line) + i, j = list(map(int, mo.groups())) + arcs.append((i, j)) + lineno, line = lineno + 1, next(f) + assert line == "};\n", (lineno, line) + allarcs[(n, m)] = arcs + lineno, line = lineno + 1, next(f) + mo = re.match(r"static state states_(\d+)\[(\d+)\] = {$", line) + assert mo, (lineno, line) + s, t = list(map(int, mo.groups())) + assert s == len(states), (lineno, line) + state = [] + for _ in range(t): + lineno, line = lineno + 1, next(f) + mo = re.match(r"\s+{(\d+), arcs_(\d+)_(\d+)},$", line) + assert mo, (lineno, line) + k, n, m = list(map(int, mo.groups())) + arcs = allarcs[n, m] + assert k == len(arcs), (lineno, line) + state.append(arcs) + states.append(state) + lineno, line = lineno + 1, next(f) + assert line == "};\n", (lineno, line) + lineno, line = lineno + 1, next(f) + self.states = states + + # Parse the dfas + dfas = {} + mo = re.match(r"static dfa dfas\[(\d+)\] = {$", line) + assert mo, (lineno, line) + ndfas = int(mo.group(1)) + for i in range(ndfas): + lineno, line = lineno + 1, next(f) + mo = re.match(r'\s+{(\d+), "(\w+)", (\d+), (\d+), states_(\d+),$', line) + assert mo, (lineno, line) + symbol = mo.group(2) + number, x, y, z = list(map(int, mo.group(1, 3, 4, 5))) + assert self.symbol2number[symbol] == number, (lineno, line) + assert self.number2symbol[number] == symbol, (lineno, line) + assert x == 0, (lineno, line) + state = states[z] + assert y == len(state), (lineno, line) + lineno, line = lineno + 1, next(f) + mo = re.match(r'\s+("(?:\\\d\d\d)*")},$', line) + assert mo, (lineno, line) + first = {} + rawbitset = eval(mo.group(1)) + for i, c in enumerate(rawbitset): + byte = ord(c) + for j in range(8): + if byte & (1 << j): + first[i * 8 + j] = 1 + dfas[number] = (state, first) + lineno, line = lineno + 1, next(f) + assert line == "};\n", (lineno, line) + self.dfas = dfas + + # Parse the labels + labels = [] + lineno, line = lineno + 1, next(f) + mo = re.match(r"static label labels\[(\d+)\] = {$", line) + assert mo, (lineno, line) + nlabels = int(mo.group(1)) + for i in range(nlabels): + lineno, line = lineno + 1, next(f) + mo = re.match(r'\s+{(\d+), (0|"\w+")},$', line) + assert mo, (lineno, line) + x, y = mo.groups() + x = int(x) + if y == "0": + y = None + else: + y = eval(y) + labels.append((x, y)) + lineno, line = lineno + 1, next(f) + assert line == "};\n", (lineno, line) + self.labels = labels + + # Parse the grammar struct + lineno, line = lineno + 1, next(f) + assert line == "grammar _PyParser_Grammar = {\n", (lineno, line) + lineno, line = lineno + 1, next(f) + mo = re.match(r"\s+(\d+),$", line) + assert mo, (lineno, line) + ndfas = int(mo.group(1)) + assert ndfas == len(self.dfas) + lineno, line = lineno + 1, next(f) + assert line == "\tdfas,\n", (lineno, line) + lineno, line = lineno + 1, next(f) + mo = re.match(r"\s+{(\d+), labels},$", line) + assert mo, (lineno, 
line) + nlabels = int(mo.group(1)) + assert nlabels == len(self.labels), (lineno, line) + lineno, line = lineno + 1, next(f) + mo = re.match(r"\s+(\d+)$", line) + assert mo, (lineno, line) + start = int(mo.group(1)) + assert start in self.number2symbol, (lineno, line) + self.start = start + lineno, line = lineno + 1, next(f) + assert line == "};\n", (lineno, line) + try: + lineno, line = lineno + 1, next(f) + except StopIteration: + pass + else: + assert 0, (lineno, line) + + def finish_off(self): + """Create additional useful structures. (Internal).""" + self.keywords = {} # map from keyword strings to arc labels + self.tokens = {} # map from numeric token values to arc labels + for ilabel, (type, value) in enumerate(self.labels): + if type == token.NAME and value is not None: + self.keywords[value] = ilabel + elif value is None: + self.tokens[type] = ilabel diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/driver.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/driver.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..1f18b07ba Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/driver.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/driver.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/driver.py new file mode 100644 index 000000000..71a147cbc --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/driver.py @@ -0,0 +1,318 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +# Modifications: +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Parser driver. + +This provides a high-level interface to parse a file into a syntax tree. + +""" + +__author__ = "Guido van Rossum " + +__all__ = ["Driver", "load_grammar"] + +# Python imports +import io +import logging +import os +import pkgutil +import sys +from contextlib import contextmanager +from dataclasses import dataclass, field +from logging import Logger +from typing import IO, Any, Iterable, Iterator, List, Optional, Tuple, Union, cast + +from blib2to3.pgen2.grammar import Grammar +from blib2to3.pgen2.tokenize import GoodTokenInfo +from blib2to3.pytree import NL + +# Pgen imports +from . import grammar, parse, pgen, token, tokenize + +Path = Union[str, "os.PathLike[str]"] + + +@dataclass +class ReleaseRange: + start: int + end: Optional[int] = None + tokens: List[Any] = field(default_factory=list) + + def lock(self) -> None: + total_eaten = len(self.tokens) + self.end = self.start + total_eaten + + +class TokenProxy: + def __init__(self, generator: Any) -> None: + self._tokens = generator + self._counter = 0 + self._release_ranges: List[ReleaseRange] = [] + + @contextmanager + def release(self) -> Iterator["TokenProxy"]: + release_range = ReleaseRange(self._counter) + self._release_ranges.append(release_range) + try: + yield self + finally: + # Lock the last release range to the final position that + # has been eaten. 
+ release_range.lock() + + def eat(self, point: int) -> Any: + eaten_tokens = self._release_ranges[-1].tokens + if point < len(eaten_tokens): + return eaten_tokens[point] + else: + while point >= len(eaten_tokens): + token = next(self._tokens) + eaten_tokens.append(token) + return token + + def __iter__(self) -> "TokenProxy": + return self + + def __next__(self) -> Any: + # If the current position is already compromised (looked up) + # return the eaten token, if not just go further on the given + # token producer. + for release_range in self._release_ranges: + assert release_range.end is not None + + start, end = release_range.start, release_range.end + if start <= self._counter < end: + token = release_range.tokens[self._counter - start] + break + else: + token = next(self._tokens) + self._counter += 1 + return token + + def can_advance(self, to: int) -> bool: + # Try to eat, fail if it can't. The eat operation is cached + # so there won't be any additional cost of eating here + try: + self.eat(to) + except StopIteration: + return False + else: + return True + + +class Driver: + def __init__(self, grammar: Grammar, logger: Optional[Logger] = None) -> None: + self.grammar = grammar + if logger is None: + logger = logging.getLogger(__name__) + self.logger = logger + + def parse_tokens(self, tokens: Iterable[GoodTokenInfo], debug: bool = False) -> NL: + """Parse a series of tokens and return the syntax tree.""" + # XXX Move the prefix computation into a wrapper around tokenize. + proxy = TokenProxy(tokens) + + p = parse.Parser(self.grammar) + p.setup(proxy=proxy) + + lineno = 1 + column = 0 + indent_columns: List[int] = [] + type = value = start = end = line_text = None + prefix = "" + + for quintuple in proxy: + type, value, start, end, line_text = quintuple + if start != (lineno, column): + assert (lineno, column) <= start, ((lineno, column), start) + s_lineno, s_column = start + if lineno < s_lineno: + prefix += "\n" * (s_lineno - lineno) + lineno = s_lineno + column = 0 + if column < s_column: + prefix += line_text[column:s_column] + column = s_column + if type in (tokenize.COMMENT, tokenize.NL): + prefix += value + lineno, column = end + if value.endswith("\n"): + lineno += 1 + column = 0 + continue + if type == token.OP: + type = grammar.opmap[value] + if debug: + assert type is not None + self.logger.debug( + "%s %r (prefix=%r)", token.tok_name[type], value, prefix + ) + if type == token.INDENT: + indent_columns.append(len(value)) + _prefix = prefix + value + prefix = "" + value = "" + elif type == token.DEDENT: + _indent_col = indent_columns.pop() + prefix, _prefix = self._partially_consume_prefix(prefix, _indent_col) + if p.addtoken(cast(int, type), value, (prefix, start)): + if debug: + self.logger.debug("Stop.") + break + prefix = "" + if type in {token.INDENT, token.DEDENT}: + prefix = _prefix + lineno, column = end + # FSTRING_MIDDLE is the only token that can end with a newline, and + # `end` will point to the next line. For that case, don't increment lineno. + if value.endswith("\n") and type != token.FSTRING_MIDDLE: + lineno += 1 + column = 0 + else: + # We never broke out -- EOF is too soon (how can this happen???) 
+ assert start is not None + raise parse.ParseError("incomplete input", type, value, (prefix, start)) + assert p.rootnode is not None + return p.rootnode + + def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> NL: + """Parse a stream and return the syntax tree.""" + tokens = tokenize.generate_tokens(stream.readline, grammar=self.grammar) + return self.parse_tokens(tokens, debug) + + def parse_stream(self, stream: IO[str], debug: bool = False) -> NL: + """Parse a stream and return the syntax tree.""" + return self.parse_stream_raw(stream, debug) + + def parse_file( + self, filename: Path, encoding: Optional[str] = None, debug: bool = False + ) -> NL: + """Parse a file and return the syntax tree.""" + with open(filename, encoding=encoding) as stream: + return self.parse_stream(stream, debug) + + def parse_string(self, text: str, debug: bool = False) -> NL: + """Parse a string and return the syntax tree.""" + tokens = tokenize.generate_tokens( + io.StringIO(text).readline, grammar=self.grammar + ) + return self.parse_tokens(tokens, debug) + + def _partially_consume_prefix(self, prefix: str, column: int) -> Tuple[str, str]: + lines: List[str] = [] + current_line = "" + current_column = 0 + wait_for_nl = False + for char in prefix: + current_line += char + if wait_for_nl: + if char == "\n": + if current_line.strip() and current_column < column: + res = "".join(lines) + return res, prefix[len(res) :] + + lines.append(current_line) + current_line = "" + current_column = 0 + wait_for_nl = False + elif char in " \t": + current_column += 1 + elif char == "\n": + # unexpected empty line + current_column = 0 + elif char == "\f": + current_column = 0 + else: + # indent is finished + wait_for_nl = True + return "".join(lines), current_line + + +def _generate_pickle_name(gt: Path, cache_dir: Optional[Path] = None) -> str: + head, tail = os.path.splitext(gt) + if tail == ".txt": + tail = "" + name = head + tail + ".".join(map(str, sys.version_info)) + ".pickle" + if cache_dir: + return os.path.join(cache_dir, os.path.basename(name)) + else: + return name + + +def load_grammar( + gt: str = "Grammar.txt", + gp: Optional[str] = None, + save: bool = True, + force: bool = False, + logger: Optional[Logger] = None, +) -> Grammar: + """Load the grammar (maybe from a pickle).""" + if logger is None: + logger = logging.getLogger(__name__) + gp = _generate_pickle_name(gt) if gp is None else gp + if force or not _newer(gp, gt): + g: grammar.Grammar = pgen.generate_grammar(gt) + if save: + try: + g.dump(gp) + except OSError: + # Ignore error, caching is not vital. + pass + else: + g = grammar.Grammar() + g.load(gp) + return g + + +def _newer(a: str, b: str) -> bool: + """Inquire whether file a was written since file b.""" + if not os.path.exists(a): + return False + if not os.path.exists(b): + return True + return os.path.getmtime(a) >= os.path.getmtime(b) + + +def load_packaged_grammar( + package: str, grammar_source: str, cache_dir: Optional[Path] = None +) -> grammar.Grammar: + """Normally, loads a pickled grammar by doing + pkgutil.get_data(package, pickled_grammar) + where *pickled_grammar* is computed from *grammar_source* by adding the + Python version and using a ``.pickle`` extension. + + However, if *grammar_source* is an extant file, load_grammar(grammar_source) + is called instead. This facilitates using a packaged grammar file when needed + but preserves load_grammar's automatic regeneration behavior when possible. 
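A quick sketch of the pickle naming scheme (editor's illustration, not part of the vendored file; it mirrors the _generate_pickle_name logic above, and the exact file name depends on the running interpreter):

    import os
    import sys

    def pickle_name(gt: str) -> str:
        # Mirrors _generate_pickle_name: drop a ".txt" suffix, then append the
        # dotted sys.version_info and ".pickle".
        head, tail = os.path.splitext(gt)
        if tail == ".txt":
            tail = ""
        return head + tail + ".".join(map(str, sys.version_info)) + ".pickle"

    # On CPython 3.8.20 this prints something like
    # "Grammar3.8.20.final.0.pickle".
    print(pickle_name("Grammar.txt"))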
+ + """ + if os.path.isfile(grammar_source): + gp = _generate_pickle_name(grammar_source, cache_dir) if cache_dir else None + return load_grammar(grammar_source, gp=gp) + pickled_name = _generate_pickle_name(os.path.basename(grammar_source), cache_dir) + data = pkgutil.get_data(package, pickled_name) + assert data is not None + g = grammar.Grammar() + g.loads(data) + return g + + +def main(*args: str) -> bool: + """Main program, when run as a script: produce grammar pickle files. + + Calls load_grammar for each argument, a path to a grammar text file. + """ + if not args: + args = tuple(sys.argv[1:]) + logging.basicConfig(level=logging.INFO, stream=sys.stdout, format="%(message)s") + for gt in args: + load_grammar(gt, save=True, force=True) + return True + + +if __name__ == "__main__": + sys.exit(int(not main())) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/grammar.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/grammar.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..8e2b5bb5f Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/grammar.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/grammar.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/grammar.py new file mode 100644 index 000000000..804db1ad9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/grammar.py @@ -0,0 +1,228 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""This module defines the data structures used to represent a grammar. + +These are a bit arcane because they are derived from the data +structures used by Python's 'pgen' parser generator. + +There's also a table here mapping operators to their names in the +token module; the Python tokenize module reports all operators as the +fallback token code OP, but the parser needs the actual token code. + +""" + +# Python imports +import os +import pickle +import tempfile +from typing import Any, Dict, List, Optional, Tuple, TypeVar, Union + +# Local imports +from . import token + +_P = TypeVar("_P", bound="Grammar") +Label = Tuple[int, Optional[str]] +DFA = List[List[Tuple[int, int]]] +DFAS = Tuple[DFA, Dict[int, int]] +Path = Union[str, "os.PathLike[str]"] + + +class Grammar: + """Pgen parsing tables conversion class. + + Once initialized, this class supplies the grammar tables for the + parsing engine implemented by parse.py. The parsing engine + accesses the instance variables directly. The class here does not + provide initialization of the tables; several subclasses exist to + do this (see the conv and pgen modules). + + The load() method reads the tables from a pickle file, which is + much faster than the other ways offered by subclasses. The pickle + file is written by calling dump() (after loading the grammar + tables using a subclass). The report() method prints a readable + representation of the tables to stdout, for debugging. + + The instance variables are as follows: + + symbol2number -- a dict mapping symbol names to numbers. Symbol + numbers are always 256 or higher, to distinguish + them from token numbers, which are between 0 and + 255 (inclusive). + + number2symbol -- a dict mapping numbers to symbol names; + these two are each other's inverse. 
+ + states -- a list of DFAs, where each DFA is a list of + states, each state is a list of arcs, and each + arc is a (i, j) pair where i is a label and j is + a state number. The DFA number is the index into + this list. (This name is slightly confusing.) + Final states are represented by a special arc of + the form (0, j) where j is its own state number. + + dfas -- a dict mapping symbol numbers to (DFA, first) + pairs, where DFA is an item from the states list + above, and first is a set of tokens that can + begin this grammar rule (represented by a dict + whose values are always 1). + + labels -- a list of (x, y) pairs where x is either a token + number or a symbol number, and y is either None + or a string; the strings are keywords. The label + number is the index in this list; label numbers + are used to mark state transitions (arcs) in the + DFAs. + + start -- the number of the grammar's start symbol. + + keywords -- a dict mapping keyword strings to arc labels. + + tokens -- a dict mapping token numbers to arc labels. + + """ + + def __init__(self) -> None: + self.symbol2number: Dict[str, int] = {} + self.number2symbol: Dict[int, str] = {} + self.states: List[DFA] = [] + self.dfas: Dict[int, DFAS] = {} + self.labels: List[Label] = [(0, "EMPTY")] + self.keywords: Dict[str, int] = {} + self.soft_keywords: Dict[str, int] = {} + self.tokens: Dict[int, int] = {} + self.symbol2label: Dict[str, int] = {} + self.version: Tuple[int, int] = (0, 0) + self.start = 256 + # Python 3.7+ parses async as a keyword, not an identifier + self.async_keywords = False + + def dump(self, filename: Path) -> None: + """Dump the grammar tables to a pickle file.""" + + # mypyc generates objects that don't have a __dict__, but they + # do have __getstate__ methods that will return an equivalent + # dictionary + if hasattr(self, "__dict__"): + d = self.__dict__ + else: + d = self.__getstate__() # type: ignore + + with tempfile.NamedTemporaryFile( + dir=os.path.dirname(filename), delete=False + ) as f: + pickle.dump(d, f, pickle.HIGHEST_PROTOCOL) + os.replace(f.name, filename) + + def _update(self, attrs: Dict[str, Any]) -> None: + for k, v in attrs.items(): + setattr(self, k, v) + + def load(self, filename: Path) -> None: + """Load the grammar tables from a pickle file.""" + with open(filename, "rb") as f: + d = pickle.load(f) + self._update(d) + + def loads(self, pkl: bytes) -> None: + """Load the grammar tables from a pickle bytes object.""" + self._update(pickle.loads(pkl)) + + def copy(self: _P) -> _P: + """ + Copy the grammar. + """ + new = self.__class__() + for dict_attr in ( + "symbol2number", + "number2symbol", + "dfas", + "keywords", + "soft_keywords", + "tokens", + "symbol2label", + ): + setattr(new, dict_attr, getattr(self, dict_attr).copy()) + new.labels = self.labels[:] + new.states = self.states[:] + new.start = self.start + new.version = self.version + new.async_keywords = self.async_keywords + return new + + def report(self) -> None: + """Dump the grammar tables to standard output, for debugging.""" + from pprint import pprint + + print("s2n") + pprint(self.symbol2number) + print("n2s") + pprint(self.number2symbol) + print("states") + pprint(self.states) + print("dfas") + pprint(self.dfas) + print("labels") + pprint(self.labels) + print("start", self.start) + + +# Map from operator to number (since tokenize doesn't do this) + +opmap_raw = """ +( LPAR +) RPAR +[ LSQB +] RSQB +: COLON +, COMMA +; SEMI ++ PLUS +- MINUS +* STAR +/ SLASH +| VBAR +& AMPER +< LESS +> GREATER += EQUAL +. 
DOT +% PERCENT +` BACKQUOTE +{ LBRACE +} RBRACE +@ AT +@= ATEQUAL +== EQEQUAL +!= NOTEQUAL +<> NOTEQUAL +<= LESSEQUAL +>= GREATEREQUAL +~ TILDE +^ CIRCUMFLEX +<< LEFTSHIFT +>> RIGHTSHIFT +** DOUBLESTAR ++= PLUSEQUAL +-= MINEQUAL +*= STAREQUAL +/= SLASHEQUAL +%= PERCENTEQUAL +&= AMPEREQUAL +|= VBAREQUAL +^= CIRCUMFLEXEQUAL +<<= LEFTSHIFTEQUAL +>>= RIGHTSHIFTEQUAL +**= DOUBLESTAREQUAL +// DOUBLESLASH +//= DOUBLESLASHEQUAL +-> RARROW +:= COLONEQUAL +! BANG +""" + +opmap = {} +for line in opmap_raw.splitlines(): + if line: + op, name = line.split() + opmap[op] = getattr(token, name) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/literals.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/literals.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..f14b03cde Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/literals.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/literals.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/literals.py new file mode 100644 index 000000000..53c0b8ac2 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/literals.py @@ -0,0 +1,66 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Safely evaluate Python string literals without using eval().""" + +import re +from typing import Dict, Match + +simple_escapes: Dict[str, str] = { + "a": "\a", + "b": "\b", + "f": "\f", + "n": "\n", + "r": "\r", + "t": "\t", + "v": "\v", + "'": "'", + '"': '"', + "\\": "\\", +} + + +def escape(m: Match[str]) -> str: + all, tail = m.group(0, 1) + assert all.startswith("\\") + esc = simple_escapes.get(tail) + if esc is not None: + return esc + if tail.startswith("x"): + hexes = tail[1:] + if len(hexes) < 2: + raise ValueError("invalid hex string escape ('\\%s')" % tail) + try: + i = int(hexes, 16) + except ValueError: + raise ValueError("invalid hex string escape ('\\%s')" % tail) from None + else: + try: + i = int(tail, 8) + except ValueError: + raise ValueError("invalid octal string escape ('\\%s')" % tail) from None + return chr(i) + + +def evalString(s: str) -> str: + assert s.startswith("'") or s.startswith('"'), repr(s[:1]) + q = s[0] + if s[:3] == q * 3: + q = q * 3 + assert s.endswith(q), repr(s[-len(q) :]) + assert len(s) >= 2 * len(q) + s = s[len(q) : -len(q)] + return re.sub(r"\\(\'|\"|\\|[abfnrtv]|x.{0,2}|[0-7]{1,3})", escape, s) + + +def test() -> None: + for i in range(256): + c = chr(i) + s = repr(c) + e = evalString(s) + if e != c: + print(i, c, s, e) + + +if __name__ == "__main__": + test() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/parse.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/parse.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..13b0b918a Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/parse.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/parse.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/parse.py new file mode 100644 index 000000000..ad1d795b5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/parse.py @@ -0,0 +1,411 @@ +# Copyright 2004-2005 Elemental Security, 
Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Parser engine for the grammar tables generated by pgen. + +The grammar table must be loaded first. + +See Parser/parser.c in the Python distribution for additional info on +how this parsing engine works. + +""" +from contextlib import contextmanager +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterator, + List, + Optional, + Set, + Tuple, + Union, + cast, +) + +from blib2to3.pgen2.grammar import Grammar +from blib2to3.pytree import NL, Context, Leaf, Node, RawNode, convert + +# Local imports +from . import grammar, token, tokenize + +if TYPE_CHECKING: + from blib2to3.pgen2.driver import TokenProxy + + +Results = Dict[str, NL] +Convert = Callable[[Grammar, RawNode], Union[Node, Leaf]] +DFA = List[List[Tuple[int, int]]] +DFAS = Tuple[DFA, Dict[int, int]] + + +def lam_sub(grammar: Grammar, node: RawNode) -> NL: + assert node[3] is not None + return Node(type=node[0], children=node[3], context=node[2]) + + +# A placeholder node, used when parser is backtracking. +DUMMY_NODE = (-1, None, None, None) + + +def stack_copy( + stack: List[Tuple[DFAS, int, RawNode]], +) -> List[Tuple[DFAS, int, RawNode]]: + """Nodeless stack copy.""" + return [(dfa, label, DUMMY_NODE) for dfa, label, _ in stack] + + +class Recorder: + def __init__(self, parser: "Parser", ilabels: List[int], context: Context) -> None: + self.parser = parser + self._ilabels = ilabels + self.context = context # not really matter + + self._dead_ilabels: Set[int] = set() + self._start_point = self.parser.stack + self._points = {ilabel: stack_copy(self._start_point) for ilabel in ilabels} + + @property + def ilabels(self) -> Set[int]: + return self._dead_ilabels.symmetric_difference(self._ilabels) + + @contextmanager + def switch_to(self, ilabel: int) -> Iterator[None]: + with self.backtrack(): + self.parser.stack = self._points[ilabel] + try: + yield + except ParseError: + self._dead_ilabels.add(ilabel) + finally: + self.parser.stack = self._start_point + + @contextmanager + def backtrack(self) -> Iterator[None]: + """ + Use the node-level invariant ones for basic parsing operations (push/pop/shift). + These still will operate on the stack; but they won't create any new nodes, or + modify the contents of any other existing nodes. + + This saves us a ton of time when we are backtracking, since we + want to restore to the initial state as quick as possible, which + can only be done by having as little mutatations as possible. 
+ """ + is_backtracking = self.parser.is_backtracking + try: + self.parser.is_backtracking = True + yield + finally: + self.parser.is_backtracking = is_backtracking + + def add_token(self, tok_type: int, tok_val: str, raw: bool = False) -> None: + func: Callable[..., Any] + if raw: + func = self.parser._addtoken + else: + func = self.parser.addtoken + + for ilabel in self.ilabels: + with self.switch_to(ilabel): + args = [tok_type, tok_val, self.context] + if raw: + args.insert(0, ilabel) + func(*args) + + def determine_route( + self, value: Optional[str] = None, force: bool = False + ) -> Optional[int]: + alive_ilabels = self.ilabels + if len(alive_ilabels) == 0: + *_, most_successful_ilabel = self._dead_ilabels + raise ParseError("bad input", most_successful_ilabel, value, self.context) + + ilabel, *rest = alive_ilabels + if force or not rest: + return ilabel + else: + return None + + +class ParseError(Exception): + """Exception to signal the parser is stuck.""" + + def __init__( + self, msg: str, type: Optional[int], value: Optional[str], context: Context + ) -> None: + Exception.__init__( + self, f"{msg}: type={type!r}, value={value!r}, context={context!r}" + ) + self.msg = msg + self.type = type + self.value = value + self.context = context + + +class Parser: + """Parser engine. + + The proper usage sequence is: + + p = Parser(grammar, [converter]) # create instance + p.setup([start]) # prepare for parsing + : + if p.addtoken(...): # parse a token; may raise ParseError + break + root = p.rootnode # root of abstract syntax tree + + A Parser instance may be reused by calling setup() repeatedly. + + A Parser instance contains state pertaining to the current token + sequence, and should not be used concurrently by different threads + to parse separate token sequences. + + See driver.py for how to get input tokens by tokenizing a file or + string. + + Parsing is complete when addtoken() returns True; the root of the + abstract syntax tree can then be retrieved from the rootnode + instance variable. When a syntax error occurs, addtoken() raises + the ParseError exception. There is no error recovery; the parser + cannot be used after a syntax error was reported (but it can be + reinitialized by calling setup()). + + """ + + def __init__(self, grammar: Grammar, convert: Optional[Convert] = None) -> None: + """Constructor. + + The grammar argument is a grammar.Grammar instance; see the + grammar module for more information. + + The parser is not ready yet for parsing; you must call the + setup() method to get it started. + + The optional convert argument is a function mapping concrete + syntax tree nodes to abstract syntax tree nodes. If not + given, no conversion is done and the syntax tree produced is + the concrete syntax tree. If given, it must be a function of + two arguments, the first being the grammar (a grammar.Grammar + instance), and the second being the concrete syntax tree node + to be converted. The syntax tree is converted from the bottom + up. + + **post-note: the convert argument is ignored since for Black's + usage, convert will always be blib2to3.pytree.convert. Allowing + this to be dynamic hurts mypyc's ability to use early binding. + These docs are left for historical and informational value. 
+ + A concrete syntax tree node is a (type, value, context, nodes) + tuple, where type is the node type (a token or symbol number), + value is None for symbols and a string for tokens, context is + None or an opaque value used for error reporting (typically a + (lineno, offset) pair), and nodes is a list of children for + symbols, and None for tokens. + + An abstract syntax tree node may be anything; this is entirely + up to the converter function. + + """ + self.grammar = grammar + # See note in docstring above. TL;DR this is ignored. + self.convert = convert or lam_sub + self.is_backtracking = False + self.last_token: Optional[int] = None + + def setup(self, proxy: "TokenProxy", start: Optional[int] = None) -> None: + """Prepare for parsing. + + This *must* be called before starting to parse. + + The optional argument is an alternative start symbol; it + defaults to the grammar's start symbol. + + You can use a Parser instance to parse any number of programs; + each time you call setup() the parser is reset to an initial + state determined by the (implicit or explicit) start symbol. + + """ + if start is None: + start = self.grammar.start + # Each stack entry is a tuple: (dfa, state, node). + # A node is a tuple: (type, value, context, children), + # where children is a list of nodes or None, and context may be None. + newnode: RawNode = (start, None, None, []) + stackentry = (self.grammar.dfas[start], 0, newnode) + self.stack: List[Tuple[DFAS, int, RawNode]] = [stackentry] + self.rootnode: Optional[NL] = None + self.used_names: Set[str] = set() + self.proxy = proxy + self.last_token = None + + def addtoken(self, type: int, value: str, context: Context) -> bool: + """Add a token; return True iff this is the end of the program.""" + # Map from token to label + ilabels = self.classify(type, value, context) + assert len(ilabels) >= 1 + + # If we have only one state to advance, we'll directly + # take it as is. + if len(ilabels) == 1: + [ilabel] = ilabels + return self._addtoken(ilabel, type, value, context) + + # If there are multiple states which we can advance (only + # happen under soft-keywords), then we will try all of them + # in parallel and as soon as one state can reach further than + # the rest, we'll choose that one. This is a pretty hacky + # and hopefully temporary algorithm. 
+ # + # For a more detailed explanation, check out this post: + # https://tree.science/what-the-backtracking.html + + with self.proxy.release() as proxy: + counter, force = 0, False + recorder = Recorder(self, ilabels, context) + recorder.add_token(type, value, raw=True) + + next_token_value = value + while recorder.determine_route(next_token_value) is None: + if not proxy.can_advance(counter): + force = True + break + + next_token_type, next_token_value, *_ = proxy.eat(counter) + if next_token_type in (tokenize.COMMENT, tokenize.NL): + counter += 1 + continue + + if next_token_type == tokenize.OP: + next_token_type = grammar.opmap[next_token_value] + + recorder.add_token(next_token_type, next_token_value) + counter += 1 + + ilabel = cast(int, recorder.determine_route(next_token_value, force=force)) + assert ilabel is not None + + return self._addtoken(ilabel, type, value, context) + + def _addtoken(self, ilabel: int, type: int, value: str, context: Context) -> bool: + # Loop until the token is shifted; may raise exceptions + while True: + dfa, state, node = self.stack[-1] + states, first = dfa + arcs = states[state] + # Look for a state with this label + for i, newstate in arcs: + t = self.grammar.labels[i][0] + if t >= 256: + # See if it's a symbol and if we're in its first set + itsdfa = self.grammar.dfas[t] + itsstates, itsfirst = itsdfa + if ilabel in itsfirst: + # Push a symbol + self.push(t, itsdfa, newstate, context) + break # To continue the outer while loop + + elif ilabel == i: + # Look it up in the list of labels + # Shift a token; we're done with it + self.shift(type, value, newstate, context) + # Pop while we are in an accept-only state + state = newstate + while states[state] == [(0, state)]: + self.pop() + if not self.stack: + # Done parsing! + return True + dfa, state, node = self.stack[-1] + states, first = dfa + # Done with this token + self.last_token = type + return False + + else: + if (0, state) in arcs: + # An accepting state, pop it and try something else + self.pop() + if not self.stack: + # Done parsing, but another token is input + raise ParseError("too much input", type, value, context) + else: + # No success finding a transition + raise ParseError("bad input", type, value, context) + + def classify(self, type: int, value: str, context: Context) -> List[int]: + """Turn a token into a label. (Internal) + + Depending on whether the value is a soft-keyword or not, + this function may return multiple labels to choose from.""" + if type == token.NAME: + # Keep a listing of all used names + self.used_names.add(value) + # Check for reserved words + if value in self.grammar.keywords: + return [self.grammar.keywords[value]] + elif value in self.grammar.soft_keywords: + assert type in self.grammar.tokens + # Current soft keywords (match, case, type) can only appear at the + # beginning of a statement. So as a shortcut, don't try to treat them + # like keywords in any other context. + # ('_' is also a soft keyword in the real grammar, but for our grammar + # it's just an expression, so we don't need to treat it specially.) 
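        # (Editor's note, not part of the upstream file.) Concretely: at the
        # start of a statement, a NAME like "match" classifies to *two*
        # labels -- the plain NAME token label and the soft-keyword label --
        # and addtoken()'s Recorder then advances both candidate parses in
        # parallel. For "match point:" the soft-keyword route survives; for
        # "match = 1" only the plain-NAME route does, and determine_route()
        # settles on it once the lookahead diverges.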
+ if self.last_token not in ( + None, + token.INDENT, + token.DEDENT, + token.NEWLINE, + token.SEMI, + token.COLON, + ): + return [self.grammar.tokens[type]] + return [ + self.grammar.tokens[type], + self.grammar.soft_keywords[value], + ] + + ilabel = self.grammar.tokens.get(type) + if ilabel is None: + raise ParseError("bad token", type, value, context) + return [ilabel] + + def shift(self, type: int, value: str, newstate: int, context: Context) -> None: + """Shift a token. (Internal)""" + if self.is_backtracking: + dfa, state, _ = self.stack[-1] + self.stack[-1] = (dfa, newstate, DUMMY_NODE) + else: + dfa, state, node = self.stack[-1] + rawnode: RawNode = (type, value, context, None) + newnode = convert(self.grammar, rawnode) + assert node[-1] is not None + node[-1].append(newnode) + self.stack[-1] = (dfa, newstate, node) + + def push(self, type: int, newdfa: DFAS, newstate: int, context: Context) -> None: + """Push a nonterminal. (Internal)""" + if self.is_backtracking: + dfa, state, _ = self.stack[-1] + self.stack[-1] = (dfa, newstate, DUMMY_NODE) + self.stack.append((newdfa, 0, DUMMY_NODE)) + else: + dfa, state, node = self.stack[-1] + newnode: RawNode = (type, None, context, []) + self.stack[-1] = (dfa, newstate, node) + self.stack.append((newdfa, 0, newnode)) + + def pop(self) -> None: + """Pop a nonterminal. (Internal)""" + if self.is_backtracking: + self.stack.pop() + else: + popdfa, popstate, popnode = self.stack.pop() + newnode = convert(self.grammar, popnode) + if self.stack: + dfa, state, node = self.stack[-1] + assert node[-1] is not None + node[-1].append(newnode) + else: + self.rootnode = newnode + self.rootnode.used_names = self.used_names diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/pgen.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/pgen.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..bbf1b8c2f Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/pgen.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/pgen.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/pgen.py new file mode 100644 index 000000000..3ece9bb41 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/pgen.py @@ -0,0 +1,428 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. 
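Taken as a whole, this module turns a grammar text file into the parsing tables used above. A minimal usage sketch (editor's illustration, not part of the vendored file; it assumes a Grammar.txt file in pgen's notation is available):

    from blib2to3.pgen2 import pgen

    # generate_grammar() (defined at the bottom of this file) runs the full
    # pipeline: tokenize the grammar, build an NFA per rule, convert each NFA
    # to a DFA and simplify it, compute FIRST sets, then pack everything into
    # a PgenGrammar via make_grammar().
    g = pgen.generate_grammar("Grammar.txt")
    print(len(g.symbol2number), "nonterminals,", len(g.labels), "labels")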
+ +import os +from typing import ( + IO, + Any, + Dict, + Iterator, + List, + NoReturn, + Optional, + Sequence, + Tuple, + Union, +) + +from blib2to3.pgen2 import grammar, token, tokenize +from blib2to3.pgen2.tokenize import GoodTokenInfo + +Path = Union[str, "os.PathLike[str]"] + + +class PgenGrammar(grammar.Grammar): + pass + + +class ParserGenerator: + filename: Path + stream: IO[str] + generator: Iterator[GoodTokenInfo] + first: Dict[str, Optional[Dict[str, int]]] + + def __init__(self, filename: Path, stream: Optional[IO[str]] = None) -> None: + close_stream = None + if stream is None: + stream = open(filename, encoding="utf-8") + close_stream = stream.close + self.filename = filename + self.stream = stream + self.generator = tokenize.generate_tokens(stream.readline) + self.gettoken() # Initialize lookahead + self.dfas, self.startsymbol = self.parse() + if close_stream is not None: + close_stream() + self.first = {} # map from symbol name to set of tokens + self.addfirstsets() + + def make_grammar(self) -> PgenGrammar: + c = PgenGrammar() + names = list(self.dfas.keys()) + names.sort() + names.remove(self.startsymbol) + names.insert(0, self.startsymbol) + for name in names: + i = 256 + len(c.symbol2number) + c.symbol2number[name] = i + c.number2symbol[i] = name + for name in names: + dfa = self.dfas[name] + states = [] + for state in dfa: + arcs = [] + for label, next in sorted(state.arcs.items()): + arcs.append((self.make_label(c, label), dfa.index(next))) + if state.isfinal: + arcs.append((0, dfa.index(state))) + states.append(arcs) + c.states.append(states) + c.dfas[c.symbol2number[name]] = (states, self.make_first(c, name)) + c.start = c.symbol2number[self.startsymbol] + return c + + def make_first(self, c: PgenGrammar, name: str) -> Dict[int, int]: + rawfirst = self.first[name] + assert rawfirst is not None + first = {} + for label in sorted(rawfirst): + ilabel = self.make_label(c, label) + ##assert ilabel not in first # XXX failed on <> ... != + first[ilabel] = 1 + return first + + def make_label(self, c: PgenGrammar, label: str) -> int: + # XXX Maybe this should be a method on a subclass of converter? 
+ ilabel = len(c.labels) + if label[0].isalpha(): + # Either a symbol name or a named token + if label in c.symbol2number: + # A symbol name (a non-terminal) + if label in c.symbol2label: + return c.symbol2label[label] + else: + c.labels.append((c.symbol2number[label], None)) + c.symbol2label[label] = ilabel + return ilabel + else: + # A named token (NAME, NUMBER, STRING) + itoken = getattr(token, label, None) + assert isinstance(itoken, int), label + assert itoken in token.tok_name, label + if itoken in c.tokens: + return c.tokens[itoken] + else: + c.labels.append((itoken, None)) + c.tokens[itoken] = ilabel + return ilabel + else: + # Either a keyword or an operator + assert label[0] in ('"', "'"), label + value = eval(label) + if value[0].isalpha(): + if label[0] == '"': + keywords = c.soft_keywords + else: + keywords = c.keywords + + # A keyword + if value in keywords: + return keywords[value] + else: + c.labels.append((token.NAME, value)) + keywords[value] = ilabel + return ilabel + else: + # An operator (any non-numeric token) + itoken = grammar.opmap[value] # Fails if unknown token + if itoken in c.tokens: + return c.tokens[itoken] + else: + c.labels.append((itoken, None)) + c.tokens[itoken] = ilabel + return ilabel + + def addfirstsets(self) -> None: + names = list(self.dfas.keys()) + names.sort() + for name in names: + if name not in self.first: + self.calcfirst(name) + # print name, self.first[name].keys() + + def calcfirst(self, name: str) -> None: + dfa = self.dfas[name] + self.first[name] = None # dummy to detect left recursion + state = dfa[0] + totalset: Dict[str, int] = {} + overlapcheck = {} + for label in state.arcs: + if label in self.dfas: + if label in self.first: + fset = self.first[label] + if fset is None: + raise ValueError("recursion for rule %r" % name) + else: + self.calcfirst(label) + fset = self.first[label] + assert fset is not None + totalset.update(fset) + overlapcheck[label] = fset + else: + totalset[label] = 1 + overlapcheck[label] = {label: 1} + inverse: Dict[str, str] = {} + for label, itsfirst in overlapcheck.items(): + for symbol in itsfirst: + if symbol in inverse: + raise ValueError( + "rule %s is ambiguous; %s is in the first sets of %s as well" + " as %s" % (name, symbol, label, inverse[symbol]) + ) + inverse[symbol] = label + self.first[name] = totalset + + def parse(self) -> Tuple[Dict[str, List["DFAState"]], str]: + dfas = {} + startsymbol: Optional[str] = None + # MSTART: (NEWLINE | RULE)* ENDMARKER + while self.type != token.ENDMARKER: + while self.type == token.NEWLINE: + self.gettoken() + # RULE: NAME ':' RHS NEWLINE + name = self.expect(token.NAME) + self.expect(token.OP, ":") + a, z = self.parse_rhs() + self.expect(token.NEWLINE) + # self.dump_nfa(name, a, z) + dfa = self.make_dfa(a, z) + # self.dump_dfa(name, dfa) + # oldlen = len(dfa) + self.simplify_dfa(dfa) + # newlen = len(dfa) + dfas[name] = dfa + # print name, oldlen, newlen + if startsymbol is None: + startsymbol = name + assert startsymbol is not None + return dfas, startsymbol + + def make_dfa(self, start: "NFAState", finish: "NFAState") -> List["DFAState"]: + # To turn an NFA into a DFA, we define the states of the DFA + # to correspond to *sets* of states of the NFA. Then do some + # state reduction. Let's represent sets as dicts with 1 for + # values. 
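        # (Editor's illustration, not part of the upstream file.) For
        # example, if NFA state A has an epsilon arc (label None) to B, and B
        # has an arc labeled "NAME" to C, then closure({A}) is the dict
        # {A: 1, B: 1}; the DFA state built from that set gets a single arc
        # labeled "NAME" leading to the DFA state for closure({C}) == {C: 1}.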
+ assert isinstance(start, NFAState) + assert isinstance(finish, NFAState) + + def closure(state: NFAState) -> Dict[NFAState, int]: + base: Dict[NFAState, int] = {} + addclosure(state, base) + return base + + def addclosure(state: NFAState, base: Dict[NFAState, int]) -> None: + assert isinstance(state, NFAState) + if state in base: + return + base[state] = 1 + for label, next in state.arcs: + if label is None: + addclosure(next, base) + + states = [DFAState(closure(start), finish)] + for state in states: # NB states grows while we're iterating + arcs: Dict[str, Dict[NFAState, int]] = {} + for nfastate in state.nfaset: + for label, next in nfastate.arcs: + if label is not None: + addclosure(next, arcs.setdefault(label, {})) + for label, nfaset in sorted(arcs.items()): + for st in states: + if st.nfaset == nfaset: + break + else: + st = DFAState(nfaset, finish) + states.append(st) + state.addarc(st, label) + return states # List of DFAState instances; first one is start + + def dump_nfa(self, name: str, start: "NFAState", finish: "NFAState") -> None: + print("Dump of NFA for", name) + todo = [start] + for i, state in enumerate(todo): + print(" State", i, state is finish and "(final)" or "") + for label, next in state.arcs: + if next in todo: + j = todo.index(next) + else: + j = len(todo) + todo.append(next) + if label is None: + print(" -> %d" % j) + else: + print(" %s -> %d" % (label, j)) + + def dump_dfa(self, name: str, dfa: Sequence["DFAState"]) -> None: + print("Dump of DFA for", name) + for i, state in enumerate(dfa): + print(" State", i, state.isfinal and "(final)" or "") + for label, next in sorted(state.arcs.items()): + print(" %s -> %d" % (label, dfa.index(next))) + + def simplify_dfa(self, dfa: List["DFAState"]) -> None: + # This is not theoretically optimal, but works well enough. + # Algorithm: repeatedly look for two states that have the same + # set of arcs (same labels pointing to the same nodes) and + # unify them, until things stop changing. 
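        # (Editor's note, not part of the upstream file.) "Same set of arcs"
        # is DFAState.__eq__ below: identical finality and, label for label,
        # arcs pointing to the very same successor objects. Each duplicate
        # found is deleted and the remaining states are re-pointed at the
        # survivor via unifystate(), which is what lets further pairs
        # collapse on later passes.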
+ + # dfa is a list of DFAState instances + changes = True + while changes: + changes = False + for i, state_i in enumerate(dfa): + for j in range(i + 1, len(dfa)): + state_j = dfa[j] + if state_i == state_j: + # print " unify", i, j + del dfa[j] + for state in dfa: + state.unifystate(state_j, state_i) + changes = True + break + + def parse_rhs(self) -> Tuple["NFAState", "NFAState"]: + # RHS: ALT ('|' ALT)* + a, z = self.parse_alt() + if self.value != "|": + return a, z + else: + aa = NFAState() + zz = NFAState() + aa.addarc(a) + z.addarc(zz) + while self.value == "|": + self.gettoken() + a, z = self.parse_alt() + aa.addarc(a) + z.addarc(zz) + return aa, zz + + def parse_alt(self) -> Tuple["NFAState", "NFAState"]: + # ALT: ITEM+ + a, b = self.parse_item() + while self.value in ("(", "[") or self.type in (token.NAME, token.STRING): + c, d = self.parse_item() + b.addarc(c) + b = d + return a, b + + def parse_item(self) -> Tuple["NFAState", "NFAState"]: + # ITEM: '[' RHS ']' | ATOM ['+' | '*'] + if self.value == "[": + self.gettoken() + a, z = self.parse_rhs() + self.expect(token.OP, "]") + a.addarc(z) + return a, z + else: + a, z = self.parse_atom() + value = self.value + if value not in ("+", "*"): + return a, z + self.gettoken() + z.addarc(a) + if value == "+": + return a, z + else: + return a, a + + def parse_atom(self) -> Tuple["NFAState", "NFAState"]: + # ATOM: '(' RHS ')' | NAME | STRING + if self.value == "(": + self.gettoken() + a, z = self.parse_rhs() + self.expect(token.OP, ")") + return a, z + elif self.type in (token.NAME, token.STRING): + a = NFAState() + z = NFAState() + a.addarc(z, self.value) + self.gettoken() + return a, z + else: + self.raise_error( + "expected (...) or NAME or STRING, got %s/%s", self.type, self.value + ) + raise AssertionError + + def expect(self, type: int, value: Optional[Any] = None) -> str: + if self.type != type or (value is not None and self.value != value): + self.raise_error( + "expected %s/%s, got %s/%s", type, value, self.type, self.value + ) + value = self.value + self.gettoken() + return value + + def gettoken(self) -> None: + tup = next(self.generator) + while tup[0] in (tokenize.COMMENT, tokenize.NL): + tup = next(self.generator) + self.type, self.value, self.begin, self.end, self.line = tup + # print token.tok_name[self.type], repr(self.value) + + def raise_error(self, msg: str, *args: Any) -> NoReturn: + if args: + try: + msg = msg % args + except Exception: + msg = " ".join([msg] + list(map(str, args))) + raise SyntaxError(msg, (self.filename, self.end[0], self.end[1], self.line)) + + +class NFAState: + arcs: List[Tuple[Optional[str], "NFAState"]] + + def __init__(self) -> None: + self.arcs = [] # list of (label, NFAState) pairs + + def addarc(self, next: "NFAState", label: Optional[str] = None) -> None: + assert label is None or isinstance(label, str) + assert isinstance(next, NFAState) + self.arcs.append((label, next)) + + +class DFAState: + nfaset: Dict[NFAState, Any] + isfinal: bool + arcs: Dict[str, "DFAState"] + + def __init__(self, nfaset: Dict[NFAState, Any], final: NFAState) -> None: + assert isinstance(nfaset, dict) + assert isinstance(next(iter(nfaset)), NFAState) + assert isinstance(final, NFAState) + self.nfaset = nfaset + self.isfinal = final in nfaset + self.arcs = {} # map from label to DFAState + + def addarc(self, next: "DFAState", label: str) -> None: + assert isinstance(label, str) + assert label not in self.arcs + assert isinstance(next, DFAState) + self.arcs[label] = next + + def unifystate(self, old: "DFAState", 
new: "DFAState") -> None: + for label, next in self.arcs.items(): + if next is old: + self.arcs[label] = new + + def __eq__(self, other: Any) -> bool: + # Equality test -- ignore the nfaset instance variable + assert isinstance(other, DFAState) + if self.isfinal != other.isfinal: + return False + # Can't just return self.arcs == other.arcs, because that + # would invoke this method recursively, with cycles... + if len(self.arcs) != len(other.arcs): + return False + for label, next in self.arcs.items(): + if next is not other.arcs.get(label): + return False + return True + + __hash__: Any = None # For Py3 compatibility. + + +def generate_grammar(filename: Path = "Grammar.txt") -> PgenGrammar: + p = ParserGenerator(filename) + return p.make_grammar() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/token.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/token.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..2bf3414f6 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/token.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/token.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/token.py new file mode 100644 index 000000000..3068c3157 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/token.py @@ -0,0 +1,92 @@ +"""Token constants (from "token.h").""" + +from typing import Dict, Final + +# Taken from Python (r53757) and modified to include some tokens +# originally monkeypatched in by pgen2.tokenize + +# --start constants-- +ENDMARKER: Final = 0 +NAME: Final = 1 +NUMBER: Final = 2 +STRING: Final = 3 +NEWLINE: Final = 4 +INDENT: Final = 5 +DEDENT: Final = 6 +LPAR: Final = 7 +RPAR: Final = 8 +LSQB: Final = 9 +RSQB: Final = 10 +COLON: Final = 11 +COMMA: Final = 12 +SEMI: Final = 13 +PLUS: Final = 14 +MINUS: Final = 15 +STAR: Final = 16 +SLASH: Final = 17 +VBAR: Final = 18 +AMPER: Final = 19 +LESS: Final = 20 +GREATER: Final = 21 +EQUAL: Final = 22 +DOT: Final = 23 +PERCENT: Final = 24 +BACKQUOTE: Final = 25 +LBRACE: Final = 26 +RBRACE: Final = 27 +EQEQUAL: Final = 28 +NOTEQUAL: Final = 29 +LESSEQUAL: Final = 30 +GREATEREQUAL: Final = 31 +TILDE: Final = 32 +CIRCUMFLEX: Final = 33 +LEFTSHIFT: Final = 34 +RIGHTSHIFT: Final = 35 +DOUBLESTAR: Final = 36 +PLUSEQUAL: Final = 37 +MINEQUAL: Final = 38 +STAREQUAL: Final = 39 +SLASHEQUAL: Final = 40 +PERCENTEQUAL: Final = 41 +AMPEREQUAL: Final = 42 +VBAREQUAL: Final = 43 +CIRCUMFLEXEQUAL: Final = 44 +LEFTSHIFTEQUAL: Final = 45 +RIGHTSHIFTEQUAL: Final = 46 +DOUBLESTAREQUAL: Final = 47 +DOUBLESLASH: Final = 48 +DOUBLESLASHEQUAL: Final = 49 +AT: Final = 50 +ATEQUAL: Final = 51 +OP: Final = 52 +COMMENT: Final = 53 +NL: Final = 54 +RARROW: Final = 55 +AWAIT: Final = 56 +ASYNC: Final = 57 +ERRORTOKEN: Final = 58 +COLONEQUAL: Final = 59 +FSTRING_START: Final = 60 +FSTRING_MIDDLE: Final = 61 +FSTRING_END: Final = 62 +BANG: Final = 63 +N_TOKENS: Final = 64 +NT_OFFSET: Final = 256 +# --end constants-- + +tok_name: Final[Dict[int, str]] = {} +for _name, _value in list(globals().items()): + if type(_value) is int: + tok_name[_value] = _name + + +def ISTERMINAL(x: int) -> bool: + return x < NT_OFFSET + + +def ISNONTERMINAL(x: int) -> bool: + return x >= NT_OFFSET + + +def ISEOF(x: int) -> bool: + return x == ENDMARKER diff --git 
a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/tokenize.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/tokenize.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..8dbb20f41 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/tokenize.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/tokenize.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/tokenize.py new file mode 100644 index 000000000..f66087bd0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pgen2/tokenize.py @@ -0,0 +1,1117 @@ +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation. +# All rights reserved. + +# mypy: allow-untyped-defs, allow-untyped-calls + +"""Tokenization help for Python programs. + +generate_tokens(readline) is a generator that breaks a stream of +text into Python tokens. It accepts a readline-like method which is called +repeatedly to get the next line of input (or "" for EOF). It generates +5-tuples with these members: + + the token type (see token.py) + the token (a string) + the starting (row, column) indices of the token (a 2-tuple of ints) + the ending (row, column) indices of the token (a 2-tuple of ints) + the original line (string) + +It is designed to match the working of the Python tokenizer exactly, except +that it produces COMMENT tokens for comments and gives type OP for all +operators + +Older entry points + tokenize_loop(readline, tokeneater) + tokenize(readline, tokeneater=printtoken) +are the same, except instead of generating tokens, tokeneater is a callback +function to which the 5 fields described above are passed as 5 arguments, +each time a new token is found.""" + +import builtins +import sys +from typing import ( + Callable, + Final, + Iterable, + Iterator, + List, + Optional, + Pattern, + Set, + Tuple, + Union, +) + +from blib2to3.pgen2.grammar import Grammar +from blib2to3.pgen2.token import ( + ASYNC, + AWAIT, + COMMENT, + DEDENT, + ENDMARKER, + ERRORTOKEN, + FSTRING_END, + FSTRING_MIDDLE, + FSTRING_START, + INDENT, + LBRACE, + NAME, + NEWLINE, + NL, + NUMBER, + OP, + RBRACE, + STRING, + tok_name, +) + +__author__ = "Ka-Ping Yee " +__credits__ = "GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro" + +import re +from codecs import BOM_UTF8, lookup + +from . import token + +__all__ = [x for x in dir(token) if x[0] != "_"] + [ + "tokenize", + "generate_tokens", + "untokenize", +] +del token + + +def group(*choices: str) -> str: + return "(" + "|".join(choices) + ")" + + +def any(*choices: str) -> str: + return group(*choices) + "*" + + +def maybe(*choices: str) -> str: + return group(*choices) + "?" + + +def _combinations(*l: str) -> Set[str]: + return {x + y for x in l for y in l + ("",) if x.casefold() != y.casefold()} + + +Whitespace = r"[ \f\t]*" +Comment = r"#[^\r\n]*" +Ignore = Whitespace + any(r"\\\r?\n" + Whitespace) + maybe(Comment) +Name = ( # this is invalid but it's fine because Name comes after Number in all groups + r"[^\s#\(\)\[\]\{\}+\-*/!@$%^&=|;:'\",\.<>/?`~\\]+" +) + +Binnumber = r"0[bB]_?[01]+(?:_[01]+)*" +Hexnumber = r"0[xX]_?[\da-fA-F]+(?:_[\da-fA-F]+)*[lL]?" +Octnumber = r"0[oO]?_?[0-7]+(?:_[0-7]+)*[lL]?" 
+Decnumber = group(r"[1-9]\d*(?:_\d+)*[lL]?", "0[lL]?") +Intnumber = group(Binnumber, Hexnumber, Octnumber, Decnumber) +Exponent = r"[eE][-+]?\d+(?:_\d+)*" +Pointfloat = group(r"\d+(?:_\d+)*\.(?:\d+(?:_\d+)*)?", r"\.\d+(?:_\d+)*") + maybe( + Exponent +) +Expfloat = r"\d+(?:_\d+)*" + Exponent +Floatnumber = group(Pointfloat, Expfloat) +Imagnumber = group(r"\d+(?:_\d+)*[jJ]", Floatnumber + r"[jJ]") +Number = group(Imagnumber, Floatnumber, Intnumber) + +# Tail end of ' string. +Single = r"(?:\\.|[^'\\])*'" +# Tail end of " string. +Double = r'(?:\\.|[^"\\])*"' +# Tail end of ''' string. +Single3 = r"(?:\\.|'(?!'')|[^'\\])*'''" +# Tail end of """ string. +Double3 = r'(?:\\.|"(?!"")|[^"\\])*"""' +_litprefix = r"(?:[uUrRbB]|[rR][bB]|[bBuU][rR])?" +_fstringlitprefix = r"(?:rF|FR|Fr|fr|RF|F|rf|f|Rf|fR)" +Triple = group( + _litprefix + "'''", + _litprefix + '"""', + _fstringlitprefix + '"""', + _fstringlitprefix + "'''", +) + +# beginning of a single quoted f-string. must not end with `{{` or `\N{` +SingleLbrace = r"(?:\\N{|\\.|{{|[^'\\{])*(?>=?", + r"<<=?", + r"<>", + r"!=", + r"//=?", + r"->", + r"[+\-*/%&@|^=<>:]=?", + r"~", +) + +Bracket = "[][(){}]" +Special = group(r"\r?\n", r"[:;.,`@]") +Funny = group(Operator, Bracket, Special) + +_string_middle_single = r"(?:[^\n'\\]|\\.)*" +_string_middle_double = r'(?:[^\n"\\]|\\.)*' + +# FSTRING_MIDDLE and LBRACE, must not end with a `{{` or `\N{` +_fstring_middle_single = r"(?:\\N{|\\[^{]|{{|[^\n'{\\])*(? None: # for testing + (srow, scol) = srow_col + (erow, ecol) = erow_col + print( + "%d,%d-%d,%d:\t%s\t%s" % (srow, scol, erow, ecol, tok_name[type], repr(token)) + ) + + +TokenEater = Callable[[int, str, Coord, Coord, str], None] + + +def tokenize(readline: Callable[[], str], tokeneater: TokenEater = printtoken) -> None: + """ + The tokenize() function accepts two parameters: one representing the + input stream, and one providing an output mechanism for tokenize(). + + The first parameter, readline, must be a callable object which provides + the same interface as the readline() method of built-in file objects. + Each call to the function should return one line of input as a string. + + The second parameter, tokeneater, must also be a callable object. It is + called once for each token, with five arguments, corresponding to the + tuples generated by generate_tokens(). 
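A hedged usage sketch of that callback interface, assuming the vendored package is importable as blib2to3.pgen2.tokenize::

    import io
    from blib2to3.pgen2 import tokenize as bt

    def eater(ttype, tok, start, end, line):
        # called once per token with the same five fields generate_tokens() yields
        print(bt.tok_name[ttype], repr(tok), start, end)

    bt.tokenize(io.StringIO("x = 1\n").readline, eater)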
+ """ + try: + tokenize_loop(readline, tokeneater) + except StopTokenizing: + pass + + +# backwards compatible interface +def tokenize_loop(readline: Callable[[], str], tokeneater: TokenEater) -> None: + for token_info in generate_tokens(readline): + tokeneater(*token_info) + + +GoodTokenInfo = Tuple[int, str, Coord, Coord, str] +TokenInfo = Union[Tuple[int, str], GoodTokenInfo] + + +class Untokenizer: + tokens: List[str] + prev_row: int + prev_col: int + + def __init__(self) -> None: + self.tokens = [] + self.prev_row = 1 + self.prev_col = 0 + + def add_whitespace(self, start: Coord) -> None: + row, col = start + assert row <= self.prev_row + col_offset = col - self.prev_col + if col_offset: + self.tokens.append(" " * col_offset) + + def untokenize(self, iterable: Iterable[TokenInfo]) -> str: + for t in iterable: + if len(t) == 2: + self.compat(t, iterable) + break + tok_type, token, start, end, line = t + self.add_whitespace(start) + self.tokens.append(token) + self.prev_row, self.prev_col = end + if tok_type in (NEWLINE, NL): + self.prev_row += 1 + self.prev_col = 0 + return "".join(self.tokens) + + def compat(self, token: Tuple[int, str], iterable: Iterable[TokenInfo]) -> None: + startline = False + indents = [] + toks_append = self.tokens.append + toknum, tokval = token + if toknum in (NAME, NUMBER): + tokval += " " + if toknum in (NEWLINE, NL): + startline = True + for tok in iterable: + toknum, tokval = tok[:2] + + if toknum in (NAME, NUMBER, ASYNC, AWAIT): + tokval += " " + + if toknum == INDENT: + indents.append(tokval) + continue + elif toknum == DEDENT: + indents.pop() + continue + elif toknum in (NEWLINE, NL): + startline = True + elif startline and indents: + toks_append(indents[-1]) + startline = False + toks_append(tokval) + + +cookie_re = re.compile(r"^[ \t\f]*#.*?coding[:=][ \t]*([-\w.]+)", re.ASCII) +blank_re = re.compile(rb"^[ \t\f]*(?:[#\r\n]|$)", re.ASCII) + + +def _get_normal_name(orig_enc: str) -> str: + """Imitates get_normal_name in tokenizer.c.""" + # Only care about the first 12 characters. + enc = orig_enc[:12].lower().replace("_", "-") + if enc == "utf-8" or enc.startswith("utf-8-"): + return "utf-8" + if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or enc.startswith( + ("latin-1-", "iso-8859-1-", "iso-latin-1-") + ): + return "iso-8859-1" + return orig_enc + + +def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, List[bytes]]: + """ + The detect_encoding() function is used to detect the encoding that should + be used to decode a Python source file. It requires one argument, readline, + in the same way as the tokenize() generator. + + It will call readline a maximum of twice, and return the encoding used + (as a string) and a list of any lines (left as bytes) it has read + in. + + It detects the encoding from the presence of a utf-8 bom or an encoding + cookie as specified in pep-0263. If both a bom and a cookie are present, but + disagree, a SyntaxError will be raised. If the encoding cookie is an invalid + charset, raise a SyntaxError. Note that if a utf-8 bom is found, + 'utf-8-sig' is returned. + + If no encoding is specified, then the default of 'utf-8' will be returned. 
+ """ + bom_found = False + encoding = None + default = "utf-8" + + def read_or_stop() -> bytes: + try: + return readline() + except StopIteration: + return b"" + + def find_cookie(line: bytes) -> Optional[str]: + try: + line_string = line.decode("ascii") + except UnicodeDecodeError: + return None + match = cookie_re.match(line_string) + if not match: + return None + encoding = _get_normal_name(match.group(1)) + try: + codec = lookup(encoding) + except LookupError: + # This behaviour mimics the Python interpreter + raise SyntaxError("unknown encoding: " + encoding) + + if bom_found: + if codec.name != "utf-8": + # This behaviour mimics the Python interpreter + raise SyntaxError("encoding problem: utf-8") + encoding += "-sig" + return encoding + + first = read_or_stop() + if first.startswith(BOM_UTF8): + bom_found = True + first = first[3:] + default = "utf-8-sig" + if not first: + return default, [] + + encoding = find_cookie(first) + if encoding: + return encoding, [first] + if not blank_re.match(first): + return default, [first] + + second = read_or_stop() + if not second: + return default, [first] + + encoding = find_cookie(second) + if encoding: + return encoding, [first, second] + + return default, [first, second] + + +def untokenize(iterable: Iterable[TokenInfo]) -> str: + """Transform tokens back into Python source code. + + Each element returned by the iterable must be a token sequence + with at least two elements, a token number and token value. If + only two tokens are passed, the resulting output is poor. + + Round-trip invariant for full input: + Untokenized source will match input source exactly + + Round-trip invariant for limited input: + # Output text will tokenize the back to the input + t1 = [tok[:2] for tok in generate_tokens(f.readline)] + newcode = untokenize(t1) + readline = iter(newcode.splitlines(1)).next + t2 = [tok[:2] for tokin generate_tokens(readline)] + assert t1 == t2 + """ + ut = Untokenizer() + return ut.untokenize(iterable) + + +def is_fstring_start(token: str) -> bool: + return builtins.any(token.startswith(prefix) for prefix in fstring_prefix) + + +def _split_fstring_start_and_middle(token: str) -> Tuple[str, str]: + for prefix in fstring_prefix: + _, prefix, rest = token.partition(prefix) + if prefix != "": + return prefix, rest + + raise ValueError(f"Token {token!r} is not a valid f-string start") + + +STATE_NOT_FSTRING: Final = 0 # not in an f-string +STATE_MIDDLE: Final = 1 # in the string portion of an f-string (outside braces) +STATE_IN_BRACES: Final = 2 # between braces in an f-string +# in the format specifier (between the colon and the closing brace) +STATE_IN_COLON: Final = 3 + + +class FStringState: + """Keeps track of state around f-strings. + + The tokenizer should call the appropriate method on this class when + it transitions to a different part of an f-string. This is needed + because the tokenization depends on knowing where exactly we are in + the f-string. 
+ + For example, consider the following f-string: + + f"a{1:b{2}c}d" + + The following is the tokenization of this string and the states + tracked by this class: + + 1,0-1,2: FSTRING_START 'f"' # [STATE_NOT_FSTRING, STATE_MIDDLE] + 1,2-1,3: FSTRING_MIDDLE 'a' + 1,3-1,4: LBRACE '{' # [STATE_NOT_FSTRING, STATE_IN_BRACES] + 1,4-1,5: NUMBER '1' + 1,5-1,6: OP ':' # [STATE_NOT_FSTRING, STATE_IN_COLON] + 1,6-1,7: FSTRING_MIDDLE 'b' + 1,7-1,8: LBRACE '{' # [STATE_NOT_FSTRING, STATE_IN_COLON, STATE_IN_BRACES] + 1,8-1,9: NUMBER '2' + 1,9-1,10: RBRACE '}' # [STATE_NOT_FSTRING, STATE_IN_COLON] + 1,10-1,11: FSTRING_MIDDLE 'c' + 1,11-1,12: RBRACE '}' # [STATE_NOT_FSTRING, STATE_MIDDLE] + 1,12-1,13: FSTRING_MIDDLE 'd' + 1,13-1,14: FSTRING_END '"' # [STATE_NOT_FSTRING] + 1,14-1,15: NEWLINE '\n' + 2,0-2,0: ENDMARKER '' + + Notice that the nested braces in the format specifier are represented + by adding a STATE_IN_BRACES entry to the state stack. The stack is + also used if there are nested f-strings. + + """ + + def __init__(self) -> None: + self.stack: List[int] = [STATE_NOT_FSTRING] + + def is_in_fstring_expression(self) -> bool: + return self.stack[-1] not in (STATE_MIDDLE, STATE_NOT_FSTRING) + + def current(self) -> int: + return self.stack[-1] + + def enter_fstring(self) -> None: + self.stack.append(STATE_MIDDLE) + + def leave_fstring(self) -> None: + state = self.stack.pop() + assert state == STATE_MIDDLE + + def consume_lbrace(self) -> None: + current_state = self.stack[-1] + if current_state == STATE_MIDDLE: + self.stack[-1] = STATE_IN_BRACES + elif current_state == STATE_IN_COLON: + self.stack.append(STATE_IN_BRACES) + else: + assert False, current_state + + def consume_rbrace(self) -> None: + current_state = self.stack[-1] + assert current_state in (STATE_IN_BRACES, STATE_IN_COLON) + if len(self.stack) > 1 and self.stack[-2] == STATE_IN_COLON: + self.stack.pop() + else: + self.stack[-1] = STATE_MIDDLE + + def consume_colon(self) -> None: + assert self.stack[-1] == STATE_IN_BRACES, self.stack + self.stack[-1] = STATE_IN_COLON + + +def generate_tokens( + readline: Callable[[], str], grammar: Optional[Grammar] = None +) -> Iterator[GoodTokenInfo]: + """ + The generate_tokens() generator requires one argument, readline, which + must be a callable object which provides the same interface as the + readline() method of built-in file objects. Each call to the function + should return one line of input as a string. Alternately, readline + can be a callable function terminating with StopIteration: + readline = open(myfile).next # Example of alternate readline + + The generator produces 5-tuples with these members: the token type; the + token string; a 2-tuple (srow, scol) of ints specifying the row and + column where the token begins in the source; a 2-tuple (erow, ecol) of + ints specifying the row and column where the token ends in the source; + and the line on which the token was found. The line passed is the + logical line; continuation lines are included. + """ + lnum = parenlev = continued = 0 + parenlev_stack: List[int] = [] + fstring_state = FStringState() + formatspec = "" + numchars: Final[str] = "0123456789" + contstr, needcont = "", 0 + contline: Optional[str] = None + indents = [0] + + # If we know we're parsing 3.7+, we can unconditionally parse `async` and + # `await` as keywords. 
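The FStringState class above can also be exercised on its own; a short sketch replaying the bracket transitions from the f"a{1:b{2}c}d" example in its docstring::

    from blib2to3.pgen2.tokenize import (
        FStringState,
        STATE_IN_BRACES,
        STATE_IN_COLON,
        STATE_NOT_FSTRING,
    )

    fs = FStringState()
    fs.enter_fstring()      # FSTRING_START 'f"'
    fs.consume_lbrace()     # '{' opening the replacement field
    fs.consume_colon()      # ':' starting the format spec
    fs.consume_lbrace()     # nested '{' inside the spec
    assert fs.stack == [STATE_NOT_FSTRING, STATE_IN_COLON, STATE_IN_BRACES]
    fs.consume_rbrace()     # close the nested braces
    fs.consume_rbrace()     # close the replacement field
    fs.leave_fstring()      # FSTRING_END '"'
    assert fs.stack == [STATE_NOT_FSTRING]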
+ async_keywords = False if grammar is None else grammar.async_keywords + # 'stashed' and 'async_*' are used for async/await parsing + stashed: Optional[GoodTokenInfo] = None + async_def = False + async_def_indent = 0 + async_def_nl = False + + strstart: Tuple[int, int] + endprog_stack: List[Pattern[str]] = [] + formatspec_start: Tuple[int, int] + + while 1: # loop over lines in stream + try: + line = readline() + except StopIteration: + line = "" + lnum += 1 + pos, max = 0, len(line) + + if contstr: # continued string + assert contline is not None + if not line: + raise TokenError("EOF in multi-line string", strstart) + endprog = endprog_stack[-1] + endmatch = endprog.match(line) + if endmatch: + end = endmatch.end(0) + token = contstr + line[:end] + spos = strstart + epos = (lnum, end) + tokenline = contline + line + if ( + fstring_state.current() == STATE_NOT_FSTRING + and not is_fstring_start(token) + ): + yield (STRING, token, spos, epos, tokenline) + endprog_stack.pop() + parenlev = parenlev_stack.pop() + else: + if is_fstring_start(token): + fstring_start, token = _split_fstring_start_and_middle(token) + fstring_start_epos = (lnum, spos[1] + len(fstring_start)) + yield ( + FSTRING_START, + fstring_start, + spos, + fstring_start_epos, + tokenline, + ) + fstring_state.enter_fstring() + # increase spos to the end of the fstring start + spos = fstring_start_epos + + if token.endswith("{"): + fstring_middle, lbrace = token[:-1], token[-1] + fstring_middle_epos = lbrace_spos = (lnum, end - 1) + yield ( + FSTRING_MIDDLE, + fstring_middle, + spos, + fstring_middle_epos, + line, + ) + yield (LBRACE, lbrace, lbrace_spos, epos, line) + fstring_state.consume_lbrace() + else: + if token.endswith(('"""', "'''")): + fstring_middle, fstring_end = token[:-3], token[-3:] + fstring_middle_epos = end_spos = (lnum, end - 3) + else: + fstring_middle, fstring_end = token[:-1], token[-1] + fstring_middle_epos = end_spos = (lnum, end - 1) + yield ( + FSTRING_MIDDLE, + fstring_middle, + spos, + fstring_middle_epos, + line, + ) + yield ( + FSTRING_END, + fstring_end, + end_spos, + epos, + line, + ) + fstring_state.leave_fstring() + endprog_stack.pop() + parenlev = parenlev_stack.pop() + pos = end + contstr, needcont = "", 0 + contline = None + elif needcont and line[-2:] != "\\\n" and line[-3:] != "\\\r\n": + yield ( + ERRORTOKEN, + contstr + line, + strstart, + (lnum, len(line)), + contline, + ) + contstr = "" + contline = None + continue + else: + contstr = contstr + line + contline = contline + line + continue + + # new statement + elif ( + parenlev == 0 + and not continued + and not fstring_state.is_in_fstring_expression() + ): + if not line: + break + column = 0 + while pos < max: # measure leading whitespace + if line[pos] == " ": + column += 1 + elif line[pos] == "\t": + column = (column // tabsize + 1) * tabsize + elif line[pos] == "\f": + column = 0 + else: + break + pos += 1 + if pos == max: + break + + if stashed: + yield stashed + stashed = None + + if line[pos] in "\r\n": # skip blank lines + yield (NL, line[pos:], (lnum, pos), (lnum, len(line)), line) + continue + + if line[pos] == "#": # skip comments + comment_token = line[pos:].rstrip("\r\n") + nl_pos = pos + len(comment_token) + yield ( + COMMENT, + comment_token, + (lnum, pos), + (lnum, nl_pos), + line, + ) + yield (NL, line[nl_pos:], (lnum, nl_pos), (lnum, len(line)), line) + continue + + if column > indents[-1]: # count indents + indents.append(column) + yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line) + + while column < 
indents[-1]: # count dedents + if column not in indents: + raise IndentationError( + "unindent does not match any outer indentation level", + ("", lnum, pos, line), + ) + indents = indents[:-1] + + if async_def and async_def_indent >= indents[-1]: + async_def = False + async_def_nl = False + async_def_indent = 0 + + yield (DEDENT, "", (lnum, pos), (lnum, pos), line) + + if async_def and async_def_nl and async_def_indent >= indents[-1]: + async_def = False + async_def_nl = False + async_def_indent = 0 + + else: # continued statement + if not line: + raise TokenError("EOF in multi-line statement", (lnum, 0)) + continued = 0 + + while pos < max: + if fstring_state.current() == STATE_MIDDLE: + endprog = endprog_stack[-1] + endmatch = endprog.match(line, pos) + if endmatch: # all on one line + start, end = endmatch.span(0) + token = line[start:end] + if token.endswith(('"""', "'''")): + middle_token, end_token = token[:-3], token[-3:] + middle_epos = end_spos = (lnum, end - 3) + else: + middle_token, end_token = token[:-1], token[-1] + middle_epos = end_spos = (lnum, end - 1) + # TODO: unsure if this can be safely removed + if stashed: + yield stashed + stashed = None + yield ( + FSTRING_MIDDLE, + middle_token, + (lnum, pos), + middle_epos, + line, + ) + if not token.endswith("{"): + yield ( + FSTRING_END, + end_token, + end_spos, + (lnum, end), + line, + ) + fstring_state.leave_fstring() + endprog_stack.pop() + parenlev = parenlev_stack.pop() + else: + yield (LBRACE, "{", (lnum, end - 1), (lnum, end), line) + fstring_state.consume_lbrace() + pos = end + continue + else: # multiple lines + strstart = (lnum, end) + contstr = line[end:] + contline = line + break + + if fstring_state.current() == STATE_IN_COLON: + match = fstring_middle_after_colon.match(line, pos) + if match is None: + formatspec += line[pos:] + pos = max + continue + + start, end = match.span(1) + token = line[start:end] + formatspec += token + + brace_start, brace_end = match.span(2) + brace_or_nl = line[brace_start:brace_end] + if brace_or_nl == "\n": + pos = brace_end + + yield (FSTRING_MIDDLE, formatspec, formatspec_start, (lnum, end), line) + formatspec = "" + + if brace_or_nl == "{": + yield (LBRACE, "{", (lnum, brace_start), (lnum, brace_end), line) + fstring_state.consume_lbrace() + end = brace_end + elif brace_or_nl == "}": + yield (RBRACE, "}", (lnum, brace_start), (lnum, brace_end), line) + fstring_state.consume_rbrace() + end = brace_end + formatspec_start = (lnum, brace_end) + + pos = end + continue + + if fstring_state.current() == STATE_IN_BRACES and parenlev == 0: + match = bang.match(line, pos) + if match: + start, end = match.span(1) + yield (OP, "!", (lnum, start), (lnum, end), line) + pos = end + continue + + match = colon.match(line, pos) + if match: + start, end = match.span(1) + yield (OP, ":", (lnum, start), (lnum, end), line) + fstring_state.consume_colon() + formatspec_start = (lnum, end) + pos = end + continue + + pseudomatch = pseudoprog.match(line, pos) + if pseudomatch: # scan for tokens + start, end = pseudomatch.span(1) + spos, epos, pos = (lnum, start), (lnum, end), end + token, initial = line[start:end], line[start] + + if initial in numchars or ( + initial == "." and token != "." 
+ ): # ordinary number + yield (NUMBER, token, spos, epos, line) + elif initial in "\r\n": + newline = NEWLINE + if parenlev > 0 or fstring_state.is_in_fstring_expression(): + newline = NL + elif async_def: + async_def_nl = True + if stashed: + yield stashed + stashed = None + yield (newline, token, spos, epos, line) + + elif initial == "#": + assert not token.endswith("\n") + if stashed: + yield stashed + stashed = None + yield (COMMENT, token, spos, epos, line) + elif token in triple_quoted: + endprog = endprogs[token] + endprog_stack.append(endprog) + parenlev_stack.append(parenlev) + parenlev = 0 + if is_fstring_start(token): + yield (FSTRING_START, token, spos, epos, line) + fstring_state.enter_fstring() + + endmatch = endprog.match(line, pos) + if endmatch: # all on one line + if stashed: + yield stashed + stashed = None + if not is_fstring_start(token): + pos = endmatch.end(0) + token = line[start:pos] + epos = (lnum, pos) + yield (STRING, token, spos, epos, line) + endprog_stack.pop() + parenlev = parenlev_stack.pop() + else: + end = endmatch.end(0) + token = line[pos:end] + spos, epos = (lnum, pos), (lnum, end) + if not token.endswith("{"): + fstring_middle, fstring_end = token[:-3], token[-3:] + fstring_middle_epos = fstring_end_spos = (lnum, end - 3) + yield ( + FSTRING_MIDDLE, + fstring_middle, + spos, + fstring_middle_epos, + line, + ) + yield ( + FSTRING_END, + fstring_end, + fstring_end_spos, + epos, + line, + ) + fstring_state.leave_fstring() + endprog_stack.pop() + parenlev = parenlev_stack.pop() + else: + fstring_middle, lbrace = token[:-1], token[-1] + fstring_middle_epos = lbrace_spos = (lnum, end - 1) + yield ( + FSTRING_MIDDLE, + fstring_middle, + spos, + fstring_middle_epos, + line, + ) + yield (LBRACE, lbrace, lbrace_spos, epos, line) + fstring_state.consume_lbrace() + pos = end + else: + # multiple lines + if is_fstring_start(token): + strstart = (lnum, pos) + contstr = line[pos:] + else: + strstart = (lnum, start) + contstr = line[start:] + contline = line + break + elif ( + initial in single_quoted + or token[:2] in single_quoted + or token[:3] in single_quoted + ): + maybe_endprog = ( + endprogs.get(initial) + or endprogs.get(token[:2]) + or endprogs.get(token[:3]) + ) + assert maybe_endprog is not None, f"endprog not found for {token}" + endprog = maybe_endprog + if token[-1] == "\n": # continued string + endprog_stack.append(endprog) + parenlev_stack.append(parenlev) + parenlev = 0 + strstart = (lnum, start) + contstr, needcont = line[start:], 1 + contline = line + break + else: # ordinary string + if stashed: + yield stashed + stashed = None + + if not is_fstring_start(token): + yield (STRING, token, spos, epos, line) + else: + if pseudomatch[20] is not None: + fstring_start = pseudomatch[20] + offset = pseudomatch.end(20) - pseudomatch.start(1) + elif pseudomatch[22] is not None: + fstring_start = pseudomatch[22] + offset = pseudomatch.end(22) - pseudomatch.start(1) + elif pseudomatch[24] is not None: + fstring_start = pseudomatch[24] + offset = pseudomatch.end(24) - pseudomatch.start(1) + else: + fstring_start = pseudomatch[26] + offset = pseudomatch.end(26) - pseudomatch.start(1) + + start_epos = (lnum, start + offset) + yield (FSTRING_START, fstring_start, spos, start_epos, line) + fstring_state.enter_fstring() + endprog = endprogs[fstring_start] + endprog_stack.append(endprog) + parenlev_stack.append(parenlev) + parenlev = 0 + + end_offset = pseudomatch.end(1) - 1 + fstring_middle = line[start + offset : end_offset] + middle_spos = (lnum, start + offset) 
+ middle_epos = (lnum, end_offset) + yield ( + FSTRING_MIDDLE, + fstring_middle, + middle_spos, + middle_epos, + line, + ) + if not token.endswith("{"): + end_spos = (lnum, end_offset) + end_epos = (lnum, end_offset + 1) + yield (FSTRING_END, token[-1], end_spos, end_epos, line) + fstring_state.leave_fstring() + endprog_stack.pop() + parenlev = parenlev_stack.pop() + else: + end_spos = (lnum, end_offset) + end_epos = (lnum, end_offset + 1) + yield (LBRACE, "{", end_spos, end_epos, line) + fstring_state.consume_lbrace() + + elif initial.isidentifier(): # ordinary name + if token in ("async", "await"): + if async_keywords or async_def: + yield ( + ASYNC if token == "async" else AWAIT, + token, + spos, + epos, + line, + ) + continue + + tok = (NAME, token, spos, epos, line) + if token == "async" and not stashed: + stashed = tok + continue + + if token in ("def", "for"): + if stashed and stashed[0] == NAME and stashed[1] == "async": + if token == "def": + async_def = True + async_def_indent = indents[-1] + + yield ( + ASYNC, + stashed[1], + stashed[2], + stashed[3], + stashed[4], + ) + stashed = None + + if stashed: + yield stashed + stashed = None + + yield tok + elif initial == "\\": # continued stmt + # This yield is new; needed for better idempotency: + if stashed: + yield stashed + stashed = None + yield (NL, token, spos, (lnum, pos), line) + continued = 1 + elif ( + initial == "}" + and parenlev == 0 + and fstring_state.is_in_fstring_expression() + ): + yield (RBRACE, token, spos, epos, line) + fstring_state.consume_rbrace() + formatspec_start = epos + else: + if initial in "([{": + parenlev += 1 + elif initial in ")]}": + parenlev -= 1 + if stashed: + yield stashed + stashed = None + yield (OP, token, spos, epos, line) + else: + yield (ERRORTOKEN, line[pos], (lnum, pos), (lnum, pos + 1), line) + pos += 1 + + if stashed: + yield stashed + stashed = None + + for _indent in indents[1:]: # pop remaining indent levels + yield (DEDENT, "", (lnum, 0), (lnum, 0), "") + yield (ENDMARKER, "", (lnum, 0), (lnum, 0), "") + assert len(endprog_stack) == 0 + assert len(parenlev_stack) == 0 + + +if __name__ == "__main__": # testing + if len(sys.argv) > 1: + tokenize(open(sys.argv[1]).readline) + else: + tokenize(sys.stdin.readline) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pygram.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pygram.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..463702c71 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pygram.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pygram.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pygram.py new file mode 100644 index 000000000..70a5684bb --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pygram.py @@ -0,0 +1,204 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. 
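A minimal end-to-end sketch of the generate_tokens() generator defined in the tokenize module above, assuming the vendored blib2to3 package is importable::

    import io
    from blib2to3.pgen2 import token
    from blib2to3.pgen2.tokenize import generate_tokens

    src = io.StringIO("def f():\n    return 1\n")
    kinds = [token.tok_name[tt] for tt, _, _, _, _ in generate_tokens(src.readline)]
    # indentation is tokenized explicitly and the stream ends with ENDMARKER
    assert "INDENT" in kinds and "DEDENT" in kinds
    assert kinds[-1] == "ENDMARKER"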
+ +"""Export the Python grammar and symbols.""" + +# Python imports +import os +from typing import Union + +# Local imports +from .pgen2 import driver +from .pgen2.grammar import Grammar + +# Moved into initialize because mypyc can't handle __file__ (XXX bug) +# # The grammar file +# _GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), "Grammar.txt") +# _PATTERN_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), +# "PatternGrammar.txt") + + +class Symbols: + def __init__(self, grammar: Grammar) -> None: + """Initializer. + + Creates an attribute for each grammar symbol (nonterminal), + whose value is the symbol's type (an int >= 256). + """ + for name, symbol in grammar.symbol2number.items(): + setattr(self, name, symbol) + + +class _python_symbols(Symbols): + and_expr: int + and_test: int + annassign: int + arglist: int + argument: int + arith_expr: int + asexpr_test: int + assert_stmt: int + async_funcdef: int + async_stmt: int + atom: int + augassign: int + break_stmt: int + case_block: int + classdef: int + comp_for: int + comp_if: int + comp_iter: int + comp_op: int + comparison: int + compound_stmt: int + continue_stmt: int + decorated: int + decorator: int + decorators: int + del_stmt: int + dictsetmaker: int + dotted_as_name: int + dotted_as_names: int + dotted_name: int + encoding_decl: int + eval_input: int + except_clause: int + expr: int + expr_stmt: int + exprlist: int + factor: int + file_input: int + flow_stmt: int + for_stmt: int + fstring: int + fstring_format_spec: int + fstring_middle: int + fstring_replacement_field: int + funcdef: int + global_stmt: int + guard: int + if_stmt: int + import_as_name: int + import_as_names: int + import_from: int + import_name: int + import_stmt: int + lambdef: int + listmaker: int + match_stmt: int + namedexpr_test: int + not_test: int + old_comp_for: int + old_comp_if: int + old_comp_iter: int + old_lambdef: int + old_test: int + or_test: int + parameters: int + paramspec: int + pass_stmt: int + pattern: int + patterns: int + power: int + raise_stmt: int + return_stmt: int + shift_expr: int + simple_stmt: int + single_input: int + sliceop: int + small_stmt: int + subject_expr: int + star_expr: int + stmt: int + subscript: int + subscriptlist: int + suite: int + term: int + test: int + testlist: int + testlist1: int + testlist_gexp: int + testlist_safe: int + testlist_star_expr: int + tfpdef: int + tfplist: int + tname: int + tname_star: int + trailer: int + try_stmt: int + type_stmt: int + typedargslist: int + typeparam: int + typeparams: int + typevar: int + typevartuple: int + varargslist: int + vfpdef: int + vfplist: int + vname: int + while_stmt: int + with_stmt: int + xor_expr: int + yield_arg: int + yield_expr: int + yield_stmt: int + + +class _pattern_symbols(Symbols): + Alternative: int + Alternatives: int + Details: int + Matcher: int + NegatedUnit: int + Repeater: int + Unit: int + + +python_grammar: Grammar +python_grammar_async_keywords: Grammar +python_grammar_soft_keywords: Grammar +pattern_grammar: Grammar +python_symbols: _python_symbols +pattern_symbols: _pattern_symbols + + +def initialize(cache_dir: Union[str, "os.PathLike[str]", None] = None) -> None: + global python_grammar + global python_grammar_async_keywords + global python_grammar_soft_keywords + global python_symbols + global pattern_grammar + global pattern_symbols + + # The grammar file + _GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), "Grammar.txt") + _PATTERN_GRAMMAR_FILE = os.path.join( + os.path.dirname(__file__), "PatternGrammar.txt" + ) 
+ + python_grammar = driver.load_packaged_grammar("blib2to3", _GRAMMAR_FILE, cache_dir) + assert "print" not in python_grammar.keywords + assert "exec" not in python_grammar.keywords + + soft_keywords = python_grammar.soft_keywords.copy() + python_grammar.soft_keywords.clear() + + python_symbols = _python_symbols(python_grammar) + + # Python 3.0-3.6 + python_grammar.version = (3, 0) + + # Python 3.7+ + python_grammar_async_keywords = python_grammar.copy() + python_grammar_async_keywords.async_keywords = True + python_grammar_async_keywords.version = (3, 7) + + # Python 3.10+ + python_grammar_soft_keywords = python_grammar_async_keywords.copy() + python_grammar_soft_keywords.soft_keywords = soft_keywords + python_grammar_soft_keywords.version = (3, 10) + + pattern_grammar = driver.load_packaged_grammar( + "blib2to3", _PATTERN_GRAMMAR_FILE, cache_dir + ) + pattern_symbols = _pattern_symbols(pattern_grammar) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pytree.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pytree.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..133614212 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pytree.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pytree.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pytree.py new file mode 100644 index 000000000..4c55d7ac7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/blib2to3/pytree.py @@ -0,0 +1,986 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +""" +Python parse tree definitions. + +This is a very concrete parse tree; we need to keep every token and +even the comments and whitespace between tokens. + +There's also a pattern matching implementation here. +""" + +# mypy: allow-untyped-defs, allow-incomplete-defs + +from typing import ( + Any, + Dict, + Iterable, + Iterator, + List, + Optional, + Set, + Tuple, + TypeVar, + Union, +) + +from blib2to3.pgen2.grammar import Grammar + +__author__ = "Guido van Rossum " + +import sys +from io import StringIO + +HUGE: int = 0x7FFFFFFF # maximum repeat count, default max + +_type_reprs: Dict[int, Union[str, int]] = {} + + +def type_repr(type_num: int) -> Union[str, int]: + global _type_reprs + if not _type_reprs: + from . import pygram + + if not hasattr(pygram, "python_symbols"): + pygram.initialize(cache_dir=None) + + # printing tokens is possible but not as useful + # from .pgen2 import token // token.__dict__.items(): + for name in dir(pygram.python_symbols): + val = getattr(pygram.python_symbols, name) + if type(val) == int: + _type_reprs[val] = name + return _type_reprs.setdefault(type_num, type_num) + + +_P = TypeVar("_P", bound="Base") + +NL = Union["Node", "Leaf"] +Context = Tuple[str, Tuple[int, int]] +RawNode = Tuple[int, Optional[str], Optional[Context], Optional[List[NL]]] + + +class Base: + """ + Abstract base class for Node and Leaf. + + This provides some default functionality and boilerplate using the + template pattern. + + A node may be a subnode of at most one parent. 
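type_repr() above lazily initializes the pygram module from earlier; the same entry point can also be called directly. A small sketch, assuming Grammar.txt ships alongside the vendored package::

    from blib2to3 import pygram
    from blib2to3.pytree import type_repr

    pygram.initialize(cache_dir=None)          # loads Grammar.txt / PatternGrammar.txt
    funcdef = pygram.python_symbols.funcdef    # each nonterminal becomes an int attribute
    assert funcdef >= 256
    assert type_repr(funcdef) == "funcdef"
    assert pygram.python_grammar_async_keywords.async_keywords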
+ """ + + # Default values for instance variables + type: int # int: token number (< 256) or symbol number (>= 256) + parent: Optional["Node"] = None # Parent node pointer, or None + children: List[NL] # List of subnodes + was_changed: bool = False + was_checked: bool = False + + def __new__(cls, *args, **kwds): + """Constructor that prevents Base from being instantiated.""" + assert cls is not Base, "Cannot instantiate Base" + return object.__new__(cls) + + def __eq__(self, other: Any) -> bool: + """ + Compare two nodes for equality. + + This calls the method _eq(). + """ + if self.__class__ is not other.__class__: + return NotImplemented + return self._eq(other) + + @property + def prefix(self) -> str: + raise NotImplementedError + + def _eq(self: _P, other: _P) -> bool: + """ + Compare two nodes for equality. + + This is called by __eq__ and __ne__. It is only called if the two nodes + have the same type. This must be implemented by the concrete subclass. + Nodes should be considered equal if they have the same structure, + ignoring the prefix string and other context information. + """ + raise NotImplementedError + + def __deepcopy__(self: _P, memo: Any) -> _P: + return self.clone() + + def clone(self: _P) -> _P: + """ + Return a cloned (deep) copy of self. + + This must be implemented by the concrete subclass. + """ + raise NotImplementedError + + def post_order(self) -> Iterator[NL]: + """ + Return a post-order iterator for the tree. + + This must be implemented by the concrete subclass. + """ + raise NotImplementedError + + def pre_order(self) -> Iterator[NL]: + """ + Return a pre-order iterator for the tree. + + This must be implemented by the concrete subclass. + """ + raise NotImplementedError + + def replace(self, new: Union[NL, List[NL]]) -> None: + """Replace this node with a new one in the parent.""" + assert self.parent is not None, str(self) + assert new is not None + if not isinstance(new, list): + new = [new] + l_children = [] + found = False + for ch in self.parent.children: + if ch is self: + assert not found, (self.parent.children, self, new) + if new is not None: + l_children.extend(new) + found = True + else: + l_children.append(ch) + assert found, (self.children, self, new) + self.parent.children = l_children + self.parent.changed() + self.parent.invalidate_sibling_maps() + for x in new: + x.parent = self.parent + self.parent = None + + def get_lineno(self) -> Optional[int]: + """Return the line number which generated the invocant node.""" + node = self + while not isinstance(node, Leaf): + if not node.children: + return None + node = node.children[0] + return node.lineno + + def changed(self) -> None: + if self.was_changed: + return + if self.parent: + self.parent.changed() + self.was_changed = True + + def remove(self) -> Optional[int]: + """ + Remove the node from the tree. Returns the position of the node in its + parent's children before it was removed. + """ + if self.parent: + for i, node in enumerate(self.parent.children): + if node is self: + del self.parent.children[i] + self.parent.changed() + self.parent.invalidate_sibling_maps() + self.parent = None + return i + return None + + @property + def next_sibling(self) -> Optional[NL]: + """ + The node immediately following the invocant in their parent's children + list. 
If the invocant does not have a next sibling, it is None + """ + if self.parent is None: + return None + + if self.parent.next_sibling_map is None: + self.parent.update_sibling_maps() + assert self.parent.next_sibling_map is not None + return self.parent.next_sibling_map[id(self)] + + @property + def prev_sibling(self) -> Optional[NL]: + """ + The node immediately preceding the invocant in their parent's children + list. If the invocant does not have a previous sibling, it is None. + """ + if self.parent is None: + return None + + if self.parent.prev_sibling_map is None: + self.parent.update_sibling_maps() + assert self.parent.prev_sibling_map is not None + return self.parent.prev_sibling_map[id(self)] + + def leaves(self) -> Iterator["Leaf"]: + for child in self.children: + yield from child.leaves() + + def depth(self) -> int: + if self.parent is None: + return 0 + return 1 + self.parent.depth() + + def get_suffix(self) -> str: + """ + Return the string immediately following the invocant node. This is + effectively equivalent to node.next_sibling.prefix + """ + next_sib = self.next_sibling + if next_sib is None: + return "" + prefix = next_sib.prefix + return prefix + + +class Node(Base): + """Concrete implementation for interior nodes.""" + + fixers_applied: Optional[List[Any]] + used_names: Optional[Set[str]] + + def __init__( + self, + type: int, + children: List[NL], + context: Optional[Any] = None, + prefix: Optional[str] = None, + fixers_applied: Optional[List[Any]] = None, + ) -> None: + """ + Initializer. + + Takes a type constant (a symbol number >= 256), a sequence of + child nodes, and an optional context keyword argument. + + As a side effect, the parent pointers of the children are updated. + """ + assert type >= 256, type + self.type = type + self.children = list(children) + for ch in self.children: + assert ch.parent is None, repr(ch) + ch.parent = self + self.invalidate_sibling_maps() + if prefix is not None: + self.prefix = prefix + if fixers_applied: + self.fixers_applied = fixers_applied[:] + else: + self.fixers_applied = None + + def __repr__(self) -> str: + """Return a canonical string representation.""" + assert self.type is not None + return "{}({}, {!r})".format( + self.__class__.__name__, + type_repr(self.type), + self.children, + ) + + def __str__(self) -> str: + """ + Return a pretty string representation. + + This reproduces the input source exactly. + """ + return "".join(map(str, self.children)) + + def _eq(self, other: Base) -> bool: + """Compare two nodes for equality.""" + return (self.type, self.children) == (other.type, other.children) + + def clone(self) -> "Node": + assert self.type is not None + """Return a cloned (deep) copy of self.""" + return Node( + self.type, + [ch.clone() for ch in self.children], + fixers_applied=self.fixers_applied, + ) + + def post_order(self) -> Iterator[NL]: + """Return a post-order iterator for the tree.""" + for child in self.children: + yield from child.post_order() + yield self + + def pre_order(self) -> Iterator[NL]: + """Return a pre-order iterator for the tree.""" + yield self + for child in self.children: + yield from child.pre_order() + + @property + def prefix(self) -> str: + """ + The whitespace and comments preceding this node in the input. 
+ """ + if not self.children: + return "" + return self.children[0].prefix + + @prefix.setter + def prefix(self, prefix: str) -> None: + if self.children: + self.children[0].prefix = prefix + + def set_child(self, i: int, child: NL) -> None: + """ + Equivalent to 'node.children[i] = child'. This method also sets the + child's parent attribute appropriately. + """ + child.parent = self + self.children[i].parent = None + self.children[i] = child + self.changed() + self.invalidate_sibling_maps() + + def insert_child(self, i: int, child: NL) -> None: + """ + Equivalent to 'node.children.insert(i, child)'. This method also sets + the child's parent attribute appropriately. + """ + child.parent = self + self.children.insert(i, child) + self.changed() + self.invalidate_sibling_maps() + + def append_child(self, child: NL) -> None: + """ + Equivalent to 'node.children.append(child)'. This method also sets the + child's parent attribute appropriately. + """ + child.parent = self + self.children.append(child) + self.changed() + self.invalidate_sibling_maps() + + def invalidate_sibling_maps(self) -> None: + self.prev_sibling_map: Optional[Dict[int, Optional[NL]]] = None + self.next_sibling_map: Optional[Dict[int, Optional[NL]]] = None + + def update_sibling_maps(self) -> None: + _prev: Dict[int, Optional[NL]] = {} + _next: Dict[int, Optional[NL]] = {} + self.prev_sibling_map = _prev + self.next_sibling_map = _next + previous: Optional[NL] = None + for current in self.children: + _prev[id(current)] = previous + _next[id(previous)] = current + previous = current + _next[id(current)] = None + + +class Leaf(Base): + """Concrete implementation for leaf nodes.""" + + # Default values for instance variables + value: str + fixers_applied: List[Any] + bracket_depth: int + # Changed later in brackets.py + opening_bracket: Optional["Leaf"] = None + used_names: Optional[Set[str]] + _prefix = "" # Whitespace and comments preceding this token in the input + lineno: int = 0 # Line where this token starts in the input + column: int = 0 # Column where this token starts in the input + # If not None, this Leaf is created by converting a block of fmt off/skip + # code, and `fmt_pass_converted_first_leaf` points to the first Leaf in the + # converted code. + fmt_pass_converted_first_leaf: Optional["Leaf"] = None + + def __init__( + self, + type: int, + value: str, + context: Optional[Context] = None, + prefix: Optional[str] = None, + fixers_applied: List[Any] = [], + opening_bracket: Optional["Leaf"] = None, + fmt_pass_converted_first_leaf: Optional["Leaf"] = None, + ) -> None: + """ + Initializer. + + Takes a type constant (a token number < 256), a string value, and an + optional context keyword argument. + """ + + assert 0 <= type < 256, type + if context is not None: + self._prefix, (self.lineno, self.column) = context + self.type = type + self.value = value + if prefix is not None: + self._prefix = prefix + self.fixers_applied: Optional[List[Any]] = fixers_applied[:] + self.children = [] + self.opening_bracket = opening_bracket + self.fmt_pass_converted_first_leaf = fmt_pass_converted_first_leaf + + def __repr__(self) -> str: + """Return a canonical string representation.""" + from .pgen2.token import tok_name + + assert self.type is not None + return "{}({}, {!r})".format( + self.__class__.__name__, + tok_name.get(self.type, self.type), + self.value, + ) + + def __str__(self) -> str: + """ + Return a pretty string representation. + + This reproduces the input source exactly. 
+ """ + return self._prefix + str(self.value) + + def _eq(self, other: "Leaf") -> bool: + """Compare two nodes for equality.""" + return (self.type, self.value) == (other.type, other.value) + + def clone(self) -> "Leaf": + assert self.type is not None + """Return a cloned (deep) copy of self.""" + return Leaf( + self.type, + self.value, + (self.prefix, (self.lineno, self.column)), + fixers_applied=self.fixers_applied, + ) + + def leaves(self) -> Iterator["Leaf"]: + yield self + + def post_order(self) -> Iterator["Leaf"]: + """Return a post-order iterator for the tree.""" + yield self + + def pre_order(self) -> Iterator["Leaf"]: + """Return a pre-order iterator for the tree.""" + yield self + + @property + def prefix(self) -> str: + """ + The whitespace and comments preceding this token in the input. + """ + return self._prefix + + @prefix.setter + def prefix(self, prefix: str) -> None: + self.changed() + self._prefix = prefix + + +def convert(gr: Grammar, raw_node: RawNode) -> NL: + """ + Convert raw node information to a Node or Leaf instance. + + This is passed to the parser driver which calls it whenever a reduction of a + grammar rule produces a new complete node, so that the tree is build + strictly bottom-up. + """ + type, value, context, children = raw_node + if children or type in gr.number2symbol: + # If there's exactly one child, return that child instead of + # creating a new node. + assert children is not None + if len(children) == 1: + return children[0] + return Node(type, children, context=context) + else: + return Leaf(type, value or "", context=context) + + +_Results = Dict[str, NL] + + +class BasePattern: + """ + A pattern is a tree matching pattern. + + It looks for a specific node type (token or symbol), and + optionally for a specific content. + + This is an abstract base class. There are three concrete + subclasses: + + - LeafPattern matches a single leaf node; + - NodePattern matches a single node (usually non-leaf); + - WildcardPattern matches a sequence of nodes of variable length. + """ + + # Defaults for instance variables + type: Optional[int] + type = None # Node type (token if < 256, symbol if >= 256) + content: Any = None # Optional content matching pattern + name: Optional[str] = None # Optional name used to store match in results dict + + def __new__(cls, *args, **kwds): + """Constructor that prevents BasePattern from being instantiated.""" + assert cls is not BasePattern, "Cannot instantiate BasePattern" + return object.__new__(cls) + + def __repr__(self) -> str: + assert self.type is not None + args = [type_repr(self.type), self.content, self.name] + while args and args[-1] is None: + del args[-1] + return "{}({})".format(self.__class__.__name__, ", ".join(map(repr, args))) + + def _submatch(self, node, results=None) -> bool: + raise NotImplementedError + + def optimize(self) -> "BasePattern": + """ + A subclass can define this as a hook for optimizations. + + Returns either self or another node with the same effect. + """ + return self + + def match(self, node: NL, results: Optional[_Results] = None) -> bool: + """ + Does this pattern exactly match a node? + + Returns True if it matches, False if not. + + If results is not None, it must be a dict which will be + updated with the nodes matching named subpatterns. + + Default implementation for non-wildcard patterns. 
+ """ + if self.type is not None and node.type != self.type: + return False + if self.content is not None: + r: Optional[_Results] = None + if results is not None: + r = {} + if not self._submatch(node, r): + return False + if r: + assert results is not None + results.update(r) + if results is not None and self.name: + results[self.name] = node + return True + + def match_seq(self, nodes: List[NL], results: Optional[_Results] = None) -> bool: + """ + Does this pattern exactly match a sequence of nodes? + + Default implementation for non-wildcard patterns. + """ + if len(nodes) != 1: + return False + return self.match(nodes[0], results) + + def generate_matches(self, nodes: List[NL]) -> Iterator[Tuple[int, _Results]]: + """ + Generator yielding all matches for this pattern. + + Default implementation for non-wildcard patterns. + """ + r: _Results = {} + if nodes and self.match(nodes[0], r): + yield 1, r + + +class LeafPattern(BasePattern): + def __init__( + self, + type: Optional[int] = None, + content: Optional[str] = None, + name: Optional[str] = None, + ) -> None: + """ + Initializer. Takes optional type, content, and name. + + The type, if given must be a token type (< 256). If not given, + this matches any *leaf* node; the content may still be required. + + The content, if given, must be a string. + + If a name is given, the matching node is stored in the results + dict under that key. + """ + if type is not None: + assert 0 <= type < 256, type + if content is not None: + assert isinstance(content, str), repr(content) + self.type = type + self.content = content + self.name = name + + def match(self, node: NL, results=None) -> bool: + """Override match() to insist on a leaf node.""" + if not isinstance(node, Leaf): + return False + return BasePattern.match(self, node, results) + + def _submatch(self, node, results=None): + """ + Match the pattern's content to the node's children. + + This assumes the node type matches and self.content is not None. + + Returns True if it matches, False if not. + + If results is not None, it must be a dict which will be + updated with the nodes matching named subpatterns. + + When returning False, the results dict may still be updated. + """ + return self.content == node.value + + +class NodePattern(BasePattern): + wildcards: bool = False + + def __init__( + self, + type: Optional[int] = None, + content: Optional[Iterable[str]] = None, + name: Optional[str] = None, + ) -> None: + """ + Initializer. Takes optional type, content, and name. + + The type, if given, must be a symbol type (>= 256). If the + type is None this matches *any* single node (leaf or not), + except if content is not None, in which it only matches + non-leaf nodes that also match the content pattern. + + The content, if not None, must be a sequence of Patterns that + must match the node's children exactly. If the content is + given, the type must not be None. + + If a name is given, the matching node is stored in the results + dict under that key. + """ + if type is not None: + assert type >= 256, type + if content is not None: + assert not isinstance(content, str), repr(content) + newcontent = list(content) + for i, item in enumerate(newcontent): + assert isinstance(item, BasePattern), (i, item) + # I don't even think this code is used anywhere, but it does cause + # unreachable errors from mypy. This function's signature does look + # odd though *shrug*. 
+ if isinstance(item, WildcardPattern): # type: ignore[unreachable] + self.wildcards = True # type: ignore[unreachable] + self.type = type + self.content = newcontent # TODO: this is unbound when content is None + self.name = name + + def _submatch(self, node, results=None) -> bool: + """ + Match the pattern's content to the node's children. + + This assumes the node type matches and self.content is not None. + + Returns True if it matches, False if not. + + If results is not None, it must be a dict which will be + updated with the nodes matching named subpatterns. + + When returning False, the results dict may still be updated. + """ + if self.wildcards: + for c, r in generate_matches(self.content, node.children): + if c == len(node.children): + if results is not None: + results.update(r) + return True + return False + if len(self.content) != len(node.children): + return False + for subpattern, child in zip(self.content, node.children): + if not subpattern.match(child, results): + return False + return True + + +class WildcardPattern(BasePattern): + """ + A wildcard pattern can match zero or more nodes. + + This has all the flexibility needed to implement patterns like: + + .* .+ .? .{m,n} + (a b c | d e | f) + (...)* (...)+ (...)? (...){m,n} + + except it always uses non-greedy matching. + """ + + min: int + max: int + + def __init__( + self, + content: Optional[str] = None, + min: int = 0, + max: int = HUGE, + name: Optional[str] = None, + ) -> None: + """ + Initializer. + + Args: + content: optional sequence of subsequences of patterns; + if absent, matches one node; + if present, each subsequence is an alternative [*] + min: optional minimum number of times to match, default 0 + max: optional maximum number of times to match, default HUGE + name: optional name assigned to this match + + [*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is + equivalent to (a b c | d e | f g h); if content is None, + this is equivalent to '.' in regular expression terms. + The min and max parameters work as follows: + min=0, max=maxint: .* + min=1, max=maxint: .+ + min=0, max=1: .? + min=1, max=1: . + If content is not None, replace the dot with the parenthesized + list of alternatives, e.g. 
(a b c | d e | f g h)* + """ + assert 0 <= min <= max <= HUGE, (min, max) + if content is not None: + f = lambda s: tuple(s) + wrapped_content = tuple(map(f, content)) # Protect against alterations + # Check sanity of alternatives + assert len(wrapped_content), repr( + wrapped_content + ) # Can't have zero alternatives + for alt in wrapped_content: + assert len(alt), repr(alt) # Can have empty alternatives + self.content = wrapped_content + self.min = min + self.max = max + self.name = name + + def optimize(self) -> Any: + """Optimize certain stacked wildcard patterns.""" + subpattern = None + if ( + self.content is not None + and len(self.content) == 1 + and len(self.content[0]) == 1 + ): + subpattern = self.content[0][0] + if self.min == 1 and self.max == 1: + if self.content is None: + return NodePattern(name=self.name) + if subpattern is not None and self.name == subpattern.name: + return subpattern.optimize() + if ( + self.min <= 1 + and isinstance(subpattern, WildcardPattern) + and subpattern.min <= 1 + and self.name == subpattern.name + ): + return WildcardPattern( + subpattern.content, + self.min * subpattern.min, + self.max * subpattern.max, + subpattern.name, + ) + return self + + def match(self, node, results=None) -> bool: + """Does this pattern exactly match a node?""" + return self.match_seq([node], results) + + def match_seq(self, nodes, results=None) -> bool: + """Does this pattern exactly match a sequence of nodes?""" + for c, r in self.generate_matches(nodes): + if c == len(nodes): + if results is not None: + results.update(r) + if self.name: + results[self.name] = list(nodes) + return True + return False + + def generate_matches(self, nodes) -> Iterator[Tuple[int, _Results]]: + """ + Generator yielding matches for a sequence of nodes. + + Args: + nodes: sequence of nodes + + Yields: + (count, results) tuples where: + count: the match comprises nodes[:count]; + results: dict containing named submatches. + """ + if self.content is None: + # Shortcut for special case (see __init__.__doc__) + for count in range(self.min, 1 + min(len(nodes), self.max)): + r = {} + if self.name: + r[self.name] = nodes[:count] + yield count, r + elif self.name == "bare_name": + yield self._bare_name_matches(nodes) + else: + # The reason for this is that hitting the recursion limit usually + # results in some ugly messages about how RuntimeErrors are being + # ignored. We only have to do this on CPython, though, because other + # implementations don't have this nasty bug in the first place. + if hasattr(sys, "getrefcount"): + save_stderr = sys.stderr + sys.stderr = StringIO() + try: + for count, r in self._recursive_matches(nodes, 0): + if self.name: + r[self.name] = nodes[:count] + yield count, r + except RuntimeError: + # We fall back to the iterative pattern matching scheme if the recursive + # scheme hits the recursion limit. 
+ for count, r in self._iterative_matches(nodes): + if self.name: + r[self.name] = nodes[:count] + yield count, r + finally: + if hasattr(sys, "getrefcount"): + sys.stderr = save_stderr + + def _iterative_matches(self, nodes) -> Iterator[Tuple[int, _Results]]: + """Helper to iteratively yield the matches.""" + nodelen = len(nodes) + if 0 >= self.min: + yield 0, {} + + results = [] + # generate matches that use just one alt from self.content + for alt in self.content: + for c, r in generate_matches(alt, nodes): + yield c, r + results.append((c, r)) + + # for each match, iterate down the nodes + while results: + new_results = [] + for c0, r0 in results: + # stop if the entire set of nodes has been matched + if c0 < nodelen and c0 <= self.max: + for alt in self.content: + for c1, r1 in generate_matches(alt, nodes[c0:]): + if c1 > 0: + r = {} + r.update(r0) + r.update(r1) + yield c0 + c1, r + new_results.append((c0 + c1, r)) + results = new_results + + def _bare_name_matches(self, nodes) -> Tuple[int, _Results]: + """Special optimized matcher for bare_name.""" + count = 0 + r = {} # type: _Results + done = False + max = len(nodes) + while not done and count < max: + done = True + for leaf in self.content: + if leaf[0].match(nodes[count], r): + count += 1 + done = False + break + assert self.name is not None + r[self.name] = nodes[:count] + return count, r + + def _recursive_matches(self, nodes, count) -> Iterator[Tuple[int, _Results]]: + """Helper to recursively yield the matches.""" + assert self.content is not None + if count >= self.min: + yield 0, {} + if count < self.max: + for alt in self.content: + for c0, r0 in generate_matches(alt, nodes): + for c1, r1 in self._recursive_matches(nodes[c0:], count + 1): + r = {} + r.update(r0) + r.update(r1) + yield c0 + c1, r + + +class NegatedPattern(BasePattern): + def __init__(self, content: Optional[BasePattern] = None) -> None: + """ + Initializer. + + The argument is either a pattern or None. If it is None, this + only matches an empty sequence (effectively '$' in regex + lingo). If it is not None, this matches whenever the argument + pattern doesn't have any matches. + """ + if content is not None: + assert isinstance(content, BasePattern), repr(content) + self.content = content + + def match(self, node, results=None) -> bool: + # We never match a node in its entirety + return False + + def match_seq(self, nodes, results=None) -> bool: + # We only match an empty sequence of nodes in its entirety + return len(nodes) == 0 + + def generate_matches(self, nodes: List[NL]) -> Iterator[Tuple[int, _Results]]: + if self.content is None: + # Return a match if there is an empty sequence + if len(nodes) == 0: + yield 0, {} + else: + # Return a match if the argument pattern has no matches + for c, r in self.content.generate_matches(nodes): + return + yield 0, {} + + +def generate_matches( + patterns: List[BasePattern], nodes: List[NL] +) -> Iterator[Tuple[int, _Results]]: + """ + Generator yielding matches for a sequence of patterns and nodes. + + Args: + patterns: a sequence of patterns + nodes: a sequence of nodes + + Yields: + (count, results) tuples where: + count: the entire sequence of patterns matches nodes[:count]; + results: dict containing named submatches. 
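A short sketch of that contract, matching a NAME leaf followed by anything; the token values are only illustrative::

    from blib2to3.pgen2 import token
    from blib2to3.pytree import Leaf, LeafPattern, WildcardPattern, generate_matches

    nodes = [Leaf(token.NAME, "x"), Leaf(token.EQUAL, "="), Leaf(token.NUMBER, "1")]
    patterns = [LeafPattern(token.NAME, name="lhs"), WildcardPattern(name="rest")]

    for count, results in generate_matches(patterns, nodes):
        # counts 1, 2 and 3 are yielded: the non-greedy wildcard takes
        # 0, 1 or 2 of the remaining nodes
        assert results["lhs"].value == "x"
        assert len(results["rest"]) == count - 1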
+ """ + if not patterns: + yield 0, {} + else: + p, rest = patterns[0], patterns[1:] + for c0, r0 in p.generate_matches(nodes): + if not rest: + yield c0, r0 + else: + for c1, r1 in generate_matches(rest, nodes[c0:]): + r = {} + r.update(r0) + r.update(r1) + yield c0 + c1, r diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/INSTALLER b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/INSTALLER new file mode 100644 index 000000000..ce3133dc1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/INSTALLER @@ -0,0 +1 @@ +Poetry 2.2.1 \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/LICENSE b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/LICENSE new file mode 100644 index 000000000..62b076cde --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/LICENSE @@ -0,0 +1,20 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/METADATA b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/METADATA new file mode 100644 index 000000000..c688a6284 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/METADATA @@ -0,0 +1,66 @@ +Metadata-Version: 2.1 +Name: certifi +Version: 2024.2.2 +Summary: Python package for providing Mozilla's CA Bundle. 
+Home-page: https://github.com/certifi/python-certifi
+Author: Kenneth Reitz
+Author-email: me@kennethreitz.com
+License: MPL-2.0
+Project-URL: Source, https://github.com/certifi/python-certifi
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Requires-Python: >=3.6
+License-File: LICENSE
+
+Certifi: Python SSL Certificates
+================================
+
+Certifi provides Mozilla's carefully curated collection of Root Certificates for
+validating the trustworthiness of SSL certificates while verifying the identity
+of TLS hosts. It has been extracted from the `Requests`_ project.
+
+Installation
+------------
+
+``certifi`` is available on PyPI. Simply install it with ``pip``::
+
+    $ pip install certifi
+
+Usage
+-----
+
+To reference the installed certificate authority (CA) bundle, you can use the
+built-in function::
+
+    >>> import certifi
+
+    >>> certifi.where()
+    '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'
+
+Or from the command line::
+
+    $ python -m certifi
+    /usr/local/lib/python3.7/site-packages/certifi/cacert.pem
+
+Enjoy!
+
+.. _`Requests`: https://requests.readthedocs.io/en/master/
+
+Addition/Removal of Certificates
+--------------------------------
+
+Certifi does not support any addition/removal or other modification of the
+CA trust store content. This project is intended to provide a reliable and
+highly portable root of trust to python deployments. Look to upstream projects
+for methods to use alternate trust.
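As a point of reference for how this vendored bundle is typically consumed, the sketch below combines ``certifi.where()`` (the function documented above) with the standard-library ``ssl`` and ``urllib`` modules; the ``ssl``/``urllib`` wiring and the example URL are illustrative and are not part of certifi itself::

    import ssl
    import urllib.request

    import certifi

    # Build an SSL context whose trust store is exactly the Mozilla bundle
    # shipped in this package (certifi/cacert.pem).
    context = ssl.create_default_context(cafile=certifi.where())

    # Any HTTPS request made with this context is verified against that bundle.
    with urllib.request.urlopen("https://example.com", context=context) as resp:
        print(resp.status)
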
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/RECORD b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/RECORD new file mode 100644 index 000000000..7ad9e8014 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/RECORD @@ -0,0 +1,11 @@ +certifi/__init__.py,sha256=ljtEx-EmmPpTe2SOd5Kzsujm_lUD0fKJVnE9gzce320,94 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/cacert.pem,sha256=ejR8qP724p-CtuR4U1WmY1wX-nVeCUD2XxWqj8e9f5I,292541 +certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426 +certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +certifi-2024.2.2.dist-info/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989 +certifi-2024.2.2.dist-info/METADATA,sha256=1noreLRChpOgeSj0uJT1mehiBl8ngh33Guc7KdvzYYM,2170 +certifi-2024.2.2.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 +certifi-2024.2.2.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi-2024.2.2.dist-info/INSTALLER,sha256=sz0Tnp99msIbbLB51q7U9nA6AvRKcsOeUKhrHH0Ulg4,12 +certifi-2024.2.2.dist-info/RECORD,, diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/WHEEL b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/WHEEL new file mode 100644 index 000000000..98c0d20b7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/top_level.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/top_level.txt new file mode 100644 index 000000000..963eac530 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi-2024.2.2.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi/__init__.py new file mode 100644 index 000000000..1c91f3ec9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi/__init__.py @@ -0,0 +1,4 @@ +from .core import contents, where + +__all__ = ["contents", "where"] +__version__ = "2024.02.02" diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi/__main__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi/__main__.py new file mode 100644 index 000000000..8945b5da8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi/cacert.pem b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi/cacert.pem new file mode 100644 index 000000000..fac3c3190 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi/cacert.pem @@ -0,0 +1,4814 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 
3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz 
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 
31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx 
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm 
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF 
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- 
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c +wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ 
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG 
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT 
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv +JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
+# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: 
CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. 
+# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 +jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- 
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ 
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw 
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 
9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT 
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx 
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ 
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 
8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q 
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg 
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO 
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT 
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc 
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN 
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: 
b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm ++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud 
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq +M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE 
WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy 
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS +sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO 
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L +6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby 
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 +WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: 
b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa 
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T +DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 +dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 
35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB 
+RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk 
+PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk +ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Label: "GLOBALTRUST 2020" +# Serial: 109160994242082918454945253 +# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 +# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 +# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a +-----BEGIN CERTIFICATE----- +MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG +A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw +FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx +MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u +aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq +hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b +RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z +YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 +QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw +yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ +BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ +SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH +r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 +4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me +dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw +q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 +nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu +H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA +VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC +XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd +6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf ++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi +kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 +wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB +TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C +MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn +4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I +aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy +qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- 
+MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ +v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN 
CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 1977337328857672817 +# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 +# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe +# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 +MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc +tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd +IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j +b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC +AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw +ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m +iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF +Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ +hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P +Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE +EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV +1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t +CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR +5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw +f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 +ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK +GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV +-----END CERTIFICATE----- + +# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
+# Label: "vTrus ECC Root CA" +# Serial: 630369271402956006249506845124680065938238527194 +# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 +# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 +# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 +-----BEGIN CERTIFICATE----- +MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw +RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY +BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz +MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u +LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 +v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd +e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw +V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA +AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG +GJTO +-----END CERTIFICATE----- + +# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Label: "vTrus Root CA" +# Serial: 387574501246983434957692974888460947164905180485 +# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc +# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 +# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 +-----BEGIN CERTIFICATE----- +MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL +BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x +FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx +MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s +THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc +IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU +AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ +GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 +8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH +flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt +J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim +0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN +pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ +UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW +OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB +AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet +8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd +nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j +bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM +Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv +TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS +S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr +I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 +b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB +UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P +Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven +sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= +-----END CERTIFICATE----- + +# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group +# Subject: CN=ISRG Root X2 O=Internet Security Research Group +# Label: "ISRG Root X2" +# Serial: 87493402998870891108772069816698636114 +# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 +# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af +# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 +-----BEGIN CERTIFICATE----- +MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw +CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg +R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 +MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT +ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw +EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW ++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 +ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI +zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW +tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 +/q4AaOeMSQ+2b1tbFfLn +-----END CERTIFICATE----- + +# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Label: "HiPKI Root CA - G1" +# Serial: 60966262342023497858655262305426234976 +# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 +# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 +# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc +-----BEGIN CERTIFICATE----- +MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa +Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 +YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw +qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv +Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 +lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz +Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ +KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK +FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj +HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr +y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ +/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM +a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 +fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG +SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi +7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc +SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza +ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc +XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg +iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho +L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF +Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ +vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU +YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 159662223612894884239637590694 +# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc +# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 +# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 +-----BEGIN CERTIFICATE----- +MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD +VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw +MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g +UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT +BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx +uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV +HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ ++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 +bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 159662320309726417404178440727 +# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 +# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a +# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo +27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w +Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw +TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl +qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH +szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 +Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk +MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 +wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p +aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN +VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb +C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe +QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy +h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 +7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J +ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef +MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT +6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ +0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm +2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb +bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 159662449406622349769042896298 +# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc +# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 +# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt +nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY +6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu +MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k +RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg +f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV ++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo +dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW +Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa +G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq +gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H +vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 +0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC +B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u +NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg +yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev +HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 +xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR +TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg +JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV +7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl +6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 159662495401136852707857743206 +# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 +# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 +# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G +jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 +4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 +VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm +ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 159662532700760215368942768210 +# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 +# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 +# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi +QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR +HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D +9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 +p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD +-----END CERTIFICATE----- + +# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj +# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj +# Label: "Telia Root CA v2" +# Serial: 7288924052977061235122729490515358 +# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 +# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd +# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx +CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE +AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 +NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ +MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq +AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 +vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 +lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD +n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT +7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o +6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC +TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 +WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R +DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI +pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj +YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy +rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ +8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi +0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS +SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K +TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF +6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er +3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt +Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT +VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW +ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA +rBPuUBQemMc= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 1 2020" +# Serial: 165870826978392376648679885835942448534 +# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed +# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 +# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 +NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS +zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 +QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ +VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW +wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV +dWNbFJWcHwHP2NVypw87 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 1 2020" +# Serial: 126288379621884218666039612629459926992 +# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e +# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 +# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 +NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC +/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD +wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 +OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb +gfM0agPnIjhQW+0ZT0MW +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS ECC P384 Root G5" +# Serial: 13129116028163249804115411775095713523 +# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed +# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee +# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 +-----BEGIN CERTIFICATE----- +MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp +Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 +MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ +bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS +7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp +0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS +B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 +BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ +LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 +DXZDjC5Ty3zfDBeWUA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS RSA4096 Root G5" +# Serial: 11930366277458970227240571539258396554 +# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 +# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 +# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN +MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT +HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN +NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs +IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ +ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 +2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp +wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM +pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD +nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po +sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx +Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd +Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX +KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe +XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL +tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv +TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN +AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw +GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H +PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF +O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ +REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik +AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ +p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw +MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF +qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK +ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root R1 O=Certainly +# Subject: CN=Certainly Root R1 O=Certainly +# Label: "Certainly Root R1" +# Serial: 188833316161142517227353805653483829216 +# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 +# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af +# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw +PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy +dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 +YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 +1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT +vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed +aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 +1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 +r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 +cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ +wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ +6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA +2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH +Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR +eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB +/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u +d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr +PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d +8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi +1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd +rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di +taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 +lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj +yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn +Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy +yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n +wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 +OV+KmalBWQewLK8= +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root E1 O=Certainly +# Subject: CN=Certainly Root E1 O=Certainly +# Label: "Certainly Root E1" +# Serial: 8168531406727139161245376702891150584 +# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 +# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b +# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 +-----BEGIN CERTIFICATE----- +MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw +CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu +bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ +BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s +eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 +QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 +hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm +ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG +BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication RootCA3" +# Serial: 16247922307909811815 +# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26 +# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a +# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94 +-----BEGIN CERTIFICATE----- +MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV +BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw +JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2 +MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc +U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg +Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r +CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA +lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG +TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7 +9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7 +8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4 +g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we +GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst ++3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M +0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ +T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw +HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS +YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA +FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd +9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI +UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+ +OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke +gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf +iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV +nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD +2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI// +1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad +TdJ0MN1kURXbg4NR16/9M51NZg== +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. 
+# Label: "Security Communication ECC RootCA1" +# Serial: 15446673492073852651 +# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 +# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 +# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 +-----BEGIN CERTIFICATE----- +MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT +AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD +VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx +NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT +HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 +IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl +dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK +ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu +9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O +be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA1" +# Serial: 113562791157148395269083148143378328608 +# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 +# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a +# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU +MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI +T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz +MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF +SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh +bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z +xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ +spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 +58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR +at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll +5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq +nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK +V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ +pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO +z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn +jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ +WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF +7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 +YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli +awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u ++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 +X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN +SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo +P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI ++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz +znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 
+eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 +YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy +r/6zcCwupvI= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA2" +# Serial: 58605626836079930195615843123109055211 +# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c +# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 +# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 +-----BEGIN CERTIFICATE----- +MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw +CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ +VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy +MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ +TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS +b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B +IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ ++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK +sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA +94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B +43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root E46" +# Serial: 88989738453351742415770396670917916916 +# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 +# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a +# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 +-----BEGIN CERTIFICATE----- +MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw +CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T +ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN +MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG +A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT +ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC +WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ +6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B +Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa +qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q +4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root R46" +# Serial: 156256931880233212765902055439220583700 +# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 +# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 +# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 +-----BEGIN CERTIFICATE----- +MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf 
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD +Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw +HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY +MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp +YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa +ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz +SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf +iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X +ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 +IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS +VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE +SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu ++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt +8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L +HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt +zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P +AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c +mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ +YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 +gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA +Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB +JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX +DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui +TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 +dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 +LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp +0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY +QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS RSA Root CA 2022" +# Serial: 148535279242832292258835760425842727825 +# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da +# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca +# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed +-----BEGIN CERTIFICATE----- +MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO +MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD +DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX +DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw +b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC +AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP +L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY +t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins +S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 +PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO +L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 +R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w +dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS ++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS +d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG +AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f +gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j 
+BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z +NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt +hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM +QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf +R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ +DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW +P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy +lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq +bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w +AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q +r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji +Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU +98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS ECC Root CA 2022" +# Serial: 26605119622390491762507526719404364228 +# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 +# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 +# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 +-----BEGIN CERTIFICATE----- +MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT +U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 +MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh +dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm +acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN +SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME +GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW +uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp +15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN +b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA ECC TLS 2021" +# Serial: 81873346711060652204712539181482831616 +# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 +# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd +# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 +-----BEGIN CERTIFICATE----- +MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w +LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w +CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 +MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF +Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI +zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X +tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 +AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 +KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD +aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu +CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo +9H1/IISpQuQo +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Subject: 
CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA RSA TLS 2021" +# Serial: 111436099570196163832749341232207667876 +# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 +# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 +# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f +-----BEGIN CERTIFICATE----- +MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM +MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx +MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 +MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD +QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z +4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv +Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ +kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs +GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln +nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh +3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD +0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy +geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 +ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB +c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI +pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS +4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs +o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ +qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw +xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM +rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 +AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR +0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY +o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 +dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE +oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G3" +# Serial: 576386314500428537169965010905813481816650257167 +# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04 +# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7 +# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08 +-----BEGIN CERTIFICATE----- +MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM +BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp +ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe +Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw +IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU +cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC +DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS +T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK +AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1 +nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep +qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA +yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs +hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX +zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv +kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT +f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA +uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB +o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih +MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E +BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4 +wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2 +XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1 +JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j +ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV +VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx +xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on +AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d +7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj +gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV ++Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo +FGWsJwt0ivKH +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G4" +# Serial: 451799571007117016466790293371524403291602933463 +# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb +# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a +# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c +-----BEGIN CERTIFICATE----- +MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw +WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs +IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y +MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD +VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz +dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx +s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw +LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij +YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD +pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE +AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR +UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj +/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA== +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope +# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope +# Label: "CommScope Public Trust ECC Root-01" +# Serial: 385011430473757362783587124273108818652468453534 +# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16 +# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d +# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b +-----BEGIN CERTIFICATE----- +MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw +TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t +bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa +Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv +cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C +flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE +hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq +hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg +2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS +Um9poIyNStDuiw7LR47QjRE= +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope +# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope +# Label: "CommScope Public Trust ECC Root-02" +# Serial: 234015080301808452132356021271193974922492992893 +# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2 +# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5 +# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a +-----BEGIN CERTIFICATE----- +MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw +TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t +bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa +Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv +cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL +j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU 
+v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq +hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n +ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV +mkzw5l4lIhVtwodZ0LKOag== +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope +# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope +# Label: "CommScope Public Trust RSA Root-01" +# Serial: 354030733275608256394402989253558293562031411421 +# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8 +# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93 +# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81 +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL +BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi +Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1 +NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t +U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt +MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk +YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh +suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al +DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj +WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl +P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547 +KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p +UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/ +kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO +Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB +Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U +CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ +KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6 +NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ +nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+ +QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v +trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a +aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD +j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4 +Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w +lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn +YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc +icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope +# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope +# Label: "CommScope Public Trust RSA Root-02" +# Serial: 480062499834624527752716769107743131258796508494 +# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa +# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae +# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1 +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL +BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi +Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2 
+NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t +U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt +MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE +NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0 +kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C +rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz +hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2 +LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs +n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku +FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5 +kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3 +wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v +wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs +5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ +KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB +KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3 ++VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme +APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq +pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT +6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF +sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt +PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d +lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670 +v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O +rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7 +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS ECC Root 2020" +# Serial: 72082518505882327255703894282316633856 +# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd +# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec +# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1 +-----BEGIN CERTIFICATE----- +MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw +CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH +bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw +MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx +JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE +AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O +tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP +f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA +MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di +z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn +27iQ7t0l +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS RSA Root 2023" +# Serial: 44676229530606711399881795178081572759 +# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2 +# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93 +# 
SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj +MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0 +eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy +MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC +REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG +A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9 +cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV +cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA +U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6 +Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug +BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy +8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J +co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg +8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8 +rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12 +mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg ++y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX +gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2 +p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ +pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm +9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw +M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd +GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+ +CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t +xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+ +w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK +L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj +X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q +ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm +dTdmQRCsu/WU48IxK63nI1bMNSWSs1A= +-----END CERTIFICATE----- diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi/core.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi/core.py new file mode 100644 index 000000000..91f538bb1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi/core.py @@ -0,0 +1,114 @@ +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import sys +import atexit + +def exit_cacert_ctx() -> None: + _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr] + + +if sys.version_info >= (3, 11): + + from importlib.resources import as_file, files + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. 
In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) + + return _CACERT_PATH + + def contents() -> str: + return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") + +elif sys.version_info >= (3, 7): + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the + # file in cases where we're inside of a zipimport situation until + # someone actually calls where(), but we don't want to re-extract + # the file on every call of where(), so we'll do it once then store + # it in a global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you + # to manage the cleanup of this file, so it doesn't actually + # return a path, it returns a context manager that will give + # you the path when you enter it and will do any cleanup when + # you leave it. In the common case of not needing a temporary + # file, it will just return the file system location and the + # __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) + + return _CACERT_PATH + + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") + +else: + import os + import types + from typing import Union + + Package = Union[types.ModuleType, str] + Resource = Union[str, "os.PathLike"] + + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. + def read_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict' + ) -> str: + with open(where(), encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. 
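+    # Illustrative sketch, not part of upstream certifi: the two public
+    # helpers defined in this module are typically consumed like this
+    # (assumes only the standard library plus certifi itself):
+    #
+    #     import ssl
+    #     import certifi
+    #
+    #     # where() hands back the filesystem path of the bundled cacert.pem
+    #     ctx = ssl.create_default_context(cafile=certifi.where())
+    #
+    #     # contents() returns the same PEM bundle as text instead of a path
+    #     pem_text = certifi.contents()
+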
+ def where() -> str: + f = os.path.dirname(__file__) + + return os.path.join(f, "cacert.pem") + + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi/py.typed b/.direnv/python-3.8.20/lib/python3.8/site-packages/certifi/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/INSTALLER b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/INSTALLER new file mode 100644 index 000000000..ce3133dc1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/INSTALLER @@ -0,0 +1 @@ +Poetry 2.2.1 \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/LICENSE b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/LICENSE new file mode 100644 index 000000000..29225eee9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/LICENSE @@ -0,0 +1,26 @@ + +Except when otherwise stated (look for LICENSE files in directories or +information at the beginning of each file) all software and +documentation is licensed as follows: + + The MIT License + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/METADATA b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/METADATA new file mode 100644 index 000000000..f582bfbba --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/METADATA @@ -0,0 +1,39 @@ +Metadata-Version: 2.1 +Name: cffi +Version: 1.16.0 +Summary: Foreign Function Interface for Python calling C code. 
+Home-page: http://cffi.readthedocs.org +Author: Armin Rigo, Maciej Fijalkowski +Author-email: python-cffi@googlegroups.com +License: MIT +Project-URL: Documentation, http://cffi.readthedocs.org/ +Project-URL: Source Code, https://github.com/python-cffi/cffi +Project-URL: Issue Tracker, https://github.com/python-cffi/cffi/issues +Project-URL: Changelog, https://cffi.readthedocs.io/en/latest/whatsnew.html +Project-URL: Downloads, https://github.com/python-cffi/cffi/releases +Project-URL: Contact, https://groups.google.com/forum/#!forum/python-cffi +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: License :: OSI Approved :: MIT License +Requires-Python: >=3.8 +License-File: LICENSE +Requires-Dist: pycparser + + +CFFI +==== + +Foreign Function Interface for Python calling C code. +Please see the `Documentation `_. + +Contact +------- + +`Mailing list `_ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/RECORD b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/RECORD new file mode 100644 index 000000000..f87459682 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/RECORD @@ -0,0 +1,30 @@ +_cffi_backend.cpython-38-x86_64-linux-gnu.so,sha256=0Wc_F2ZeW72fY5FYqfmy722t0ptibyXgCWC1gMj0qC4,992800 +cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877 +cffi/api.py,sha256=wtJU0aGUC3TyYnjBIgsOIlv7drF19jV-y_srt7c8yhg,42085 +cffi/commontypes.py,sha256=QS4uxCDI7JhtTyjh1hlnCA-gynmaszWxJaRRLGkJa1A,2689 +cffi/verifier.py,sha256=oX8jpaohg2Qm3aHcznidAdvrVm5N4sQYG0a3Eo5mIl4,11182 +cffi/setuptools_ext.py,sha256=-ebj79lO2_AUH-kRcaja2pKY1Z_5tloGwsJgzK8P3Cc,8871 +cffi/_embedding.h,sha256=QEmrJKlB_W2VC601CjjyfMuxtgzPQwwEKFlwMCGHbT0,18787 +cffi/_imp_emulation.py,sha256=RxREG8zAbI2RPGBww90u_5fi8sWdahpdipOoPzkp7C0,2960 +cffi/_cffi_include.h,sha256=tKnA1rdSoPHp23FnDL1mDGwFo-Uj6fXfA6vA6kcoEUc,14800 +cffi/ffiplatform.py,sha256=avxFjdikYGJoEtmJO7ewVmwG_VEVl6EZ_WaNhZYCqv4,3584 +cffi/_cffi_errors.h,sha256=zQXt7uR_m8gUW-fI2hJg0KoSkJFwXv8RGUkEDZ177dQ,3908 +cffi/_shimmed_dist_utils.py,sha256=mLuEtxw4gbuA2De_gD7zEhb6Q8Wm2lBPtwC68gd9XTs,2007 +cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747 +cffi/vengine_gen.py,sha256=5dX7s1DU6pTBOMI6oTVn_8Bnmru_lj932B6b4v29Hlg,26684 +cffi/model.py,sha256=RVsAb3h_u7VHWZJ-J_Z4kvB36pFyFG_MVIjPOQ8YhQ8,21790 +cffi/recompiler.py,sha256=oTusgKQ02YY6LXhcmWcqpIlPrG178RMXXSBseSACRgg,64601 +cffi/cparser.py,sha256=rO_1pELRw1gI1DE1m4gi2ik5JMfpxouAACLXpRPlVEA,44231 +cffi/vengine_cpy.py,sha256=nK_im1DbdIGMMgxFgeo1MndFjaB-Qlkc2ZYlSquLjs0,43351 +cffi/__init__.py,sha256=uEnzaXlQndR9nc8ar1qk6_coNEfxfn4pA0sWPVg2MP8,513 +cffi/cffi_opcode.py,sha256=v9RdD_ovA8rCtqsC95Ivki5V667rAOhGgs3fb2q9xpM,5724 +cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976 +cffi/backend_ctypes.py,sha256=h5ZIzLc6BFVXnGyc9xPqZWUS7qGy7yFSDqXe68Sa8z4,42454 +cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374 +cffi-1.16.0.dist-info/METADATA,sha256=qOBI2i0qSlLOKwmzNjbJfLmrTzHV_JFS7uKbcFj_p9Y,1480 
+cffi-1.16.0.dist-info/LICENSE,sha256=BLgPWwd7vtaICM_rreteNSPyqMmpZJXFh72W3x6sKjM,1294 +cffi-1.16.0.dist-info/entry_points.txt,sha256=y6jTxnyeuLnL-XJcDv8uML3n6wyYiGRg8MTp_QGJ9Ho,75 +cffi-1.16.0.dist-info/WHEEL,sha256=CzyHWKXay4N1oFds0wxFofK9MEs-L6SZ6gHGZF6-4Co,148 +cffi-1.16.0.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19 +cffi-1.16.0.dist-info/INSTALLER,sha256=sz0Tnp99msIbbLB51q7U9nA6AvRKcsOeUKhrHH0Ulg4,12 +cffi-1.16.0.dist-info/RECORD,, diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/WHEEL b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/WHEEL new file mode 100644 index 000000000..e8d19a232 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.2) +Root-Is-Purelib: false +Tag: cp38-cp38-manylinux_2_17_x86_64 +Tag: cp38-cp38-manylinux2014_x86_64 + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/entry_points.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/entry_points.txt new file mode 100644 index 000000000..4b0274f23 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[distutils.setup_keywords] +cffi_modules = cffi.setuptools_ext:cffi_modules diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/top_level.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/top_level.txt new file mode 100644 index 000000000..f64577957 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi-1.16.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_cffi_backend +cffi diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/__init__.py new file mode 100644 index 000000000..90dedf433 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/__init__.py @@ -0,0 +1,14 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 'FFIError'] + +from .api import FFI +from .error import CDefError, FFIError, VerificationError, VerificationMissing +from .error import PkgConfigError + +__version__ = "1.16.0" +__version_info__ = (1, 16, 0) + +# The verifier module file names are based on the CRC32 of a string that +# contains the following version number. It may be older than __version__ +# if nothing is clearly incompatible. +__version_verifier_modules__ = "0.8.6" diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/_cffi_errors.h b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/_cffi_errors.h new file mode 100644 index 000000000..158e05903 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/_cffi_errors.h @@ -0,0 +1,149 @@ +#ifndef CFFI_MESSAGEBOX +# ifdef _MSC_VER +# define CFFI_MESSAGEBOX 1 +# else +# define CFFI_MESSAGEBOX 0 +# endif +#endif + + +#if CFFI_MESSAGEBOX +/* Windows only: logic to take the Python-CFFI embedding logic + initialization errors and display them in a background thread + with MessageBox. The idea is that if the whole program closes + as a result of this problem, then likely it is already a console + program and you can read the stderr output in the console too. 
+ If it is not a console program, then it will likely show its own + dialog to complain, or generally not abruptly close, and for this + case the background thread should stay alive. +*/ +static void *volatile _cffi_bootstrap_text; + +static PyObject *_cffi_start_error_capture(void) +{ + PyObject *result = NULL; + PyObject *x, *m, *bi; + + if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text, + (void *)1, NULL) != NULL) + return (PyObject *)1; + + m = PyImport_AddModule("_cffi_error_capture"); + if (m == NULL) + goto error; + + result = PyModule_GetDict(m); + if (result == NULL) + goto error; + +#if PY_MAJOR_VERSION >= 3 + bi = PyImport_ImportModule("builtins"); +#else + bi = PyImport_ImportModule("__builtin__"); +#endif + if (bi == NULL) + goto error; + PyDict_SetItemString(result, "__builtins__", bi); + Py_DECREF(bi); + + x = PyRun_String( + "import sys\n" + "class FileLike:\n" + " def write(self, x):\n" + " try:\n" + " of.write(x)\n" + " except: pass\n" + " self.buf += x\n" + " def flush(self):\n" + " pass\n" + "fl = FileLike()\n" + "fl.buf = ''\n" + "of = sys.stderr\n" + "sys.stderr = fl\n" + "def done():\n" + " sys.stderr = of\n" + " return fl.buf\n", /* make sure the returned value stays alive */ + Py_file_input, + result, result); + Py_XDECREF(x); + + error: + if (PyErr_Occurred()) + { + PyErr_WriteUnraisable(Py_None); + PyErr_Clear(); + } + return result; +} + +#pragma comment(lib, "user32.lib") + +static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored) +{ + Sleep(666); /* may be interrupted if the whole process is closing */ +#if PY_MAJOR_VERSION >= 3 + MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text, + L"Python-CFFI error", + MB_OK | MB_ICONERROR); +#else + MessageBoxA(NULL, (char *)_cffi_bootstrap_text, + "Python-CFFI error", + MB_OK | MB_ICONERROR); +#endif + _cffi_bootstrap_text = NULL; + return 0; +} + +static void _cffi_stop_error_capture(PyObject *ecap) +{ + PyObject *s; + void *text; + + if (ecap == (PyObject *)1) + return; + + if (ecap == NULL) + goto error; + + s = PyRun_String("done()", Py_eval_input, ecap, ecap); + if (s == NULL) + goto error; + + /* Show a dialog box, but in a background thread, and + never show multiple dialog boxes at once. */ +#if PY_MAJOR_VERSION >= 3 + text = PyUnicode_AsWideCharString(s, NULL); +#else + text = PyString_AsString(s); +#endif + + _cffi_bootstrap_text = text; + + if (text != NULL) + { + HANDLE h; + h = CreateThread(NULL, 0, _cffi_bootstrap_dialog, + NULL, 0, NULL); + if (h != NULL) + CloseHandle(h); + } + /* decref the string, but it should stay alive as 'fl.buf' + in the small module above. It will really be freed only if + we later get another similar error. So it's a leak of at + most one copy of the small module. That's fine for this + situation which is usually a "fatal error" anyway. */ + Py_DECREF(s); + PyErr_Clear(); + return; + + error: + _cffi_bootstrap_text = NULL; + PyErr_Clear(); +} + +#else + +static PyObject *_cffi_start_error_capture(void) { return NULL; } +static void _cffi_stop_error_capture(PyObject *ecap) { } + +#endif diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/_cffi_include.h b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/_cffi_include.h new file mode 100644 index 000000000..e4c0a6724 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/_cffi_include.h @@ -0,0 +1,385 @@ +#define _CFFI_ + +/* We try to define Py_LIMITED_API before including Python.h. + + Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and + Py_REF_DEBUG are not defined. 
This is a best-effort approximation: + we can learn about Py_DEBUG from pyconfig.h, but it is unclear if + the same works for the other two macros. Py_DEBUG implies them, + but not the other way around. + + The implementation is messy (issue #350): on Windows, with _MSC_VER, + we have to define Py_LIMITED_API even before including pyconfig.h. + In that case, we guess what pyconfig.h will do to the macros above, + and check our guess after the #include. + + Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv + version >= 16.0.0. With older versions of either, you don't get a + copy of PYTHON3.DLL in the virtualenv. We can't check the version of + CPython *before* we even include pyconfig.h. ffi.set_source() puts + a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is + running on Windows < 3.5, as an attempt at fixing it, but that's + arguably wrong because it may not be the target version of Python. + Still better than nothing I guess. As another workaround, you can + remove the definition of Py_LIMITED_API here. + + See also 'py_limited_api' in cffi/setuptools_ext.py. +*/ +#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API) +# ifdef _MSC_VER +# if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) +# define Py_LIMITED_API +# endif +# include + /* sanity-check: Py_LIMITED_API will cause crashes if any of these + are also defined. Normally, the Python file PC/pyconfig.h does not + cause any of these to be defined, with the exception that _DEBUG + causes Py_DEBUG. Double-check that. */ +# ifdef Py_LIMITED_API +# if defined(Py_DEBUG) +# error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set" +# endif +# if defined(Py_TRACE_REFS) +# error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set" +# endif +# if defined(Py_REF_DEBUG) +# error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set" +# endif +# endif +# else +# include +# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) +# define Py_LIMITED_API +# endif +# endif +#endif + +#include +#ifdef __cplusplus +extern "C" { +#endif +#include +#include "parse_c_type.h" + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef 
unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif + +#ifdef __GNUC__ +# define _CFFI_UNUSED_FN __attribute__((unused)) +#else +# define _CFFI_UNUSED_FN /* nothing */ +#endif + +#ifdef __cplusplus +# ifndef _Bool + typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */ +# endif +#endif + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? 
(type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + not used any more +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \ + PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 +#define _cffi_call_python \ + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) +#define _cffi_to_c_wchar3216_t \ + ((int(*)(PyObject *))_cffi_exports[26]) +#define _cffi_from_c_wchar3216_t \ + ((PyObject *(*)(int))_cffi_exports[27]) +#define _CFFI_NUM_EXPORTS 28 + +struct _cffi_ctypedescr; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; + +#define _cffi_type(index) ( \ + assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ + (struct _cffi_ctypedescr *)_cffi_types[index]) + +static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, + const struct _cffi_type_context_s *ctx) +{ + PyObject *module, *o_arg, *new_module; + void *raw[] = { + (void *)module_name, + (void *)version, + (void *)_cffi_exports, + (void *)ctx, + }; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + o_arg = PyLong_FromVoidPtr((void *)raw); + if (o_arg == NULL) + goto failure; + + new_module = PyObject_CallMethod( + module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); + + Py_DECREF(o_arg); + Py_DECREF(module); + return new_module; + + failure: + Py_XDECREF(module); + return NULL; +} + + +#ifdef HAVE_WCHAR_H +typedef wchar_t _cffi_wchar_t; +#else +typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */ +#endif + +_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 2) 
+ return (uint16_t)_cffi_to_c_wchar_t(o); + else + return (uint16_t)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x) +{ + if (sizeof(_cffi_wchar_t) == 2) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 4) + return (int)_cffi_to_c_wchar_t(o); + else + return (int)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x) +{ + if (sizeof(_cffi_wchar_t) == 4) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +_CFFI_UNUSED_FN static int +_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +_CFFI_UNUSED_FN static void +_cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +/********** end CPython-specific section **********/ +#else +_CFFI_UNUSED_FN +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org +#endif + + +#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) + +#define _cffi_prim_int(size, sign) \ + ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ + (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ + (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ + (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ + _CFFI__UNKNOWN_PRIM) + +#define _cffi_prim_float(size) \ + ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ + (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ + (size) == sizeof(long double) ? 
_CFFI__UNKNOWN_LONG_DOUBLE : \ + _CFFI__UNKNOWN_FLOAT_PRIM) + +#define _cffi_check_int(got, got_nonpos, expected) \ + ((got_nonpos) == (expected <= 0) && \ + (got) == (unsigned long long)expected) + +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + +#ifdef __cplusplus +} +#endif diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/_embedding.h b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/_embedding.h new file mode 100644 index 000000000..1cb66f235 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/_embedding.h @@ -0,0 +1,550 @@ + +/***** Support code for embedding *****/ + +#ifdef __cplusplus +extern "C" { +#endif + + +#if defined(_WIN32) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. If _cffi_start_python() fails, then this is set + to NULL; otherwise, it should never be NULL. + + After initialization is complete, both are equal. However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. +*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + +#include "_cffi_errors.h" + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + PyObject *builtins; + + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + builtins = PyEval_GetBuiltins(); + if (builtins == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. 
+ Debugging load-time failures with embedding is not fun + */ + PyObject *ecap; + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + ecap = _cffi_start_error_capture(); + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString( + "Failed to initialize the Python-CFFI embedding logic:\n\n", f); + } + + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.16.0" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + _cffi_stop_error_capture(ecap); + } + result = -1; + goto done; +} + +#if PY_VERSION_HEX < 0x03080000 +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ +#endif + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + (What it really does used to be completely different in Python 2 + and Python 3, with the Python 2 solution avoiding the spin-lock + around the Py_InitializeEx() call. However, after recent changes + to CPython 2.7 (issue #358) it no longer works. So we use the + Python 3 solution everywhere.) + + This initializes Python by calling Py_InitializeEx(). + Important: this must not be called concurrently at all. + So we use a global variable as a simple spin lock. This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. + + In Python < 3.8, we choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + In Python >= 3.8, this string array is no longer writable, so + instead we pick PyCapsuleType.tp_version_tag. We can't change + Python < 3.8 because someone might use a mixture of cffi + embedded modules, some of which were compiled before this file + changed. + + In Python >= 3.12, this stopped working because that particular + tp_version_tag gets modified during interpreter startup. It's + arguably a bad idea before 3.12 too, but again we can't change + that because someone might use a mixture of cffi embedded + modules, and no-one reported a bug so far. In Python >= 3.12 + we go instead for PyCapsuleType.tp_as_buffer, which is supposed + to always be NULL. We write to it temporarily a pointer to + a struct full of NULLs, which is semantically the same. 
+ */ + +#ifdef WITH_THREAD +# if PY_VERSION_HEX < 0x03080000 + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value, *locked_value; + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = old_value + 1; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# else +# if PY_VERSION_HEX < 0x030C0000 + int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag; + int old_value, locked_value = -42; + assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG)); +# else + static struct ebp_s { PyBufferProcs buf; int mark; } empty_buffer_procs; + empty_buffer_procs.mark = -42; + PyBufferProcs *volatile *lock = (PyBufferProcs *volatile *) + &PyCapsule_Type.tp_as_buffer; + PyBufferProcs *old_value, *locked_value = &empty_buffer_procs.buf; +# endif + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value == 0) { + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { +# if PY_VERSION_HEX < 0x030C0000 + assert(old_value == locked_value); +# else + /* The pointer should point to a possibly different + empty_buffer_procs from another C extension module */ + assert(((struct ebp_s *)old_value)->mark == -42); +# endif + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# endif +#endif + + /* call Py_InitializeEx() */ + if (!Py_IsInitialized()) { + _cffi_py_initialize(); +#if PY_VERSION_HEX < 0x03070000 + PyEval_InitThreads(); +#endif + PyEval_SaveThread(); /* release the GIL */ + /* the returned tstate must be the one that has been stored into the + autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */ + } + else { +#if PY_VERSION_HEX < 0x03070000 + /* PyEval_InitThreads() is always a no-op from CPython 3.7 */ + PyGILState_STATE state = PyGILState_Ensure(); + PyEval_InitThreads(); + PyGILState_Release(state); +#endif + } + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, locked_value, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void *func; /* function pointer */ + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. 
when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. 
It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. */ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier + +#ifdef __cplusplus +} +#endif diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/_imp_emulation.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/_imp_emulation.py new file mode 100644 index 000000000..136abdddf --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/_imp_emulation.py @@ -0,0 +1,83 @@ + +try: + # this works on Python < 3.12 + from imp import * + +except ImportError: + # this is a limited emulation for Python >= 3.12. + # Note that this is used only for tests or for the old ffi.verify(). + # This is copied from the source code of Python 3.11. + + from _imp import (acquire_lock, release_lock, + is_builtin, is_frozen) + + from importlib._bootstrap import _load + + from importlib import machinery + import os + import sys + import tokenize + + SEARCH_ERROR = 0 + PY_SOURCE = 1 + PY_COMPILED = 2 + C_EXTENSION = 3 + PY_RESOURCE = 4 + PKG_DIRECTORY = 5 + C_BUILTIN = 6 + PY_FROZEN = 7 + PY_CODERESOURCE = 8 + IMP_HOOK = 9 + + def get_suffixes(): + extensions = [(s, 'rb', C_EXTENSION) + for s in machinery.EXTENSION_SUFFIXES] + source = [(s, 'r', PY_SOURCE) for s in machinery.SOURCE_SUFFIXES] + bytecode = [(s, 'rb', PY_COMPILED) for s in machinery.BYTECODE_SUFFIXES] + return extensions + source + bytecode + + def find_module(name, path=None): + if not isinstance(name, str): + raise TypeError("'name' must be a str, not {}".format(type(name))) + elif not isinstance(path, (type(None), list)): + # Backwards-compatibility + raise RuntimeError("'path' must be None or a list, " + "not {}".format(type(path))) + + if path is None: + if is_builtin(name): + return None, None, ('', '', C_BUILTIN) + elif is_frozen(name): + return None, None, ('', '', PY_FROZEN) + else: + path = sys.path + + for entry in path: + package_directory = os.path.join(entry, name) + for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]: + package_file_name = '__init__' + suffix + file_path = os.path.join(package_directory, package_file_name) + if os.path.isfile(file_path): + return None, package_directory, ('', '', PKG_DIRECTORY) + for suffix, mode, type_ in get_suffixes(): + file_name = name + suffix + file_path = os.path.join(entry, file_name) + if os.path.isfile(file_path): + break + else: + continue + break # Break out of outer loop when breaking out of inner loop. 
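+        # for/else: this 'else' belongs to the outer loop over path entries and
+        # runs only when that loop finished without 'break', i.e. no entry
+        # yielded a matching package directory or file, so we raise ImportError.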
+ else: + raise ImportError(name, name=name) + + encoding = None + if 'b' not in mode: + with open(file_path, 'rb') as file: + encoding = tokenize.detect_encoding(file.readline)[0] + file = open(file_path, mode, encoding=encoding) + return file, file_path, (suffix, mode, type_) + + def load_dynamic(name, path, file=None): + loader = machinery.ExtensionFileLoader(name, path) + spec = machinery.ModuleSpec(name=name, loader=loader, origin=path) + return _load(spec) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/_shimmed_dist_utils.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/_shimmed_dist_utils.py new file mode 100644 index 000000000..611bf40f4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/_shimmed_dist_utils.py @@ -0,0 +1,41 @@ +""" +Temporary shim module to indirect the bits of distutils we need from setuptools/distutils while providing useful +error messages beyond `No module named 'distutils' on Python >= 3.12, or when setuptools' vendored distutils is broken. + +This is a compromise to avoid a hard-dep on setuptools for Python >= 3.12, since many users don't need runtime compilation support from CFFI. +""" +import sys + +try: + # import setuptools first; this is the most robust way to ensure its embedded distutils is available + # (the .pth shim should usually work, but this is even more robust) + import setuptools +except Exception as ex: + if sys.version_info >= (3, 12): + # Python 3.12 has no built-in distutils to fall back on, so any import problem is fatal + raise Exception("This CFFI feature requires setuptools on Python >= 3.12. The setuptools module is missing or non-functional.") from ex + + # silently ignore on older Pythons (support fallback to stdlib distutils where available) +else: + del setuptools + +try: + # bring in just the bits of distutils we need, whether they really came from setuptools or stdlib-embedded distutils + from distutils import log, sysconfig + from distutils.ccompiler import CCompiler + from distutils.command.build_ext import build_ext + from distutils.core import Distribution, Extension + from distutils.dir_util import mkpath + from distutils.errors import DistutilsSetupError, CompileError, LinkError + from distutils.log import set_threshold, set_verbosity + + if sys.platform == 'win32': + from distutils.msvc9compiler import MSVCCompiler +except Exception as ex: + if sys.version_info >= (3, 12): + raise Exception("This CFFI feature requires setuptools on Python >= 3.12. Please install the setuptools package.") from ex + + # anything older, just let the underlying distutils import error fly + raise Exception("This CFFI feature requires distutils. Please install the distutils or setuptools package.") from ex + +del sys diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/api.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/api.py new file mode 100644 index 000000000..edeb79281 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/api.py @@ -0,0 +1,965 @@ +import sys, types +from .lock import allocate_lock +from .error import CDefError +from . import model + +try: + callable +except NameError: + # Python 3.1 + from collections import Callable + callable = lambda x: isinstance(x, Callable) + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +_unspecified = object() + + + +class FFI(object): + r''' + The main top-level class that you instantiate once, or once per module. 
+ + Example usage: + + ffi = FFI() + ffi.cdef(""" + int printf(const char *, ...); + """) + + C = ffi.dlopen(None) # standard library + -or- + C = ffi.verify() # use a C compiler: verify the decl above is right + + C.printf("hello, %s!\n", ffi.new("char[]", "world")) + ''' + + def __init__(self, backend=None): + """Create an FFI instance. The 'backend' argument is used to + select a non-default backend, mostly for tests. + """ + if backend is None: + # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with + # _cffi_backend.so compiled. + import _cffi_backend as backend + from . import __version__ + if backend.__version__ != __version__: + # bad version! Try to be as explicit as possible. + if hasattr(backend, '__file__'): + # CPython + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % ( + __version__, __file__, + backend.__version__, backend.__file__)) + else: + # PyPy + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % ( + __version__, __file__, backend.__version__)) + # (If you insist you can also try to pass the option + # 'backend=backend_ctypes.CTypesBackend()', but don't + # rely on it! It's probably not going to work well.) + + from . import cparser + self._backend = backend + self._lock = allocate_lock() + self._parser = cparser.Parser() + self._cached_btypes = {} + self._parsed_types = types.ModuleType('parsed_types').__dict__ + self._new_types = types.ModuleType('new_types').__dict__ + self._function_caches = [] + self._libraries = [] + self._cdefsources = [] + self._included_ffis = [] + self._windows_unicode = None + self._init_once_cache = {} + self._cdef_version = None + self._embedding = None + self._typecache = model.get_typecache(backend) + if hasattr(backend, 'set_ffi'): + backend.set_ffi(self) + for name in list(backend.__dict__): + if name.startswith('RTLD_'): + setattr(self, name, getattr(backend, name)) + # + with self._lock: + self.BVoidP = self._get_cached_btype(model.voidp_type) + self.BCharA = self._get_cached_btype(model.char_array_type) + if isinstance(backend, types.ModuleType): + # _cffi_backend: attach these constants to the class + if not hasattr(FFI, 'NULL'): + FFI.NULL = self.cast(self.BVoidP, 0) + FFI.CData, FFI.CType = backend._get_types() + else: + # ctypes backend: attach these constants to the instance + self.NULL = self.cast(self.BVoidP, 0) + self.CData, self.CType = backend._get_types() + self.buffer = backend.buffer + + def cdef(self, csource, override=False, packed=False, pack=None): + """Parse the given C source. This registers all declared functions, + types, and global variables. The functions and global variables can + then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. + The types can be used in 'ffi.new()' and other functions. + If 'packed' is specified as True, all structs declared inside this + cdef are packed, i.e. laid out without any field alignment at all. + Alternatively, 'pack' can be a small integer, and requests for + alignment greater than that are ignored (pack=1 is equivalent to + packed=True). 
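+ + For example (the struct name 'pkt' is arbitrary): + + ffi.cdef("struct pkt { char tag; int value; };", packed=True) + + declares a struct that is laid out with no padding between its fields.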
+ """ + self._cdef(csource, override=override, packed=packed, pack=pack) + + def embedding_api(self, csource, packed=False, pack=None): + self._cdef(csource, packed=packed, pack=pack, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): + if not isinstance(csource, str): # unicode, on Python 2 + if not isinstance(csource, basestring): + raise TypeError("cdef() argument must be a string") + csource = csource.encode('ascii') + with self._lock: + self._cdef_version = object() + self._parser.parse(csource, override=override, **options) + self._cdefsources.append(csource) + if override: + for cache in self._function_caches: + cache.clear() + finishlist = self._parser._recomplete + if finishlist: + self._parser._recomplete = [] + for tp in finishlist: + tp.finish_backend_type(self, finishlist) + + def dlopen(self, name, flags=0): + """Load and return a dynamic library identified by 'name'. + The standard C library can be loaded by passing None. + Note that functions and types declared by 'ffi.cdef()' are not + linked to a particular library, just like C headers; in the + library we only look for the actual (untyped) symbols. + """ + if not (isinstance(name, basestring) or + name is None or + isinstance(name, self.CData)): + raise TypeError("dlopen(name): name must be a file name, None, " + "or an already-opened 'void *' handle") + with self._lock: + lib, function_cache = _make_ffi_library(self, name, flags) + self._function_caches.append(function_cache) + self._libraries.append(lib) + return lib + + def dlclose(self, lib): + """Close a library obtained with ffi.dlopen(). After this call, + access to functions or variables from the library will fail + (possibly with a segmentation fault). + """ + type(lib).__cffi_close__(lib) + + def _typeof_locked(self, cdecl): + # call me with the lock! + key = cdecl + if key in self._parsed_types: + return self._parsed_types[key] + # + if not isinstance(cdecl, str): # unicode, on Python 2 + cdecl = cdecl.encode('ascii') + # + type = self._parser.parse_type(cdecl) + really_a_function_type = type.is_raw_function + if really_a_function_type: + type = type.as_function_pointer() + btype = self._get_cached_btype(type) + result = btype, really_a_function_type + self._parsed_types[key] = result + return result + + def _typeof(self, cdecl, consider_function_as_funcptr=False): + # string -> ctype object + try: + result = self._parsed_types[cdecl] + except KeyError: + with self._lock: + result = self._typeof_locked(cdecl) + # + btype, really_a_function_type = result + if really_a_function_type and not consider_function_as_funcptr: + raise CDefError("the type %r is a function type, not a " + "pointer-to-function type" % (cdecl,)) + return btype + + def typeof(self, cdecl): + """Parse the C type given as a string and return the + corresponding object. + It can also be used on 'cdata' instance to get its C type. + """ + if isinstance(cdecl, basestring): + return self._typeof(cdecl) + if isinstance(cdecl, self.CData): + return self._backend.typeof(cdecl) + if isinstance(cdecl, types.BuiltinFunctionType): + res = _builtin_function_type(cdecl) + if res is not None: + return res + if (isinstance(cdecl, types.FunctionType) + and hasattr(cdecl, '_cffi_base_type')): + with self._lock: + return self._get_cached_btype(cdecl._cffi_base_type) + raise TypeError(type(cdecl)) + + def sizeof(self, cdecl): + """Return the size in bytes of the argument. It can be a + string naming a C type, or a 'cdata' instance. 
+ """ + if isinstance(cdecl, basestring): + BType = self._typeof(cdecl) + return self._backend.sizeof(BType) + else: + return self._backend.sizeof(cdecl) + + def alignof(self, cdecl): + """Return the natural alignment size in bytes of the C type + given as a string. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.alignof(cdecl) + + def offsetof(self, cdecl, *fields_or_indexes): + """Return the offset of the named field inside the given + structure or array, which must be given as a C type name. + You can give several field names in case of nested structures. + You can also give numeric values which correspond to array + items, in case of an array type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._typeoffsetof(cdecl, *fields_or_indexes)[1] + + def new(self, cdecl, init=None): + """Allocate an instance according to the specified C type and + return a pointer to it. The specified C type must be either a + pointer or an array: ``new('X *')`` allocates an X and returns + a pointer to it, whereas ``new('X[n]')`` allocates an array of + n X'es and returns an array referencing it (which works + mostly like a pointer, like in C). You can also use + ``new('X[]', n)`` to allocate an array of a non-constant + length n. + + The memory is initialized following the rules of declaring a + global variable in C: by default it is zero-initialized, but + an explicit initializer can be given which can be used to + fill all or part of the memory. + + When the returned object goes out of scope, the memory + is freed. In other words the returned object has + ownership of the value of type 'cdecl' that it points to. This + means that the raw data can be used as long as this object is + kept alive, but must not be used for a longer time. Be careful + about that when copying the pointer to the memory somewhere + else, e.g. into another structure. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.newp(cdecl, init) + + def new_allocator(self, alloc=None, free=None, + should_clear_after_alloc=True): + """Return a new allocator, i.e. a function that behaves like ffi.new() + but uses the provided low-level 'alloc' and 'free' functions. + + 'alloc' is called with the size as argument. If it returns NULL, a + MemoryError is raised. 'free' is called with the result of 'alloc' + as argument. Both can be either Python function or directly C + functions. If 'free' is None, then no free function is called. + If both 'alloc' and 'free' are None, the default is used. + + If 'should_clear_after_alloc' is set to False, then the memory + returned by 'alloc' is assumed to be already cleared (or you are + fine with garbage); otherwise CFFI will clear it. + """ + compiled_ffi = self._backend.FFI() + allocator = compiled_ffi.new_allocator(alloc, free, + should_clear_after_alloc) + def allocate(cdecl, init=None): + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return allocator(cdecl, init) + return allocate + + def cast(self, cdecl, source): + """Similar to a C cast: returns an instance of the named C + type initialized with the given 'source'. The source is + casted between integers or pointers of any type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.cast(cdecl, source) + + def string(self, cdata, maxlen=-1): + """Return a Python string (or unicode string) from the 'cdata'. 
+ If 'cdata' is a pointer or array of characters or bytes, returns + the null-terminated string. The returned string extends until + the first null character, or at most 'maxlen' characters. If + 'cdata' is an array then 'maxlen' defaults to its length. + + If 'cdata' is a pointer or array of wchar_t, returns a unicode + string following the same rules. + + If 'cdata' is a single character or byte or a wchar_t, returns + it as a string or unicode string. + + If 'cdata' is an enum, returns the value of the enumerator as a + string, or 'NUMBER' if the value is out of range. + """ + return self._backend.string(cdata, maxlen) + + def unpack(self, cdata, length): + """Unpack an array of C data of the given length, + returning a Python string/unicode/list. + + If 'cdata' is a pointer to 'char', returns a byte string. + It does not stop at the first null. This is equivalent to: + ffi.buffer(cdata, length)[:] + + If 'cdata' is a pointer to 'wchar_t', returns a unicode string. + 'length' is measured in wchar_t's; it is not the size in bytes. + + If 'cdata' is a pointer to anything else, returns a list of + 'length' items. This is a faster equivalent to: + [cdata[i] for i in range(length)] + """ + return self._backend.unpack(cdata, length) + + #def buffer(self, cdata, size=-1): + # """Return a read-write buffer object that references the raw C data + # pointed to by the given 'cdata'. The 'cdata' must be a pointer or + # an array. Can be passed to functions expecting a buffer, or directly + # manipulated with: + # + # buf[:] get a copy of it in a regular string, or + # buf[idx] as a single character + # buf[:] = ... + # buf[idx] = ... change the content + # """ + # note that 'buffer' is a type, set on this instance by __init__ + + def from_buffer(self, cdecl, python_buffer=_unspecified, + require_writable=False): + """Return a cdata of the given type pointing to the data of the + given Python object, which must support the buffer interface. + Note that this is not meant to be used on the built-in types + str or unicode (you can build 'char[]' arrays explicitly) + but only on objects containing large quantities of raw data + in some other format, like 'array.array' or numpy arrays. + + The first argument is optional and default to 'char[]'. + """ + if python_buffer is _unspecified: + cdecl, python_buffer = self.BCharA, cdecl + elif isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.from_buffer(cdecl, python_buffer, + require_writable) + + def memmove(self, dest, src, n): + """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. + + Like the C function memmove(), the memory areas may overlap; + apart from that it behaves like the C function memcpy(). + + 'src' can be any cdata ptr or array, or any Python buffer object. + 'dest' can be any cdata ptr or array, or a writable Python buffer + object. The size to copy, 'n', is always measured in bytes. + + Unlike other methods, this one supports all Python buffer including + byte strings and bytearrays---but it still does not support + non-contiguous buffers. + """ + return self._backend.memmove(dest, src, n) + + def callback(self, cdecl, python_callable=None, error=None, onerror=None): + """Return a callback object or a decorator making such a + callback object. 'cdecl' must name a C function pointer type. + The callback invokes the specified 'python_callable' (which may + be provided either directly or via a decorator). 
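+ + A small illustrative example, used as a decorator: + + @ffi.callback("int(int, int)") + def my_add(x, y): + return x + y +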
Important: the + callback object must be manually kept alive for as long as the + callback may be invoked from the C level. + """ + def callback_decorator_wrap(python_callable): + if not callable(python_callable): + raise TypeError("the 'python_callable' argument " + "is not callable") + return self._backend.callback(cdecl, python_callable, + error, onerror) + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) + if python_callable is None: + return callback_decorator_wrap # decorator mode + else: + return callback_decorator_wrap(python_callable) # direct mode + + def getctype(self, cdecl, replace_with=''): + """Return a string giving the C type 'cdecl', which may be itself + a string or a <ctype> object. If 'replace_with' is given, it gives + extra text to append (or insert for more complicated C types), like + a variable name, or '*' to get actually the C type 'pointer-to-cdecl'. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + replace_with = replace_with.strip() + if (replace_with.startswith('*') + and '&[' in self._backend.getcname(cdecl, '&')): + replace_with = '(%s)' % replace_with + elif replace_with and not replace_with[0] in '[(': + replace_with = ' ' + replace_with + return self._backend.getcname(cdecl, replace_with) + + def gc(self, cdata, destructor, size=0): + """Return a new cdata object that points to the same + data. Later, when this new cdata object is garbage-collected, + 'destructor(old_cdata_object)' will be called. + + The optional 'size' gives an estimate of the size, used to + trigger the garbage collection more eagerly. So far only used + on PyPy. It tells the GC that the returned object keeps alive + roughly 'size' bytes of external memory. + """ + return self._backend.gcp(cdata, destructor, size) + + def _get_cached_btype(self, type): + assert self._lock.acquire(False) is False + # call me with the lock! + try: + BType = self._cached_btypes[type] + except KeyError: + finishlist = [] + BType = type.get_cached_btype(self, finishlist) + for type in finishlist: + type.finish_backend_type(self, finishlist) + return BType + + def verify(self, source='', tmpdir=None, **kwargs): + """Verify that the current ffi signatures compile on this + machine, and return a dynamic library object. The dynamic + library can be used to call functions and access global + variables declared in this 'ffi'. The library is compiled + by the C compiler: it gives you C-level API compatibility + (including calling macros). This is unlike 'ffi.dlopen()', + which requires binary compatibility in the signatures. + """ + from .verifier import Verifier, _caller_dir_pycache + # + # If set_unicode(True) was called, insert the UNICODE and + # _UNICODE macro declarations + if self._windows_unicode: + self._apply_windows_unicode(kwargs) + # + # Set the tmpdir here, and not in Verifier.__init__: it picks + # up the caller's directory, which we want to be the caller of + # ffi.verify(), as opposed to the caller of Verifier(). + tmpdir = tmpdir or _caller_dir_pycache() + # + # Make a Verifier() and use it to load the library. + self.verifier = Verifier(self, source, tmpdir, **kwargs) + lib = self.verifier.load_library() + # + # Save the loaded library for keep-alive purposes, even + # if the caller doesn't keep it alive itself (it should).
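+ # A usage sketch (the declaration and C source below are illustrative, + # not taken from this file): + # ffi.cdef("int add1(int);") + # lib = ffi.verify("int add1(int x) { return x + 1; }") + # assert lib.add1(41) == 42 + # Appending to self._libraries below keeps such a 'lib' alive even if + # the caller drops its own reference.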
+ self._libraries.append(lib) + return lib + + def _get_errno(self): + return self._backend.get_errno() + def _set_errno(self, errno): + self._backend.set_errno(errno) + errno = property(_get_errno, _set_errno, None, + "the value of 'errno' from/to the C calls") + + def getwinerror(self, code=-1): + return self._backend.getwinerror(code) + + def _pointer_to(self, ctype): + with self._lock: + return model.pointer_cache(self, ctype) + + def addressof(self, cdata, *fields_or_indexes): + """Return the address of a <cdata 'struct-or-union'>. + If 'fields_or_indexes' are given, returns the address of that + field or array item in the structure or array, recursively in + case of nested structures. + """ + try: + ctype = self._backend.typeof(cdata) + except TypeError: + if '__addressof__' in type(cdata).__dict__: + return type(cdata).__addressof__(cdata, *fields_or_indexes) + raise + if fields_or_indexes: + ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) + else: + if ctype.kind == "pointer": + raise TypeError("addressof(pointer)") + offset = 0 + ctypeptr = self._pointer_to(ctype) + return self._backend.rawaddressof(ctypeptr, cdata, offset) + + def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): + ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) + for field1 in fields_or_indexes: + ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) + offset += offset1 + return ctype, offset + + def include(self, ffi_to_include): + """Includes the typedefs, structs, unions and enums defined + in another FFI instance. Usage is similar to a #include in C, + where a part of the program might include types defined in + another part for its own usage. Note that the include() + method has no effect on functions, constants and global + variables, which must anyway be accessed directly from the + lib object returned by the original FFI instance. + """ + if not isinstance(ffi_to_include, FFI): + raise TypeError("ffi.include() expects an argument that is also of" + " type cffi.FFI, not %r" % ( + type(ffi_to_include).__name__,)) + if ffi_to_include is self: + raise ValueError("self.include(self)") + with ffi_to_include._lock: + with self._lock: + self._parser.include(ffi_to_include._parser) + self._cdefsources.append('[') + self._cdefsources.extend(ffi_to_include._cdefsources) + self._cdefsources.append(']') + self._included_ffis.append(ffi_to_include) + + def new_handle(self, x): + return self._backend.newp_handle(self.BVoidP, x) + + def from_handle(self, x): + return self._backend.from_handle(x) + + def release(self, x): + self._backend.release(x) + + def set_unicode(self, enabled_flag): + """Windows: if 'enabled_flag' is True, enable the UNICODE and + _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR + to be (pointers to) wchar_t. If 'enabled_flag' is False, + declare these types to be (pointers to) plain 8-bit characters. + This is mostly for backward compatibility; you usually want True.
+ """ + if self._windows_unicode is not None: + raise ValueError("set_unicode() can only be called once") + enabled_flag = bool(enabled_flag) + if enabled_flag: + self.cdef("typedef wchar_t TBYTE;" + "typedef wchar_t TCHAR;" + "typedef const wchar_t *LPCTSTR;" + "typedef const wchar_t *PCTSTR;" + "typedef wchar_t *LPTSTR;" + "typedef wchar_t *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + else: + self.cdef("typedef char TBYTE;" + "typedef char TCHAR;" + "typedef const char *LPCTSTR;" + "typedef const char *PCTSTR;" + "typedef char *LPTSTR;" + "typedef char *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + self._windows_unicode = enabled_flag + + def _apply_windows_unicode(self, kwds): + defmacros = kwds.get('define_macros', ()) + if not isinstance(defmacros, (list, tuple)): + raise TypeError("'define_macros' must be a list or tuple") + defmacros = list(defmacros) + [('UNICODE', '1'), + ('_UNICODE', '1')] + kwds['define_macros'] = defmacros + + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # + if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python{0[0]}{0[1]}".format(sys.version_info) + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + if sys.version_info < (3,): + pythonlib = "pypy-c" + else: + pythonlib = "pypy3-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) + else: + if sys.platform == "win32": + template = "python%d%d" + if hasattr(sys, 'gettotalrefcount'): + template += '_d' + else: + try: + import sysconfig + except ImportError: # 2.6 + from cffi._shimmed_dist_utils import sysconfig + template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') + + def set_source(self, module_name, source, source_extension='.c', **kwds): + import os + if hasattr(self, '_assigned_source'): + raise ValueError("set_source() cannot be called several times " + "per ffi object") + if not isinstance(module_name, basestring): + raise TypeError("'module_name' must be a string") + if os.sep in module_name or (os.altsep and os.altsep in module_name): + raise ValueError("'module_name' must not contain '/': use a dotted " + "name to make a 'package.module' location") + self._assigned_source = (str(module_name), source, + source_extension, kwds) + + def set_source_pkgconfig(self, module_name, pkgconfig_libs, source, + source_extension='.c', **kwds): + from . 
import pkgconfig + if not isinstance(pkgconfig_libs, list): + raise TypeError("the pkgconfig_libs argument must be a list " + "of package names") + kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs) + pkgconfig.merge_flags(kwds, kwds2) + self.set_source(module_name, source, source_extension, **kwds) + + def distutils_extension(self, tmpdir='build', verbose=True): + from cffi._shimmed_dist_utils import mkpath + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored + return self.verifier.get_extension() + raise ValueError("set_source() must be called before" + " distutils_extension()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("distutils_extension() is only for C extension " + "modules, not for dlopen()-style pure Python " + "modules") + mkpath(tmpdir) + ext, updated = recompile(self, module_name, + source, tmpdir=tmpdir, extradir=tmpdir, + source_extension=source_extension, + call_c_compiler=False, **kwds) + if verbose: + if updated: + sys.stderr.write("regenerated: %r\n" % (ext.sources[0],)) + else: + sys.stderr.write("not modified: %r\n" % (ext.sources[0],)) + return ext + + def emit_c_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("emit_c_code() is only for C extension modules, " + "not for dlopen()-style pure Python modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def emit_python_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is not None: + raise TypeError("emit_python_code() is only for dlopen()-style " + "pure Python modules, not for C extension modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def compile(self, tmpdir='.', verbose=0, target=None, debug=None): + """The 'target' argument gives the final file name of the + compiled DLL. Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll/.dylib). + + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. + """ + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before compile()") + module_name, source, source_extension, kwds = self._assigned_source + return recompile(self, module_name, source, tmpdir=tmpdir, + target=target, source_extension=source_extension, + compiler_verbose=verbose, debug=debug, **kwds) + + def init_once(self, func, tag): + # Read _init_once_cache[tag], which is either (False, lock) if + # we're calling the function now in some thread, or (True, result). + # Don't call setdefault() in most cases, to avoid allocating and + # immediately freeing a lock; but still use setdefault() to avoid + # races.
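+ # A usage sketch (the setup function and tag are illustrative): + # handle = ffi.init_once(lambda: ffi.dlopen(None), "libc") + # Every caller, from any thread, gets back the same 'handle'; the + # setup function runs at most once.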
+ try: + x = self._init_once_cache[tag] + except KeyError: + x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) + # Common case: we got (True, result), so we return the result. + if x[0]: + return x[1] + # Else, it's a lock. Acquire it to serialize the following tests. + with x[1]: + # Read again from _init_once_cache the current status. + x = self._init_once_cache[tag] + if x[0]: + return x[1] + # Call the function and store the result back. + result = func() + self._init_once_cache[tag] = (True, result) + return result + + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + + def list_types(self): + """Returns the user type names known to this FFI instance. + This returns a tuple containing three lists of names: + (typedef_names, names_of_structs, names_of_unions) + """ + typedefs = [] + structs = [] + unions = [] + for key in self._parser._declarations: + if key.startswith('typedef '): + typedefs.append(key[8:]) + elif key.startswith('struct '): + structs.append(key[7:]) + elif key.startswith('union '): + unions.append(key[6:]) + typedefs.sort() + structs.sort() + unions.sort() + return (typedefs, structs, unions) + + +def _load_backend_lib(backend, name, flags): + import os + if not isinstance(name, basestring): + if sys.platform != "win32" or name is not None: + return backend.load_library(name, flags) + name = "c" # Windows: load_library(None) fails, but this works + # on Python 2 (backward compatibility hack only) + first_error = None + if '.' in name or '/' in name or os.sep in name: + try: + return backend.load_library(name, flags) + except OSError as e: + first_error = e + import ctypes.util + path = ctypes.util.find_library(name) + if path is None: + if name == "c" and sys.platform == "win32" and sys.version_info >= (3,): + raise OSError("dlopen(None) cannot work on Windows for Python 3 " + "(see http://bugs.python.org/issue23606)") + msg = ("ctypes.util.find_library() did not manage " + "to locate a library called %r" % (name,)) + if first_error is not None: + msg = "%s. 
Additionally, %s" % (first_error, msg) + raise OSError(msg) + return backend.load_library(path, flags) + +def _make_ffi_library(ffi, libname, flags): + backend = ffi._backend + backendlib = _load_backend_lib(backend, libname, flags) + # + def accessor_function(name): + key = 'function ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + value = backendlib.load_function(BType, name) + library.__dict__[name] = value + # + def accessor_variable(name): + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + read_variable = backendlib.read_variable + write_variable = backendlib.write_variable + setattr(FFILibrary, name, property( + lambda self: read_variable(BType, name), + lambda self, value: write_variable(BType, name, value))) + # + def addressof_var(name): + try: + return addr_variables[name] + except KeyError: + with ffi._lock: + if name not in addr_variables: + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + if BType.kind != 'array': + BType = model.pointer_cache(ffi, BType) + p = backendlib.load_function(BType, name) + addr_variables[name] = p + return addr_variables[name] + # + def accessor_constant(name): + raise NotImplementedError("non-integer constant '%s' cannot be " + "accessed from a dlopen() library" % (name,)) + # + def accessor_int_constant(name): + library.__dict__[name] = ffi._parser._int_constants[name] + # + accessors = {} + accessors_version = [False] + addr_variables = {} + # + def update_accessors(): + if accessors_version[0] is ffi._cdef_version: + return + # + for key, (tp, _) in ffi._parser._declarations.items(): + if not isinstance(tp, model.EnumType): + tag, name = key.split(' ', 1) + if tag == 'function': + accessors[name] = accessor_function + elif tag == 'variable': + accessors[name] = accessor_variable + elif tag == 'constant': + accessors[name] = accessor_constant + else: + for i, enumname in enumerate(tp.enumerators): + def accessor_enum(name, tp=tp, i=i): + tp.check_not_partial() + library.__dict__[name] = tp.enumvalues[i] + accessors[enumname] = accessor_enum + for name in ffi._parser._int_constants: + accessors.setdefault(name, accessor_int_constant) + accessors_version[0] = ffi._cdef_version + # + def make_accessor(name): + with ffi._lock: + if name in library.__dict__ or name in FFILibrary.__dict__: + return # added by another thread while waiting for the lock + if name not in accessors: + update_accessors() + if name not in accessors: + raise AttributeError(name) + accessors[name](name) + # + class FFILibrary(object): + def __getattr__(self, name): + make_accessor(name) + return getattr(self, name) + def __setattr__(self, name, value): + try: + property = getattr(self.__class__, name) + except AttributeError: + make_accessor(name) + setattr(self, name, value) + else: + property.__set__(self, value) + def __dir__(self): + with ffi._lock: + update_accessors() + return accessors.keys() + def __addressof__(self, name): + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + make_accessor(name) + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + raise AttributeError("cffi library has no function or " + "global variable named '%s'" % (name,)) + def __cffi_close__(self): + backendlib.close_lib() + self.__dict__.clear() + # + if isinstance(libname, basestring): + try: + if not 
isinstance(libname, str): # unicode, on Python 2 + libname = libname.encode('utf-8') + FFILibrary.__name__ = 'FFILibrary_%s' % libname + except UnicodeError: + pass + library = FFILibrary() + return library, library.__dict__ + +def _builtin_function_type(func): + # a hack to make at least ffi.typeof(builtin_function) work, + # if the builtin function was obtained by 'vengine_cpy'. + import sys + try: + module = sys.modules[func.__module__] + ffi = module._cffi_original_ffi + types_of_builtin_funcs = module._cffi_types_of_builtin_funcs + tp = types_of_builtin_funcs[func] + except (KeyError, AttributeError, TypeError): + return None + else: + with ffi._lock: + return ffi._get_cached_btype(tp) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/backend_ctypes.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/backend_ctypes.py new file mode 100644 index 000000000..e7956a79c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/backend_ctypes.py @@ -0,0 +1,1121 @@ +import ctypes, ctypes.util, operator, sys +from . import model + +if sys.version_info < (3,): + bytechr = chr +else: + unicode = str + long = int + xrange = range + bytechr = lambda num: bytes([num]) + +class CTypesType(type): + pass + +class CTypesData(object): + __metaclass__ = CTypesType + __slots__ = ['__weakref__'] + __name__ = '<cdata>' + + def __init__(self, *args): + raise TypeError("cannot instantiate %r" % (self.__class__,)) + + @classmethod + def _newp(cls, init): + raise TypeError("expected a pointer or array ctype, got '%s'" + % (cls._get_c_name(),)) + + @staticmethod + def _to_ctypes(value): + raise TypeError + + @classmethod + def _arg_to_ctypes(cls, *value): + try: + ctype = cls._ctype + except AttributeError: + raise TypeError("cannot create an instance of %r" % (cls,)) + if value: + res = cls._to_ctypes(*value) + if not isinstance(res, ctype): + res = cls._ctype(res) + else: + res = cls._ctype() + return res + + @classmethod + def _create_ctype_obj(cls, init): + if init is None: + return cls._arg_to_ctypes() + else: + return cls._arg_to_ctypes(init) + + @staticmethod + def _from_ctypes(ctypes_value): + raise TypeError + + @classmethod + def _get_c_name(cls, replace_with=''): + return cls._reftypename.replace(' &', replace_with) + + @classmethod + def _fix_class(cls): + cls.__name__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__module__ = 'ffi' + + def _get_own_repr(self): + raise NotImplementedError + + def _addr_repr(self, address): + if address == 0: + return 'NULL' + else: + if address < 0: + address += 1 << (8*ctypes.sizeof(ctypes.c_void_p)) + return '0x%x' % address + + def __repr__(self, c_name=None): + own = self._get_own_repr() + return '<cdata %r %s>' % (c_name or self._get_c_name(), own) + + def _convert_to_address(self, BClass): + if BClass is None: + raise TypeError("cannot convert %r to an address" % ( + self._get_c_name(),)) + else: + raise TypeError("cannot convert %r to %r" % ( + self._get_c_name(), BClass._get_c_name())) + + @classmethod + def _get_size(cls): + return ctypes.sizeof(cls._ctype) + + def _get_size_of_instance(self): + return ctypes.sizeof(self._ctype) + + @classmethod + def _cast_from(cls, source): + raise TypeError("cannot cast to %r" % (cls._get_c_name(),)) + + def _cast_to_integer(self): + return self._convert_to_address(None) + + @classmethod + def _alignment(cls): + return ctypes.alignment(cls._ctype) + + def __iter__(self): + raise TypeError("cdata %r does not support iteration" % (
self._get_c_name()),) + + def _make_cmp(name): + cmpfunc = getattr(operator, name) + def cmp(self, other): + v_is_ptr = not isinstance(self, CTypesGenericPrimitive) + w_is_ptr = (isinstance(other, CTypesData) and + not isinstance(other, CTypesGenericPrimitive)) + if v_is_ptr and w_is_ptr: + return cmpfunc(self._convert_to_address(None), + other._convert_to_address(None)) + elif v_is_ptr or w_is_ptr: + return NotImplemented + else: + if isinstance(self, CTypesGenericPrimitive): + self = self._value + if isinstance(other, CTypesGenericPrimitive): + other = other._value + return cmpfunc(self, other) + cmp.func_name = name + return cmp + + __eq__ = _make_cmp('__eq__') + __ne__ = _make_cmp('__ne__') + __lt__ = _make_cmp('__lt__') + __le__ = _make_cmp('__le__') + __gt__ = _make_cmp('__gt__') + __ge__ = _make_cmp('__ge__') + + def __hash__(self): + return hash(self._convert_to_address(None)) + + def _to_string(self, maxlen): + raise TypeError("string(): %r" % (self,)) + + +class CTypesGenericPrimitive(CTypesData): + __slots__ = [] + + def __hash__(self): + return hash(self._value) + + def _get_own_repr(self): + return repr(self._from_ctypes(self._value)) + + +class CTypesGenericArray(CTypesData): + __slots__ = [] + + @classmethod + def _newp(cls, init): + return cls(init) + + def __iter__(self): + for i in xrange(len(self)): + yield self[i] + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + +class CTypesGenericPtr(CTypesData): + __slots__ = ['_address', '_as_ctype_ptr'] + _automatic_casts = False + kind = "pointer" + + @classmethod + def _newp(cls, init): + return cls(init) + + @classmethod + def _cast_from(cls, source): + if source is None: + address = 0 + elif isinstance(source, CTypesData): + address = source._cast_to_integer() + elif isinstance(source, (int, long)): + address = source + else: + raise TypeError("bad type for cast to %r: %r" % + (cls, type(source).__name__)) + return cls._new_pointer_at(address) + + @classmethod + def _new_pointer_at(cls, address): + self = cls.__new__(cls) + self._address = address + self._as_ctype_ptr = ctypes.cast(address, cls._ctype) + return self + + def _get_own_repr(self): + try: + return self._addr_repr(self._address) + except AttributeError: + return '???' 
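+ # Illustrative sketch (the name 'SomePtr' is hypothetical, standing + # for any concrete class built by new_pointer_type()): a pointer cdata + # here is a raw integer address plus a ctypes view of it, so + # p = SomePtr._new_pointer_at(0x1000) + # gives p._address == 0x1000 and bool(p) is True, and casting it to an + # integer via _cast_to_integer() below simply returns that address.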
+ + def _cast_to_integer(self): + return self._address + + def __nonzero__(self): + return bool(self._address) + __bool__ = __nonzero__ + + @classmethod + def _to_ctypes(cls, value): + if not isinstance(value, CTypesData): + raise TypeError("unexpected %s object" % type(value).__name__) + address = value._convert_to_address(cls) + return ctypes.cast(address, cls._ctype) + + @classmethod + def _from_ctypes(cls, ctypes_ptr): + address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 + return cls._new_pointer_at(address) + + @classmethod + def _initialize(cls, ctypes_ptr, value): + if value: + ctypes_ptr.contents = cls._to_ctypes(value).contents + + def _convert_to_address(self, BClass): + if (BClass in (self.__class__, None) or BClass._automatic_casts + or self._automatic_casts): + return self._address + else: + return CTypesData._convert_to_address(self, BClass) + + +class CTypesBaseStructOrUnion(CTypesData): + __slots__ = ['_blob'] + + @classmethod + def _create_ctype_obj(cls, init): + # may be overridden + raise TypeError("cannot instantiate opaque type %s" % (cls,)) + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + @classmethod + def _offsetof(cls, fieldname): + return getattr(cls._ctype, fieldname).offset + + def _convert_to_address(self, BClass): + if getattr(BClass, '_BItem', None) is self.__class__: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @classmethod + def _from_ctypes(cls, ctypes_struct_or_union): + self = cls.__new__(cls) + self._blob = ctypes_struct_or_union + return self + + @classmethod + def _to_ctypes(cls, value): + return value._blob + + def __repr__(self, c_name=None): + return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) + + +class CTypesBackend(object): + + PRIMITIVE_TYPES = { + 'char': ctypes.c_char, + 'short': ctypes.c_short, + 'int': ctypes.c_int, + 'long': ctypes.c_long, + 'long long': ctypes.c_longlong, + 'signed char': ctypes.c_byte, + 'unsigned char': ctypes.c_ubyte, + 'unsigned short': ctypes.c_ushort, + 'unsigned int': ctypes.c_uint, + 'unsigned long': ctypes.c_ulong, + 'unsigned long long': ctypes.c_ulonglong, + 'float': ctypes.c_float, + 'double': ctypes.c_double, + '_Bool': ctypes.c_bool, + } + + for _name in ['unsigned long long', 'unsigned long', + 'unsigned int', 'unsigned short', 'unsigned char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] + + for _name in ['long long', 'long', 'int', 'short', 'signed char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] + PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] + + + def __init__(self): + self.RTLD_LAZY = 0 # not supported anyway by ctypes + self.RTLD_NOW = 0 + self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL + self.RTLD_LOCAL = ctypes.RTLD_LOCAL + + def set_ffi(self, ffi): + self.ffi = ffi + + def _get_types(self): + return CTypesData, CTypesType + + def load_library(self, path, flags=0): + cdll = ctypes.CDLL(path, flags) + return CTypesLibrary(self, cdll) + + def 
new_void_type(self): + class CTypesVoid(CTypesData): + __slots__ = [] + _reftypename = 'void &' + @staticmethod + def _from_ctypes(novalue): + return None + @staticmethod + def _to_ctypes(novalue): + if novalue is not None: + raise TypeError("None expected, got %s object" % + (type(novalue).__name__,)) + return None + CTypesVoid._fix_class() + return CTypesVoid + + def new_primitive_type(self, name): + if name == 'wchar_t': + raise NotImplementedError(name) + ctype = self.PRIMITIVE_TYPES[name] + if name == 'char': + kind = 'char' + elif name in ('float', 'double'): + kind = 'float' + else: + if name in ('signed char', 'unsigned char'): + kind = 'byte' + elif name == '_Bool': + kind = 'bool' + else: + kind = 'int' + is_signed = (ctype(-1).value == -1) + # + def _cast_source_to_int(source): + if isinstance(source, (int, long, float)): + source = int(source) + elif isinstance(source, CTypesData): + source = source._cast_to_integer() + elif isinstance(source, bytes): + source = ord(source) + elif source is None: + source = 0 + else: + raise TypeError("bad type for cast to %r: %r" % + (CTypesPrimitive, type(source).__name__)) + return source + # + kind1 = kind + class CTypesPrimitive(CTypesGenericPrimitive): + __slots__ = ['_value'] + _ctype = ctype + _reftypename = '%s &' % name + kind = kind1 + + def __init__(self, value): + self._value = value + + @staticmethod + def _create_ctype_obj(init): + if init is None: + return ctype() + return ctype(CTypesPrimitive._to_ctypes(init)) + + if kind == 'int' or kind == 'byte': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = ctype(source).value # cast within range + return cls(source) + def __int__(self): + return self._value + + if kind == 'bool': + @classmethod + def _cast_from(cls, source): + if not isinstance(source, (int, long, float)): + source = _cast_source_to_int(source) + return cls(bool(source)) + def __int__(self): + return int(self._value) + + if kind == 'char': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = bytechr(source & 0xFF) + return cls(source) + def __int__(self): + return ord(self._value) + + if kind == 'float': + @classmethod + def _cast_from(cls, source): + if isinstance(source, float): + pass + elif isinstance(source, CTypesGenericPrimitive): + if hasattr(source, '__float__'): + source = float(source) + else: + source = int(source) + else: + source = _cast_source_to_int(source) + source = ctype(source).value # fix precision + return cls(source) + def __int__(self): + return int(self._value) + def __float__(self): + return self._value + + _cast_to_integer = __int__ + + if kind == 'int' or kind == 'byte' or kind == 'bool': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long)): + if isinstance(x, CTypesData): + x = int(x) + else: + raise TypeError("integer expected, got %s" % + type(x).__name__) + if ctype(x).value != x: + if not is_signed and x < 0: + raise OverflowError("%s: negative integer" % name) + else: + raise OverflowError("%s: integer out of bounds" + % name) + return x + + if kind == 'char': + @staticmethod + def _to_ctypes(x): + if isinstance(x, bytes) and len(x) == 1: + return x + if isinstance(x, CTypesPrimitive): # > + return x._value + raise TypeError("character expected, got %s" % + type(x).__name__) + def __nonzero__(self): + return ord(self._value) != 0 + else: + def __nonzero__(self): + return self._value != 0 + __bool__ = __nonzero__ + + if kind == 'float': + @staticmethod + def _to_ctypes(x): + if 
not isinstance(x, (int, long, float, CTypesData)): + raise TypeError("float expected, got %s" % + type(x).__name__) + return ctype(x).value + + @staticmethod + def _from_ctypes(value): + return getattr(value, 'value', value) + + @staticmethod + def _initialize(blob, init): + blob.value = CTypesPrimitive._to_ctypes(init) + + if kind == 'char': + def _to_string(self, maxlen): + return self._value + if kind == 'byte': + def _to_string(self, maxlen): + return chr(self._value & 0xff) + # + CTypesPrimitive._fix_class() + return CTypesPrimitive + + def new_pointer_type(self, BItem): + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'charp' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'bytep' + elif BItem is getbtype(model.void_type): + kind = 'voidp' + else: + kind = 'generic' + # + class CTypesPtr(CTypesGenericPtr): + __slots__ = ['_own'] + if kind == 'charp': + __slots__ += ['__as_strbuf'] + _BItem = BItem + if hasattr(BItem, '_ctype'): + _ctype = ctypes.POINTER(BItem._ctype) + _bitem_size = ctypes.sizeof(BItem._ctype) + else: + _ctype = ctypes.c_void_p + if issubclass(BItem, CTypesGenericArray): + _reftypename = BItem._get_c_name('(* &)') + else: + _reftypename = BItem._get_c_name(' * &') + + def __init__(self, init): + ctypeobj = BItem._create_ctype_obj(init) + if kind == 'charp': + self.__as_strbuf = ctypes.create_string_buffer( + ctypeobj.value + b'\x00') + self._as_ctype_ptr = ctypes.cast( + self.__as_strbuf, self._ctype) + else: + self._as_ctype_ptr = ctypes.pointer(ctypeobj) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own = True + + def __add__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address + + other * self._bitem_size) + else: + return NotImplemented + + def __sub__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address - + other * self._bitem_size) + elif type(self) is type(other): + return (self._address - other._address) // self._bitem_size + else: + return NotImplemented + + def __getitem__(self, index): + if getattr(self, '_own', False) and index != 0: + raise IndexError + return BItem._from_ctypes(self._as_ctype_ptr[index]) + + def __setitem__(self, index, value): + self._as_ctype_ptr[index] = BItem._to_ctypes(value) + + if kind == 'charp' or kind == 'voidp': + @classmethod + def _arg_to_ctypes(cls, *value): + if value and isinstance(value[0], bytes): + return ctypes.c_char_p(value[0]) + else: + return super(CTypesPtr, cls)._arg_to_ctypes(*value) + + if kind == 'charp' or kind == 'bytep': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = sys.maxsize + p = ctypes.cast(self._as_ctype_ptr, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % ( + ctypes.sizeof(self._as_ctype_ptr.contents),) + return super(CTypesPtr, self)._get_own_repr() + # + if (BItem is self.ffi._get_cached_btype(model.void_type) or + BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): + CTypesPtr._automatic_casts = True + # + CTypesPtr._fix_class() + return CTypesPtr + + def new_array_type(self, CTypesPtr, length): + if length is None: + brackets = ' &[]' + else: + brackets = ' &[%d]' % length + BItem = CTypesPtr._BItem + getbtype = self.ffi._get_cached_btype + if 
BItem is getbtype(model.PrimitiveType('char')): + kind = 'char' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'byte' + else: + kind = 'generic' + # + class CTypesArray(CTypesGenericArray): + __slots__ = ['_blob', '_own'] + if length is not None: + _ctype = BItem._ctype * length + else: + __slots__.append('_ctype') + _reftypename = BItem._get_c_name(brackets) + _declared_length = length + _CTPtr = CTypesPtr + + def __init__(self, init): + if length is None: + if isinstance(init, (int, long)): + len1 = init + init = None + elif kind == 'char' and isinstance(init, bytes): + len1 = len(init) + 1 # extra null + else: + init = tuple(init) + len1 = len(init) + self._ctype = BItem._ctype * len1 + self._blob = self._ctype() + self._own = True + if init is not None: + self._initialize(self._blob, init) + + @staticmethod + def _initialize(blob, init): + if isinstance(init, bytes): + init = [init[i:i+1] for i in range(len(init))] + else: + if isinstance(init, CTypesGenericArray): + if (len(init) != len(blob) or + not isinstance(init, CTypesArray)): + raise TypeError("length/type mismatch: %s" % (init,)) + init = tuple(init) + if len(init) > len(blob): + raise IndexError("too many initializers") + addr = ctypes.cast(blob, ctypes.c_void_p).value + PTR = ctypes.POINTER(BItem._ctype) + itemsize = ctypes.sizeof(BItem._ctype) + for i, value in enumerate(init): + p = ctypes.cast(addr + i * itemsize, PTR) + BItem._initialize(p.contents, value) + + def __len__(self): + return len(self._blob) + + def __getitem__(self, index): + if not (0 <= index < len(self._blob)): + raise IndexError + return BItem._from_ctypes(self._blob[index]) + + def __setitem__(self, index, value): + if not (0 <= index < len(self._blob)): + raise IndexError + self._blob[index] = BItem._to_ctypes(value) + + if kind == 'char' or kind == 'byte': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = len(self._blob) + p = ctypes.cast(self._blob, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % (ctypes.sizeof(self._blob),) + return super(CTypesArray, self)._get_own_repr() + + def _convert_to_address(self, BClass): + if BClass in (CTypesPtr, None) or BClass._automatic_casts: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @staticmethod + def _from_ctypes(ctypes_array): + self = CTypesArray.__new__(CTypesArray) + self._blob = ctypes_array + return self + + @staticmethod + def _arg_to_ctypes(value): + return CTypesPtr._arg_to_ctypes(value) + + def __add__(self, other): + if isinstance(other, (int, long)): + return CTypesPtr._new_pointer_at( + ctypes.addressof(self._blob) + + other * ctypes.sizeof(BItem._ctype)) + else: + return NotImplemented + + @classmethod + def _cast_from(cls, source): + raise NotImplementedError("casting to %r" % ( + cls._get_c_name(),)) + # + CTypesArray._fix_class() + return CTypesArray + + def _new_struct_or_union(self, kind, name, base_ctypes_class): + # + class struct_or_union(base_ctypes_class): + pass + struct_or_union.__name__ = '%s_%s' % (kind, name) + kind1 = kind + # + class CTypesStructOrUnion(CTypesBaseStructOrUnion): + __slots__ = ['_blob'] + _ctype = struct_or_union + _reftypename = '%s &' % (name,) + _kind = kind = kind1 + # + CTypesStructOrUnion._fix_class() + return CTypesStructOrUnion + + def 
new_struct_type(self, name): + return self._new_struct_or_union('struct', name, ctypes.Structure) + + def new_union_type(self, name): + return self._new_struct_or_union('union', name, ctypes.Union) + + def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, + totalsize=-1, totalalignment=-1, sflags=0, + pack=0): + if totalsize >= 0 or totalalignment >= 0: + raise NotImplementedError("the ctypes backend of CFFI does not support " + "structures completed by verify(); please " + "compile and install the _cffi_backend module.") + struct_or_union = CTypesStructOrUnion._ctype + fnames = [fname for (fname, BField, bitsize) in fields] + btypes = [BField for (fname, BField, bitsize) in fields] + bitfields = [bitsize for (fname, BField, bitsize) in fields] + # + bfield_types = {} + cfields = [] + for (fname, BField, bitsize) in fields: + if bitsize < 0: + cfields.append((fname, BField._ctype)) + bfield_types[fname] = BField + else: + cfields.append((fname, BField._ctype, bitsize)) + bfield_types[fname] = Ellipsis + if sflags & 8: + struct_or_union._pack_ = 1 + elif pack: + struct_or_union._pack_ = pack + struct_or_union._fields_ = cfields + CTypesStructOrUnion._bfield_types = bfield_types + # + @staticmethod + def _create_ctype_obj(init): + result = struct_or_union() + if init is not None: + initialize(result, init) + return result + CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj + # + def initialize(blob, init): + if is_union: + if len(init) > 1: + raise ValueError("union initializer: %d items given, but " + "only one supported (use a dict if needed)" + % (len(init),)) + if not isinstance(init, dict): + if isinstance(init, (bytes, unicode)): + raise TypeError("union initializer: got a str") + init = tuple(init) + if len(init) > len(fnames): + raise ValueError("too many values for %s initializer" % + CTypesStructOrUnion._get_c_name()) + init = dict(zip(fnames, init)) + addr = ctypes.addressof(blob) + for fname, value in init.items(): + BField, bitsize = name2fieldtype[fname] + assert bitsize < 0, \ + "not implemented: initializer with bit fields" + offset = CTypesStructOrUnion._offsetof(fname) + PTR = ctypes.POINTER(BField._ctype) + p = ctypes.cast(addr + offset, PTR) + BField._initialize(p.contents, value) + is_union = CTypesStructOrUnion._kind == 'union' + name2fieldtype = dict(zip(fnames, zip(btypes, bitfields))) + # + for fname, BField, bitsize in fields: + if fname == '': + raise NotImplementedError("nested anonymous structs/unions") + if hasattr(CTypesStructOrUnion, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + if bitsize < 0: + def getter(self, fname=fname, BField=BField, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BField._from_ctypes(p.contents) + def setter(self, value, fname=fname, BField=BField): + setattr(self._blob, fname, BField._to_ctypes(value)) + # + if issubclass(BField, CTypesGenericArray): + setter = None + if BField._declared_length == 0: + def getter(self, fname=fname, BFieldPtr=BField._CTPtr, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BFieldPtr._from_ctypes(p) + # + else: + def getter(self, fname=fname, BField=BField): + return BField._from_ctypes(getattr(self._blob, fname)) + def setter(self, value, fname=fname, BField=BField): + # xxx obscure workaround + 
value = BField._to_ctypes(value) + oldvalue = getattr(self._blob, fname) + setattr(self._blob, fname, value) + if value != getattr(self._blob, fname): + setattr(self._blob, fname, oldvalue) + raise OverflowError("value too large for bitfield") + setattr(CTypesStructOrUnion, fname, property(getter, setter)) + # + CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) + for fname in fnames: + if hasattr(CTypesPtr, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + def getter(self, fname=fname): + return getattr(self[0], fname) + def setter(self, value, fname=fname): + setattr(self[0], fname, value) + setattr(CTypesPtr, fname, property(getter, setter)) + + def new_function_type(self, BArgs, BResult, has_varargs): + nameargs = [BArg._get_c_name() for BArg in BArgs] + if has_varargs: + nameargs.append('...') + nameargs = ', '.join(nameargs) + # + class CTypesFunctionPtr(CTypesGenericPtr): + __slots__ = ['_own_callback', '_name'] + _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), + *[BArg._ctype for BArg in BArgs], + use_errno=True) + _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) + + def __init__(self, init, error=None): + # create a callback to the Python callable init() + import traceback + assert not has_varargs, "varargs not supported for callbacks" + if getattr(BResult, '_ctype', None) is not None: + error = BResult._from_ctypes( + BResult._create_ctype_obj(error)) + else: + error = None + def callback(*args): + args2 = [] + for arg, BArg in zip(args, BArgs): + args2.append(BArg._from_ctypes(arg)) + try: + res2 = init(*args2) + res2 = BResult._to_ctypes(res2) + except: + traceback.print_exc() + res2 = error + if issubclass(BResult, CTypesGenericPtr): + if res2: + res2 = ctypes.cast(res2, ctypes.c_void_p).value + # .value: http://bugs.python.org/issue1574593 + else: + res2 = None + #print repr(res2) + return res2 + if issubclass(BResult, CTypesGenericPtr): + # The only pointers callbacks can return are void*s: + # http://bugs.python.org/issue5710 + callback_ctype = ctypes.CFUNCTYPE( + ctypes.c_void_p, + *[BArg._ctype for BArg in BArgs], + use_errno=True) + else: + callback_ctype = CTypesFunctionPtr._ctype + self._as_ctype_ptr = callback_ctype(callback) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own_callback = init + + @staticmethod + def _initialize(ctypes_ptr, value): + if value: + raise NotImplementedError("ctypes backend: not supported: " + "initializers for function pointers") + + def __repr__(self): + c_name = getattr(self, '_name', None) + if c_name: + i = self._reftypename.index('(* &)') + if self._reftypename[i-1] not in ' )*': + c_name = ' ' + c_name + c_name = self._reftypename.replace('(* &)', c_name) + return CTypesData.__repr__(self, c_name) + + def _get_own_repr(self): + if getattr(self, '_own_callback', None) is not None: + return 'calling %r' % (self._own_callback,) + return super(CTypesFunctionPtr, self)._get_own_repr() + + def __call__(self, *args): + if has_varargs: + assert len(args) >= len(BArgs) + extraargs = args[len(BArgs):] + args = args[:len(BArgs)] + else: + assert len(args) == len(BArgs) + ctypes_args = [] + for arg, BArg in zip(args, BArgs): + ctypes_args.append(BArg._arg_to_ctypes(arg)) + if has_varargs: + for i, arg in enumerate(extraargs): + if arg is None: + ctypes_args.append(ctypes.c_void_p(0)) # NULL + continue + if not isinstance(arg, CTypesData): + raise TypeError( + "argument %d passed in the variadic part " + "needs to be a cdata object 
(got %s)" % + (1 + len(BArgs) + i, type(arg).__name__)) + ctypes_args.append(arg._arg_to_ctypes(arg)) + result = self._as_ctype_ptr(*ctypes_args) + return BResult._from_ctypes(result) + # + CTypesFunctionPtr._fix_class() + return CTypesFunctionPtr + + def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): + assert isinstance(name, str) + reverse_mapping = dict(zip(reversed(enumvalues), + reversed(enumerators))) + # + class CTypesEnum(CTypesInt): + __slots__ = [] + _reftypename = '%s &' % name + + def _get_own_repr(self): + value = self._value + try: + return '%d: %s' % (value, reverse_mapping[value]) + except KeyError: + return str(value) + + def _to_string(self, maxlen): + value = self._value + try: + return reverse_mapping[value] + except KeyError: + return str(value) + # + CTypesEnum._fix_class() + return CTypesEnum + + def get_errno(self): + return ctypes.get_errno() + + def set_errno(self, value): + ctypes.set_errno(value) + + def string(self, b, maxlen=-1): + return b._to_string(maxlen) + + def buffer(self, bptr, size=-1): + raise NotImplementedError("buffer() with ctypes backend") + + def sizeof(self, cdata_or_BType): + if isinstance(cdata_or_BType, CTypesData): + return cdata_or_BType._get_size_of_instance() + else: + assert issubclass(cdata_or_BType, CTypesData) + return cdata_or_BType._get_size() + + def alignof(self, BType): + assert issubclass(BType, CTypesData) + return BType._alignment() + + def newp(self, BType, source): + if not issubclass(BType, CTypesData): + raise TypeError + return BType._newp(source) + + def cast(self, BType, source): + return BType._cast_from(source) + + def callback(self, BType, source, error, onerror): + assert onerror is None # XXX not implemented + return BType(source, error) + + _weakref_cache_ref = None + + def gcp(self, cdata, destructor, size=0): + if self._weakref_cache_ref is None: + import weakref + class MyRef(weakref.ref): + def __eq__(self, other): + myref = self() + return self is other or ( + myref is not None and myref is other()) + def __ne__(self, other): + return not (self == other) + def __hash__(self): + try: + return self._hash + except AttributeError: + self._hash = hash(self()) + return self._hash + self._weakref_cache_ref = {}, MyRef + weak_cache, MyRef = self._weakref_cache_ref + + if destructor is None: + try: + del weak_cache[MyRef(cdata)] + except KeyError: + raise TypeError("Can remove destructor only on a object " + "previously returned by ffi.gc()") + return None + + def remove(k): + cdata, destructor = weak_cache.pop(k, (None, None)) + if destructor is not None: + destructor(cdata) + + new_cdata = self.cast(self.typeof(cdata), cdata) + assert new_cdata is not cdata + weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor) + return new_cdata + + typeof = type + + def getcname(self, BType, replace_with): + return BType._get_c_name(replace_with) + + def typeoffsetof(self, BType, fieldname, num=0): + if isinstance(fieldname, str): + if num == 0 and issubclass(BType, CTypesGenericPtr): + BType = BType._BItem + if not issubclass(BType, CTypesBaseStructOrUnion): + raise TypeError("expected a struct or union ctype") + BField = BType._bfield_types[fieldname] + if BField is Ellipsis: + raise TypeError("not supported for bitfields") + return (BField, BType._offsetof(fieldname)) + elif isinstance(fieldname, (int, long)): + if issubclass(BType, CTypesGenericArray): + BType = BType._CTPtr + if not issubclass(BType, CTypesGenericPtr): + raise TypeError("expected an array or ptr ctype") + BItem = BType._BItem + offset 
= BItem._get_size() * fieldname + if offset > sys.maxsize: + raise OverflowError + return (BItem, offset) + else: + raise TypeError(type(fieldname)) + + def rawaddressof(self, BTypePtr, cdata, offset=None): + if isinstance(cdata, CTypesBaseStructOrUnion): + ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata)) + elif isinstance(cdata, CTypesGenericPtr): + if offset is None or not issubclass(type(cdata)._BItem, + CTypesBaseStructOrUnion): + raise TypeError("unexpected cdata type") + ptr = type(cdata)._to_ctypes(cdata) + elif isinstance(cdata, CTypesGenericArray): + ptr = type(cdata)._to_ctypes(cdata) + else: + raise TypeError("expected a ") + if offset: + ptr = ctypes.cast( + ctypes.c_void_p( + ctypes.cast(ptr, ctypes.c_void_p).value + offset), + type(ptr)) + return BTypePtr._from_ctypes(ptr) + + +class CTypesLibrary(object): + + def __init__(self, backend, cdll): + self.backend = backend + self.cdll = cdll + + def load_function(self, BType, name): + c_func = getattr(self.cdll, name) + funcobj = BType._from_ctypes(c_func) + funcobj._name = name + return funcobj + + def read_variable(self, BType, name): + try: + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + except AttributeError as e: + raise NotImplementedError(e) + return BType._from_ctypes(ctypes_obj) + + def write_variable(self, BType, name, value): + new_ctypes_obj = BType._to_ctypes(value) + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + ctypes.memmove(ctypes.addressof(ctypes_obj), + ctypes.addressof(new_ctypes_obj), + ctypes.sizeof(BType._ctype)) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/cffi_opcode.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/cffi_opcode.py new file mode 100644 index 000000000..a0df98d1c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/cffi_opcode.py @@ -0,0 +1,187 @@ +from .error import VerificationError + +class CffiOp(object): + def __init__(self, op, arg): + self.op = op + self.arg = arg + + def as_c_expr(self): + if self.op is None: + assert isinstance(self.arg, str) + return '(_cffi_opcode_t)(%s)' % (self.arg,) + classname = CLASS_NAME[self.op] + return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg) + + def as_python_bytes(self): + if self.op is None and self.arg.isdigit(): + value = int(self.arg) # non-negative: '-' not in self.arg + if value >= 2**31: + raise OverflowError("cannot emit %r: limited to 2**31-1" + % (self.arg,)) + return format_four_bytes(value) + if isinstance(self.arg, str): + raise VerificationError("cannot emit to Python: %r" % (self.arg,)) + return format_four_bytes((self.arg << 8) | self.op) + + def __str__(self): + classname = CLASS_NAME.get(self.op, self.op) + return '(%s %s)' % (classname, self.arg) + +def format_four_bytes(num): + return '\\x%02X\\x%02X\\x%02X\\x%02X' % ( + (num >> 24) & 0xFF, + (num >> 16) & 0xFF, + (num >> 8) & 0xFF, + (num ) & 0xFF) + +OP_PRIMITIVE = 1 +OP_POINTER = 3 +OP_ARRAY = 5 +OP_OPEN_ARRAY = 7 +OP_STRUCT_UNION = 9 +OP_ENUM = 11 +OP_FUNCTION = 13 +OP_FUNCTION_END = 15 +OP_NOOP = 17 +OP_BITFIELD = 19 +OP_TYPENAME = 21 +OP_CPYTHON_BLTN_V = 23 # varargs +OP_CPYTHON_BLTN_N = 25 # noargs +OP_CPYTHON_BLTN_O = 27 # O (i.e. 
a single arg) +OP_CONSTANT = 29 +OP_CONSTANT_INT = 31 +OP_GLOBAL_VAR = 33 +OP_DLOPEN_FUNC = 35 +OP_DLOPEN_CONST = 37 +OP_GLOBAL_VAR_F = 39 +OP_EXTERN_PYTHON = 41 + +PRIM_VOID = 0 +PRIM_BOOL = 1 +PRIM_CHAR = 2 +PRIM_SCHAR = 3 +PRIM_UCHAR = 4 +PRIM_SHORT = 5 +PRIM_USHORT = 6 +PRIM_INT = 7 +PRIM_UINT = 8 +PRIM_LONG = 9 +PRIM_ULONG = 10 +PRIM_LONGLONG = 11 +PRIM_ULONGLONG = 12 +PRIM_FLOAT = 13 +PRIM_DOUBLE = 14 +PRIM_LONGDOUBLE = 15 + +PRIM_WCHAR = 16 +PRIM_INT8 = 17 +PRIM_UINT8 = 18 +PRIM_INT16 = 19 +PRIM_UINT16 = 20 +PRIM_INT32 = 21 +PRIM_UINT32 = 22 +PRIM_INT64 = 23 +PRIM_UINT64 = 24 +PRIM_INTPTR = 25 +PRIM_UINTPTR = 26 +PRIM_PTRDIFF = 27 +PRIM_SIZE = 28 +PRIM_SSIZE = 29 +PRIM_INT_LEAST8 = 30 +PRIM_UINT_LEAST8 = 31 +PRIM_INT_LEAST16 = 32 +PRIM_UINT_LEAST16 = 33 +PRIM_INT_LEAST32 = 34 +PRIM_UINT_LEAST32 = 35 +PRIM_INT_LEAST64 = 36 +PRIM_UINT_LEAST64 = 37 +PRIM_INT_FAST8 = 38 +PRIM_UINT_FAST8 = 39 +PRIM_INT_FAST16 = 40 +PRIM_UINT_FAST16 = 41 +PRIM_INT_FAST32 = 42 +PRIM_UINT_FAST32 = 43 +PRIM_INT_FAST64 = 44 +PRIM_UINT_FAST64 = 45 +PRIM_INTMAX = 46 +PRIM_UINTMAX = 47 +PRIM_FLOATCOMPLEX = 48 +PRIM_DOUBLECOMPLEX = 49 +PRIM_CHAR16 = 50 +PRIM_CHAR32 = 51 + +_NUM_PRIM = 52 +_UNKNOWN_PRIM = -1 +_UNKNOWN_FLOAT_PRIM = -2 +_UNKNOWN_LONG_DOUBLE = -3 + +_IO_FILE_STRUCT = -1 + +PRIMITIVE_TO_INDEX = { + 'char': PRIM_CHAR, + 'short': PRIM_SHORT, + 'int': PRIM_INT, + 'long': PRIM_LONG, + 'long long': PRIM_LONGLONG, + 'signed char': PRIM_SCHAR, + 'unsigned char': PRIM_UCHAR, + 'unsigned short': PRIM_USHORT, + 'unsigned int': PRIM_UINT, + 'unsigned long': PRIM_ULONG, + 'unsigned long long': PRIM_ULONGLONG, + 'float': PRIM_FLOAT, + 'double': PRIM_DOUBLE, + 'long double': PRIM_LONGDOUBLE, + 'float _Complex': PRIM_FLOATCOMPLEX, + 'double _Complex': PRIM_DOUBLECOMPLEX, + '_Bool': PRIM_BOOL, + 'wchar_t': PRIM_WCHAR, + 'char16_t': PRIM_CHAR16, + 'char32_t': PRIM_CHAR32, + 'int8_t': PRIM_INT8, + 'uint8_t': PRIM_UINT8, + 'int16_t': PRIM_INT16, + 'uint16_t': PRIM_UINT16, + 'int32_t': PRIM_INT32, + 'uint32_t': PRIM_UINT32, + 'int64_t': PRIM_INT64, + 'uint64_t': PRIM_UINT64, + 'intptr_t': PRIM_INTPTR, + 'uintptr_t': PRIM_UINTPTR, + 'ptrdiff_t': PRIM_PTRDIFF, + 'size_t': PRIM_SIZE, + 'ssize_t': PRIM_SSIZE, + 'int_least8_t': PRIM_INT_LEAST8, + 'uint_least8_t': PRIM_UINT_LEAST8, + 'int_least16_t': PRIM_INT_LEAST16, + 'uint_least16_t': PRIM_UINT_LEAST16, + 'int_least32_t': PRIM_INT_LEAST32, + 'uint_least32_t': PRIM_UINT_LEAST32, + 'int_least64_t': PRIM_INT_LEAST64, + 'uint_least64_t': PRIM_UINT_LEAST64, + 'int_fast8_t': PRIM_INT_FAST8, + 'uint_fast8_t': PRIM_UINT_FAST8, + 'int_fast16_t': PRIM_INT_FAST16, + 'uint_fast16_t': PRIM_UINT_FAST16, + 'int_fast32_t': PRIM_INT_FAST32, + 'uint_fast32_t': PRIM_UINT_FAST32, + 'int_fast64_t': PRIM_INT_FAST64, + 'uint_fast64_t': PRIM_UINT_FAST64, + 'intmax_t': PRIM_INTMAX, + 'uintmax_t': PRIM_UINTMAX, + } + +F_UNION = 0x01 +F_CHECK_FIELDS = 0x02 +F_PACKED = 0x04 +F_EXTERNAL = 0x08 +F_OPAQUE = 0x10 + +G_FLAGS = dict([('_CFFI_' + _key, globals()[_key]) + for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', + 'F_EXTERNAL', 'F_OPAQUE']]) + +CLASS_NAME = {} +for _name, _value in list(globals().items()): + if _name.startswith('OP_') and isinstance(_value, int): + CLASS_NAME[_value] = _name[3:] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/commontypes.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/commontypes.py new file mode 100644 index 000000000..8ec97c756 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/commontypes.py @@ -0,0 +1,80 
@@ +import sys +from . import model +from .error import FFIError + + +COMMON_TYPES = {} + +try: + # fetch "bool" and all simple Windows types + from _cffi_backend import _get_common_types + _get_common_types(COMMON_TYPES) +except ImportError: + pass + +COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') +COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above + +for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + if _type.endswith('_t'): + COMMON_TYPES[_type] = _type +del _type + +_CACHE = {} + +def resolve_common_type(parser, commontype): + try: + return _CACHE[commontype] + except KeyError: + cdecl = COMMON_TYPES.get(commontype, commontype) + if not isinstance(cdecl, str): + result, quals = cdecl, 0 # cdecl is already a BaseType + elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + result, quals = model.PrimitiveType(cdecl), 0 + elif cdecl == 'set-unicode-needed': + raise FFIError("The Windows type %r is only available after " + "you call ffi.set_unicode()" % (commontype,)) + else: + if commontype == cdecl: + raise FFIError( + "Unsupported type: %r. Please look at " + "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations " + "and file an issue if you think this type should really " + "be supported." % (commontype,)) + result, quals = parser.parse_type_and_quals(cdecl) # recursive + + assert isinstance(result, model.BaseTypeByIdentity) + _CACHE[commontype] = result, quals + return result, quals + + +# ____________________________________________________________ +# extra types for Windows (most of them are in commontypes.c) + + +def win_common_types(): + return { + "UNICODE_STRING": model.StructType( + "_UNICODE_STRING", + ["Length", + "MaximumLength", + "Buffer"], + [model.PrimitiveType("unsigned short"), + model.PrimitiveType("unsigned short"), + model.PointerType(model.PrimitiveType("wchar_t"))], + [-1, -1, -1]), + "PUNICODE_STRING": "UNICODE_STRING *", + "PCUNICODE_STRING": "const UNICODE_STRING *", + + "TBYTE": "set-unicode-needed", + "TCHAR": "set-unicode-needed", + "LPCTSTR": "set-unicode-needed", + "PCTSTR": "set-unicode-needed", + "LPTSTR": "set-unicode-needed", + "PTSTR": "set-unicode-needed", + "PTBYTE": "set-unicode-needed", + "PTCHAR": "set-unicode-needed", + } + +if sys.platform == 'win32': + COMMON_TYPES.update(win_common_types()) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/cparser.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/cparser.py new file mode 100644 index 000000000..74830e913 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/cparser.py @@ -0,0 +1,1006 @@ +from . import model +from .commontypes import COMMON_TYPES, resolve_common_type +from .error import FFIError, CDefError +try: + from . import _pycparser as pycparser +except ImportError: + import pycparser +import weakref, re, sys + +try: + if sys.version_info < (3,): + import thread as _thread + else: + import _thread + lock = _thread.allocate_lock() +except ImportError: + lock = None + +def _workaround_for_static_import_finders(): + # Issue #392: packaging tools like cx_Freeze can not find these + # because pycparser uses exec dynamic import. This is an obscure + # workaround. This function is never called. 
+ import pycparser.yacctab + import pycparser.lextab + +CDEF_SOURCE_STRING = "" +_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$", + re.DOTALL | re.MULTILINE) +_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)" + r"\b((?:[^\n\\]|\\.)*?)$", + re.DOTALL | re.MULTILINE) +_r_line_directive = re.compile(r"^[ \t]*#[ \t]*(?:line|\d+)\b.*$", re.MULTILINE) +_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}") +_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$") +_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]") +_r_words = re.compile(r"\w+|\S") +_parser_cache = None +_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE) +_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b") +_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b") +_r_cdecl = re.compile(r"\b__cdecl\b") +_r_extern_python = re.compile(r'\bextern\s*"' + r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.') +_r_star_const_space = re.compile( # matches "* const " + r"[*]\s*((const|volatile|restrict)\b\s*)+") +_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+" + r"\.\.\.") +_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.") + +def _get_parser(): + global _parser_cache + if _parser_cache is None: + _parser_cache = pycparser.CParser() + return _parser_cache + +def _workaround_for_old_pycparser(csource): + # Workaround for a pycparser issue (fixed between pycparser 2.10 and + # 2.14): "char*const***" gives us a wrong syntax tree, the same as + # for "char***(*const)". This means we can't tell the difference + # afterwards. But "char(*const(***))" gives us the right syntax + # tree. The issue only occurs if there are several stars in + # sequence with no parenthesis inbetween, just possibly qualifiers. + # Attempt to fix it by adding some parentheses in the source: each + # time we see "* const" or "* const *", we add an opening + # parenthesis before each star---the hard part is figuring out where + # to close them. + parts = [] + while True: + match = _r_star_const_space.search(csource) + if not match: + break + #print repr(''.join(parts)+csource), '=>', + parts.append(csource[:match.start()]) + parts.append('('); closing = ')' + parts.append(match.group()) # e.g. 
"* const " + endpos = match.end() + if csource.startswith('*', endpos): + parts.append('('); closing += ')' + level = 0 + i = endpos + while i < len(csource): + c = csource[i] + if c == '(': + level += 1 + elif c == ')': + if level == 0: + break + level -= 1 + elif c in ',;=': + if level == 0: + break + i += 1 + csource = csource[endpos:i] + closing + csource[i:] + #print repr(''.join(parts)+csource) + parts.append(csource) + return ''.join(parts) + +def _preprocess_extern_python(csource): + # input: `extern "Python" int foo(int);` or + # `extern "Python" { int foo(int); }` + # output: + # void __cffi_extern_python_start; + # int foo(int); + # void __cffi_extern_python_stop; + # + # input: `extern "Python+C" int foo(int);` + # output: + # void __cffi_extern_python_plus_c_start; + # int foo(int); + # void __cffi_extern_python_stop; + parts = [] + while True: + match = _r_extern_python.search(csource) + if not match: + break + endpos = match.end() - 1 + #print + #print ''.join(parts)+csource + #print '=>' + parts.append(csource[:match.start()]) + if 'C' in match.group(1): + parts.append('void __cffi_extern_python_plus_c_start; ') + else: + parts.append('void __cffi_extern_python_start; ') + if csource[endpos] == '{': + # grouping variant + closing = csource.find('}', endpos) + if closing < 0: + raise CDefError("'extern \"Python\" {': no '}' found") + if csource.find('{', endpos + 1, closing) >= 0: + raise NotImplementedError("cannot use { } inside a block " + "'extern \"Python\" { ... }'") + parts.append(csource[endpos+1:closing]) + csource = csource[closing+1:] + else: + # non-grouping variant + semicolon = csource.find(';', endpos) + if semicolon < 0: + raise CDefError("'extern \"Python\": no ';' found") + parts.append(csource[endpos:semicolon+1]) + csource = csource[semicolon+1:] + parts.append(' void __cffi_extern_python_stop;') + #print ''.join(parts)+csource + #print + parts.append(csource) + return ''.join(parts) + +def _warn_for_string_literal(csource): + if '"' not in csource: + return + for line in csource.splitlines(): + if '"' in line and not line.lstrip().startswith('#'): + import warnings + warnings.warn("String literal found in cdef() or type source. " + "String literals are ignored here, but you should " + "remove them anyway because some character sequences " + "confuse pre-parsing.") + break + +def _warn_for_non_extern_non_static_global_variable(decl): + if not decl.storage: + import warnings + warnings.warn("Global variable '%s' in cdef(): for consistency " + "with C it should have a storage class specifier " + "(usually 'extern')" % (decl.name,)) + +def _remove_line_directives(csource): + # _r_line_directive matches whole lines, without the final \n, if they + # start with '#line' with some spacing allowed, or '#NUMBER'. This + # function stores them away and replaces them with exactly the string + # '#line@N', where N is the index in the list 'line_directives'. 
+ line_directives = [] + def replace(m): + i = len(line_directives) + line_directives.append(m.group()) + return '#line@%d' % i + csource = _r_line_directive.sub(replace, csource) + return csource, line_directives + +def _put_back_line_directives(csource, line_directives): + def replace(m): + s = m.group() + if not s.startswith('#line@'): + raise AssertionError("unexpected #line directive " + "(should have been processed and removed") + return line_directives[int(s[6:])] + return _r_line_directive.sub(replace, csource) + +def _preprocess(csource): + # First, remove the lines of the form '#line N "filename"' because + # the "filename" part could confuse the rest + csource, line_directives = _remove_line_directives(csource) + # Remove comments. NOTE: this only work because the cdef() section + # should not contain any string literals (except in line directives)! + def replace_keeping_newlines(m): + return ' ' + m.group().count('\n') * '\n' + csource = _r_comment.sub(replace_keeping_newlines, csource) + # Remove the "#define FOO x" lines + macros = {} + for match in _r_define.finditer(csource): + macroname, macrovalue = match.groups() + macrovalue = macrovalue.replace('\\\n', '').strip() + macros[macroname] = macrovalue + csource = _r_define.sub('', csource) + # + if pycparser.__version__ < '2.14': + csource = _workaround_for_old_pycparser(csource) + # + # BIG HACK: replace WINAPI or __stdcall with "volatile const". + # It doesn't make sense for the return type of a function to be + # "volatile volatile const", so we abuse it to detect __stdcall... + # Hack number 2 is that "int(volatile *fptr)();" is not valid C + # syntax, so we place the "volatile" before the opening parenthesis. + csource = _r_stdcall2.sub(' volatile volatile const(', csource) + csource = _r_stdcall1.sub(' volatile volatile const ', csource) + csource = _r_cdecl.sub(' ', csource) + # + # Replace `extern "Python"` with start/end markers + csource = _preprocess_extern_python(csource) + # + # Now there should not be any string literal left; warn if we get one + _warn_for_string_literal(csource) + # + # Replace "[...]" with "[__dotdotdotarray__]" + csource = _r_partial_array.sub('[__dotdotdotarray__]', csource) + # + # Replace "...}" with "__dotdotdotNUM__}". This construction should + # occur only at the end of enums; at the end of structs we have "...;}" + # and at the end of vararg functions "...);". Also replace "=...[,}]" + # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when + # giving an unknown value. + matches = list(_r_partial_enum.finditer(csource)) + for number, match in enumerate(reversed(matches)): + p = match.start() + if csource[p] == '=': + p2 = csource.find('...', p, match.end()) + assert p2 > p + csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number, + csource[p2+3:]) + else: + assert csource[p:p+3] == '...' + csource = '%s __dotdotdot%d__ %s' % (csource[:p], number, + csource[p+3:]) + # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__" + csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource) + # Replace "float ..." or "double..." with "__dotdotdotfloat__" + csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource) + # Replace all remaining "..." with the same name, "__dotdotdot__", + # which is declared with a typedef for the purpose of C parsing. 
+ csource = csource.replace('...', ' __dotdotdot__ ') + # Finally, put back the line directives + csource = _put_back_line_directives(csource, line_directives) + return csource, macros + +def _common_type_names(csource): + # Look in the source for what looks like usages of types from the + # list of common types. A "usage" is approximated here as the + # appearance of the word, minus a "definition" of the type, which + # is the last word in a "typedef" statement. Approximative only + # but should be fine for all the common types. + look_for_words = set(COMMON_TYPES) + look_for_words.add(';') + look_for_words.add(',') + look_for_words.add('(') + look_for_words.add(')') + look_for_words.add('typedef') + words_used = set() + is_typedef = False + paren = 0 + previous_word = '' + for word in _r_words.findall(csource): + if word in look_for_words: + if word == ';': + if is_typedef: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + is_typedef = False + elif word == 'typedef': + is_typedef = True + paren = 0 + elif word == '(': + paren += 1 + elif word == ')': + paren -= 1 + elif word == ',': + if is_typedef and paren == 0: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + else: # word in COMMON_TYPES + words_used.add(word) + previous_word = word + return words_used + + +class Parser(object): + + def __init__(self): + self._declarations = {} + self._included_declarations = set() + self._anonymous_counter = 0 + self._structnode2type = weakref.WeakKeyDictionary() + self._options = {} + self._int_constants = {} + self._recomplete = [] + self._uses_new_feature = None + + def _parse(self, csource): + csource, macros = _preprocess(csource) + # XXX: for more efficiency we would need to poke into the + # internals of CParser... the following registers the + # typedefs, because their presence or absence influences the + # parsing itself (but what they are typedef'ed to plays no role) + ctn = _common_type_names(csource) + typenames = [] + for name in sorted(self._declarations): + if name.startswith('typedef '): + name = name[8:] + typenames.append(name) + ctn.discard(name) + typenames += sorted(ctn) + # + csourcelines = [] + csourcelines.append('# 1 ""') + for typename in typenames: + csourcelines.append('typedef int %s;' % typename) + csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,' + ' __dotdotdot__;') + # this forces pycparser to consider the following in the file + # called from line 1 + csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,)) + csourcelines.append(csource) + fullcsource = '\n'.join(csourcelines) + if lock is not None: + lock.acquire() # pycparser is not thread-safe... + try: + ast = _get_parser().parse(fullcsource) + except pycparser.c_parser.ParseError as e: + self.convert_pycparser_error(e, csource) + finally: + if lock is not None: + lock.release() + # csource will be used to find buggy source text + return ast, macros, csource + + def _convert_pycparser_error(self, e, csource): + # xxx look for ":NUM:" at the start of str(e) + # and interpret that as a line number. This will not work if + # the user gives explicit ``# NUM "FILE"`` directives. 
+ line = None + msg = str(e) + match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg) + if match: + linenum = int(match.group(1), 10) + csourcelines = csource.splitlines() + if 1 <= linenum <= len(csourcelines): + line = csourcelines[linenum-1] + return line + + def convert_pycparser_error(self, e, csource): + line = self._convert_pycparser_error(e, csource) + + msg = str(e) + if line: + msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) + else: + msg = 'parse error\n%s' % (msg,) + raise CDefError(msg) + + def parse(self, csource, override=False, packed=False, pack=None, + dllexport=False): + if packed: + if packed != True: + raise ValueError("'packed' should be False or True; use " + "'pack' to give another value") + if pack: + raise ValueError("cannot give both 'pack' and 'packed'") + pack = 1 + elif pack: + if pack & (pack - 1): + raise ValueError("'pack' must be a power of two, not %r" % + (pack,)) + else: + pack = 0 + prev_options = self._options + try: + self._options = {'override': override, + 'packed': pack, + 'dllexport': dllexport} + self._internal_parse(csource) + finally: + self._options = prev_options + + def _internal_parse(self, csource): + ast, macros, csource = self._parse(csource) + # add the macros + self._process_macros(macros) + # find the first "__dotdotdot__" and use that as a separator + # between the repeated typedefs and the real csource + iterator = iter(ast.ext) + for decl in iterator: + if decl.name == '__dotdotdot__': + break + else: + assert 0 + current_decl = None + # + try: + self._inside_extern_python = '__cffi_extern_python_stop' + for decl in iterator: + current_decl = decl + if isinstance(decl, pycparser.c_ast.Decl): + self._parse_decl(decl) + elif isinstance(decl, pycparser.c_ast.Typedef): + if not decl.name: + raise CDefError("typedef does not declare any name", + decl) + quals = 0 + if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and + decl.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_type(decl) + elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and + isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and + isinstance(decl.type.type.type, + pycparser.c_ast.IdentifierType) and + decl.type.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_ptr_type(decl) + else: + realtype, quals = self._get_type_and_quals( + decl.type, name=decl.name, partial_length_ok=True, + typedef_example="*(%s *)0" % (decl.name,)) + self._declare('typedef ' + decl.name, realtype, quals=quals) + elif decl.__class__.__name__ == 'Pragma': + pass # skip pragma, only in pycparser 2.15 + else: + raise CDefError("unexpected <%s>: this construct is valid " + "C but not valid in cdef()" % + decl.__class__.__name__, decl) + except CDefError as e: + if len(e.args) == 1: + e.args = e.args + (current_decl,) + raise + except FFIError as e: + msg = self._convert_pycparser_error(e, csource) + if msg: + e.args = (e.args[0] + "\n *** Err: %s" % msg,) + raise + + def _add_constants(self, key, val): + if key in self._int_constants: + if self._int_constants[key] == val: + return # ignore identical double declarations + raise FFIError( + "multiple declarations of constant: %s" % (key,)) + self._int_constants[key] = val + + def _add_integer_constant(self, name, int_str): + int_str = int_str.lower().rstrip("ul") + neg = int_str.startswith('-') + if neg: + int_str = int_str[1:] + # "010" is not valid oct in py3 + if (int_str.startswith("0") and int_str != '0' + and not int_str.startswith("0x")): + int_str = "0o" + 
int_str[1:] + pyvalue = int(int_str, 0) + if neg: + pyvalue = -pyvalue + self._add_constants(name, pyvalue) + self._declare('macro ' + name, pyvalue) + + def _process_macros(self, macros): + for key, value in macros.items(): + value = value.strip() + if _r_int_literal.match(value): + self._add_integer_constant(key, value) + elif value == '...': + self._declare('macro ' + key, value) + else: + raise CDefError( + 'only supports one of the following syntax:\n' + ' #define %s ... (literally dot-dot-dot)\n' + ' #define %s NUMBER (with NUMBER an integer' + ' constant, decimal/hex/octal)\n' + 'got:\n' + ' #define %s %s' + % (key, key, key, value)) + + def _declare_function(self, tp, quals, decl): + tp = self._get_type_pointer(tp, quals) + if self._options.get('dllexport'): + tag = 'dllexport_python ' + elif self._inside_extern_python == '__cffi_extern_python_start': + tag = 'extern_python ' + elif self._inside_extern_python == '__cffi_extern_python_plus_c_start': + tag = 'extern_python_plus_c ' + else: + tag = 'function ' + self._declare(tag + decl.name, tp) + + def _parse_decl(self, decl): + node = decl.type + if isinstance(node, pycparser.c_ast.FuncDecl): + tp, quals = self._get_type_and_quals(node, name=decl.name) + assert isinstance(tp, model.RawFunctionType) + self._declare_function(tp, quals, decl) + else: + if isinstance(node, pycparser.c_ast.Struct): + self._get_struct_union_enum_type('struct', node) + elif isinstance(node, pycparser.c_ast.Union): + self._get_struct_union_enum_type('union', node) + elif isinstance(node, pycparser.c_ast.Enum): + self._get_struct_union_enum_type('enum', node) + elif not decl.name: + raise CDefError("construct does not declare any variable", + decl) + # + if decl.name: + tp, quals = self._get_type_and_quals(node, + partial_length_ok=True) + if tp.is_raw_function: + self._declare_function(tp, quals, decl) + elif (tp.is_integer_type() and + hasattr(decl, 'init') and + hasattr(decl.init, 'value') and + _r_int_literal.match(decl.init.value)): + self._add_integer_constant(decl.name, decl.init.value) + elif (tp.is_integer_type() and + isinstance(decl.init, pycparser.c_ast.UnaryOp) and + decl.init.op == '-' and + hasattr(decl.init.expr, 'value') and + _r_int_literal.match(decl.init.expr.value)): + self._add_integer_constant(decl.name, + '-' + decl.init.expr.value) + elif (tp is model.void_type and + decl.name.startswith('__cffi_extern_python_')): + # hack: `extern "Python"` in the C source is replaced + # with "void __cffi_extern_python_start;" and + # "void __cffi_extern_python_stop;" + self._inside_extern_python = decl.name + else: + if self._inside_extern_python !='__cffi_extern_python_stop': + raise CDefError( + "cannot declare constants or " + "variables with 'extern \"Python\"'") + if (quals & model.Q_CONST) and not tp.is_array_type: + self._declare('constant ' + decl.name, tp, quals=quals) + else: + _warn_for_non_extern_non_static_global_variable(decl) + self._declare('variable ' + decl.name, tp, quals=quals) + + def parse_type(self, cdecl): + return self.parse_type_and_quals(cdecl)[0] + + def parse_type_and_quals(self, cdecl): + ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] + assert not macros + exprnode = ast.ext[-1].type.args.params[0] + if isinstance(exprnode, pycparser.c_ast.ID): + raise CDefError("unknown identifier '%s'" % (exprnode.name,)) + return self._get_type_and_quals(exprnode.type) + + def _declare(self, name, obj, included=False, quals=0): + if name in self._declarations: + prevobj, prevquals = self._declarations[name] + if 
prevobj is obj and prevquals == quals: + return + if not self._options.get('override'): + raise FFIError( + "multiple declarations of %s (for interactive usage, " + "try cdef(xx, override=True))" % (name,)) + assert '__dotdotdot__' not in name.split() + self._declarations[name] = (obj, quals) + if included: + self._included_declarations.add(obj) + + def _extract_quals(self, type): + quals = 0 + if isinstance(type, (pycparser.c_ast.TypeDecl, + pycparser.c_ast.PtrDecl)): + if 'const' in type.quals: + quals |= model.Q_CONST + if 'volatile' in type.quals: + quals |= model.Q_VOLATILE + if 'restrict' in type.quals: + quals |= model.Q_RESTRICT + return quals + + def _get_type_pointer(self, type, quals, declname=None): + if isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + if (isinstance(type, model.StructOrUnionOrEnum) and + type.name.startswith('$') and type.name[1:].isdigit() and + type.forcename is None and declname is not None): + return model.NamedPointerType(type, declname, quals) + return model.PointerType(type, quals) + + def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False, + typedef_example=None): + # first, dereference typedefs, if we have it already parsed, we're good + if (isinstance(typenode, pycparser.c_ast.TypeDecl) and + isinstance(typenode.type, pycparser.c_ast.IdentifierType) and + len(typenode.type.names) == 1 and + ('typedef ' + typenode.type.names[0]) in self._declarations): + tp, quals = self._declarations['typedef ' + typenode.type.names[0]] + quals |= self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.ArrayDecl): + # array type + if typenode.dim is None: + length = None + else: + length = self._parse_constant( + typenode.dim, partial_length_ok=partial_length_ok) + # a hack: in 'typedef int foo_t[...][...];', don't use '...' as + # the length but use directly the C expression that would be + # generated by recompiler.py. This lets the typedef be used in + # many more places within recompiler.py + if typedef_example is not None: + if length == '...': + length = '_cffi_array_len(%s)' % (typedef_example,) + typedef_example = "*" + typedef_example + # + tp, quals = self._get_type_and_quals(typenode.type, + partial_length_ok=partial_length_ok, + typedef_example=typedef_example) + return model.ArrayType(tp, length), quals + # + if isinstance(typenode, pycparser.c_ast.PtrDecl): + # pointer type + itemtype, itemquals = self._get_type_and_quals(typenode.type) + tp = self._get_type_pointer(itemtype, itemquals, declname=name) + quals = self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.TypeDecl): + quals = self._extract_quals(typenode) + type = typenode.type + if isinstance(type, pycparser.c_ast.IdentifierType): + # assume a primitive type. 
get it from .names, but reduce + # synonyms to a single chosen combination + names = list(type.names) + if names != ['signed', 'char']: # keep this unmodified + prefixes = {} + while names: + name = names[0] + if name in ('short', 'long', 'signed', 'unsigned'): + prefixes[name] = prefixes.get(name, 0) + 1 + del names[0] + else: + break + # ignore the 'signed' prefix below, and reorder the others + newnames = [] + for prefix in ('unsigned', 'short', 'long'): + for i in range(prefixes.get(prefix, 0)): + newnames.append(prefix) + if not names: + names = ['int'] # implicitly + if names == ['int']: # but kill it if 'short' or 'long' + if 'short' in prefixes or 'long' in prefixes: + names = [] + names = newnames + names + ident = ' '.join(names) + if ident == 'void': + return model.void_type, quals + if ident == '__dotdotdot__': + raise FFIError(':%d: bad usage of "..."' % + typenode.coord.line) + tp0, quals0 = resolve_common_type(self, ident) + return tp0, (quals | quals0) + # + if isinstance(type, pycparser.c_ast.Struct): + # 'struct foobar' + tp = self._get_struct_union_enum_type('struct', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Union): + # 'union foobar' + tp = self._get_struct_union_enum_type('union', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Enum): + # 'enum foobar' + tp = self._get_struct_union_enum_type('enum', type, name) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.FuncDecl): + # a function type + return self._parse_function_type(typenode, name), 0 + # + # nested anonymous structs or unions end up here + if isinstance(typenode, pycparser.c_ast.Struct): + return self._get_struct_union_enum_type('struct', typenode, name, + nested=True), 0 + if isinstance(typenode, pycparser.c_ast.Union): + return self._get_struct_union_enum_type('union', typenode, name, + nested=True), 0 + # + raise FFIError(":%d: bad or unsupported type declaration" % + typenode.coord.line) + + def _parse_function_type(self, typenode, funcname=None): + params = list(getattr(typenode.args, 'params', [])) + for i, arg in enumerate(params): + if not hasattr(arg, 'type'): + raise CDefError("%s arg %d: unknown type '%s'" + " (if you meant to use the old C syntax of giving" + " untyped arguments, it is not supported)" + % (funcname or 'in expression', i + 1, + getattr(arg, 'name', '?'))) + ellipsis = ( + len(params) > 0 and + isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and + isinstance(params[-1].type.type, + pycparser.c_ast.IdentifierType) and + params[-1].type.type.names == ['__dotdotdot__']) + if ellipsis: + params.pop() + if not params: + raise CDefError( + "%s: a function with only '(...)' as argument" + " is not correct C" % (funcname or 'in expression')) + args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type)) + for argdeclnode in params] + if not ellipsis and args == [model.void_type]: + args = [] + result, quals = self._get_type_and_quals(typenode.type) + # the 'quals' on the result type are ignored. HACK: we absure them + # to detect __stdcall functions: we textually replace "__stdcall" + # with "volatile volatile const" above. 
+ abi = None + if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway + if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']: + abi = '__stdcall' + return model.RawFunctionType(tuple(args), result, ellipsis, abi) + + def _as_func_arg(self, type, quals): + if isinstance(type, model.ArrayType): + return model.PointerType(type.item, quals) + elif isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + else: + return type + + def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): + # First, a level of caching on the exact 'type' node of the AST. + # This is obscure, but needed because pycparser "unrolls" declarations + # such as "typedef struct { } foo_t, *foo_p" and we end up with + # an AST that is not a tree, but a DAG, with the "type" node of the + # two branches foo_t and foo_p of the trees being the same node. + # It's a bit silly but detecting "DAG-ness" in the AST tree seems + # to be the only way to distinguish this case from two independent + # structs. See test_struct_with_two_usages. + try: + return self._structnode2type[type] + except KeyError: + pass + # + # Note that this must handle parsing "struct foo" any number of + # times and always return the same StructType object. Additionally, + # one of these times (not necessarily the first), the fields of + # the struct can be specified with "struct foo { ...fields... }". + # If no name is given, then we have to create a new anonymous struct + # with no caching; in this case, the fields are either specified + # right now or never. + # + force_name = name + name = type.name + # + # get the type or create it if needed + if name is None: + # 'force_name' is used to guess a more readable name for + # anonymous structs, for the common case "typedef struct { } foo". + if force_name is not None: + explicit_name = '$%s' % force_name + else: + self._anonymous_counter += 1 + explicit_name = '$%d' % self._anonymous_counter + tp = None + else: + explicit_name = name + key = '%s %s' % (kind, name) + tp, _ = self._declarations.get(key, (None, None)) + # + if tp is None: + if kind == 'struct': + tp = model.StructType(explicit_name, None, None, None) + elif kind == 'union': + tp = model.UnionType(explicit_name, None, None, None) + elif kind == 'enum': + if explicit_name == '__dotdotdot__': + raise CDefError("Enums cannot be declared with ...") + tp = self._build_enum_type(explicit_name, type.values) + else: + raise AssertionError("kind = %r" % (kind,)) + if name is not None: + self._declare(key, tp) + else: + if kind == 'enum' and type.values is not None: + raise NotImplementedError( + "enum %s: the '{}' declaration should appear on the first " + "time the enum is mentioned, not later" % explicit_name) + if not tp.forcename: + tp.force_the_name(force_name) + if tp.forcename and '$' in tp.name: + self._declare('anonymous %s' % tp.forcename, tp) + # + self._structnode2type[type] = tp + # + # enums: done here + if kind == 'enum': + return tp + # + # is there a 'type.decls'? If yes, then this is the place in the + # C sources that declare the fields. If no, then just return the + # existing type, possibly still incomplete. 
+ if type.decls is None: + return tp + # + if tp.fldnames is not None: + raise CDefError("duplicate declaration of struct %s" % name) + fldnames = [] + fldtypes = [] + fldbitsize = [] + fldquals = [] + for decl in type.decls: + if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and + ''.join(decl.type.names) == '__dotdotdot__'): + # XXX pycparser is inconsistent: 'names' should be a list + # of strings, but is sometimes just one string. Use + # str.join() as a way to cope with both. + self._make_partial(tp, nested) + continue + if decl.bitsize is None: + bitsize = -1 + else: + bitsize = self._parse_constant(decl.bitsize) + self._partial_length = False + type, fqual = self._get_type_and_quals(decl.type, + partial_length_ok=True) + if self._partial_length: + self._make_partial(tp, nested) + if isinstance(type, model.StructType) and type.partial: + self._make_partial(tp, nested) + fldnames.append(decl.name or '') + fldtypes.append(type) + fldbitsize.append(bitsize) + fldquals.append(fqual) + tp.fldnames = tuple(fldnames) + tp.fldtypes = tuple(fldtypes) + tp.fldbitsize = tuple(fldbitsize) + tp.fldquals = tuple(fldquals) + if fldbitsize != [-1] * len(fldbitsize): + if isinstance(tp, model.StructType) and tp.partial: + raise NotImplementedError("%s: using both bitfields and '...;'" + % (tp,)) + tp.packed = self._options.get('packed') + if tp.completed: # must be re-completed: it is not opaque any more + tp.completed = 0 + self._recomplete.append(tp) + return tp + + def _make_partial(self, tp, nested): + if not isinstance(tp, model.StructOrUnion): + raise CDefError("%s cannot be partial" % (tp,)) + if not tp.has_c_name() and not nested: + raise NotImplementedError("%s is partial but has no C name" %(tp,)) + tp.partial = True + + def _parse_constant(self, exprnode, partial_length_ok=False): + # for now, limited to expressions that are an immediate number + # or positive/negative number + if isinstance(exprnode, pycparser.c_ast.Constant): + s = exprnode.value + if '0' <= s[0] <= '9': + s = s.rstrip('uUlL') + try: + if s.startswith('0'): + return int(s, 8) + else: + return int(s, 10) + except ValueError: + if len(s) > 1: + if s.lower()[0:2] == '0x': + return int(s, 16) + elif s.lower()[0:2] == '0b': + return int(s, 2) + raise CDefError("invalid constant %r" % (s,)) + elif s[0] == "'" and s[-1] == "'" and ( + len(s) == 3 or (len(s) == 4 and s[1] == "\\")): + return ord(s[-2]) + else: + raise CDefError("invalid constant %r" % (s,)) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '+'): + return self._parse_constant(exprnode.expr) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '-'): + return -self._parse_constant(exprnode.expr) + # load previously defined int constant + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name in self._int_constants): + return self._int_constants[exprnode.name] + # + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name == '__dotdotdotarray__'): + if partial_length_ok: + self._partial_length = True + return '...' 
+ raise FFIError(":%d: unsupported '[...]' here, cannot derive " + "the actual array length in this context" + % exprnode.coord.line) + # + if isinstance(exprnode, pycparser.c_ast.BinaryOp): + left = self._parse_constant(exprnode.left) + right = self._parse_constant(exprnode.right) + if exprnode.op == '+': + return left + right + elif exprnode.op == '-': + return left - right + elif exprnode.op == '*': + return left * right + elif exprnode.op == '/': + return self._c_div(left, right) + elif exprnode.op == '%': + return left - self._c_div(left, right) * right + elif exprnode.op == '<<': + return left << right + elif exprnode.op == '>>': + return left >> right + elif exprnode.op == '&': + return left & right + elif exprnode.op == '|': + return left | right + elif exprnode.op == '^': + return left ^ right + # + raise FFIError(":%d: unsupported expression: expected a " + "simple numeric constant" % exprnode.coord.line) + + def _c_div(self, a, b): + result = a // b + if ((a < 0) ^ (b < 0)) and (a % b) != 0: + result += 1 + return result + + def _build_enum_type(self, explicit_name, decls): + if decls is not None: + partial = False + enumerators = [] + enumvalues = [] + nextenumvalue = 0 + for enum in decls.enumerators: + if _r_enum_dotdotdot.match(enum.name): + partial = True + continue + if enum.value is not None: + nextenumvalue = self._parse_constant(enum.value) + enumerators.append(enum.name) + enumvalues.append(nextenumvalue) + self._add_constants(enum.name, nextenumvalue) + nextenumvalue += 1 + enumerators = tuple(enumerators) + enumvalues = tuple(enumvalues) + tp = model.EnumType(explicit_name, enumerators, enumvalues) + tp.partial = partial + else: # opaque enum + tp = model.EnumType(explicit_name, (), ()) + return tp + + def include(self, other): + for name, (tp, quals) in other._declarations.items(): + if name.startswith('anonymous $enum_$'): + continue # fix for test_anonymous_enum_include + kind = name.split(' ', 1)[0] + if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): + self._declare(name, tp, included=True, quals=quals) + for k, v in other._int_constants.items(): + self._add_constants(k, v) + + def _get_unknown_type(self, decl): + typenames = decl.type.type.names + if typenames == ['__dotdotdot__']: + return model.unknown_type(decl.name) + + if typenames == ['__dotdotdotint__']: + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef int... %s'" % decl.name + return model.UnknownIntegerType(decl.name) + + if typenames == ['__dotdotdotfloat__']: + # note: not for 'long double' so far + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef float... %s'" % decl.name + return model.UnknownFloatType(decl.name) + + raise FFIError(':%d: unsupported usage of "..." in typedef' + % decl.coord.line) + + def _get_unknown_ptr_type(self, decl): + if decl.type.type.type.names == ['__dotdotdot__']: + return model.unknown_ptr_type(decl.name) + raise FFIError(':%d: unsupported usage of "..." 
in typedef' + % decl.coord.line) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/error.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/error.py new file mode 100644 index 000000000..0a27247c3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/error.py @@ -0,0 +1,31 @@ + +class FFIError(Exception): + __module__ = 'cffi' + +class CDefError(Exception): + __module__ = 'cffi' + def __str__(self): + try: + current_decl = self.args[1] + filename = current_decl.coord.file + linenum = current_decl.coord.line + prefix = '%s:%d: ' % (filename, linenum) + except (AttributeError, TypeError, IndexError): + prefix = '' + return '%s%s' % (prefix, self.args[0]) + +class VerificationError(Exception): + """ An error raised when verification fails + """ + __module__ = 'cffi' + +class VerificationMissing(Exception): + """ An error raised when incomplete structures are passed into + cdef, but no verification has been done + """ + __module__ = 'cffi' + +class PkgConfigError(Exception): + """ An error raised for missing modules in pkg-config + """ + __module__ = 'cffi' diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/ffiplatform.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/ffiplatform.py new file mode 100644 index 000000000..adca28f1a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/ffiplatform.py @@ -0,0 +1,113 @@ +import sys, os +from .error import VerificationError + + +LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs', + 'extra_objects', 'depends'] + +def get_extension(srcfilename, modname, sources=(), **kwds): + from cffi._shimmed_dist_utils import Extension + allsources = [srcfilename] + for src in sources: + allsources.append(os.path.normpath(src)) + return Extension(name=modname, sources=allsources, **kwds) + +def compile(tmpdir, ext, compiler_verbose=0, debug=None): + """Compile a C extension module using distutils.""" + + saved_environ = os.environ.copy() + try: + outputfilename = _build(tmpdir, ext, compiler_verbose, debug) + outputfilename = os.path.abspath(outputfilename) + finally: + # workaround for a distutils bugs where some env vars can + # become longer and longer every time it is used + for key, value in saved_environ.items(): + if os.environ.get(key) != value: + os.environ[key] = value + return outputfilename + +def _build(tmpdir, ext, compiler_verbose=0, debug=None): + # XXX compact but horrible :-( + from cffi._shimmed_dist_utils import Distribution, CompileError, LinkError, set_threshold, set_verbosity + + dist = Distribution({'ext_modules': [ext]}) + dist.parse_config_files() + options = dist.get_option_dict('build_ext') + if debug is None: + debug = sys.flags.debug + options['debug'] = ('ffiplatform', debug) + options['force'] = ('ffiplatform', True) + options['build_lib'] = ('ffiplatform', tmpdir) + options['build_temp'] = ('ffiplatform', tmpdir) + # + try: + old_level = set_threshold(0) or 0 + try: + set_verbosity(compiler_verbose) + dist.run_command('build_ext') + cmd_obj = dist.get_command_obj('build_ext') + [soname] = cmd_obj.get_outputs() + finally: + set_threshold(old_level) + except (CompileError, LinkError) as e: + raise VerificationError('%s: %s' % (e.__class__.__name__, e)) + # + return soname + +try: + from os.path import samefile +except ImportError: + def samefile(f1, f2): + return os.path.abspath(f1) == os.path.abspath(f2) + +def maybe_relative_path(path): + if not os.path.isabs(path): + return path # already relative + dir = path + names = [] + 
while True: + prevdir = dir + dir, name = os.path.split(prevdir) + if dir == prevdir or not dir: + return path # failed to make it relative + names.append(name) + try: + if samefile(dir, os.curdir): + names.reverse() + return os.path.join(*names) + except OSError: + pass + +# ____________________________________________________________ + +try: + int_or_long = (int, long) + import cStringIO +except NameError: + int_or_long = int # Python 3 + import io as cStringIO + +def _flatten(x, f): + if isinstance(x, str): + f.write('%ds%s' % (len(x), x)) + elif isinstance(x, dict): + keys = sorted(x.keys()) + f.write('%dd' % len(keys)) + for key in keys: + _flatten(key, f) + _flatten(x[key], f) + elif isinstance(x, (list, tuple)): + f.write('%dl' % len(x)) + for value in x: + _flatten(value, f) + elif isinstance(x, int_or_long): + f.write('%di' % (x,)) + else: + raise TypeError( + "the keywords to verify() contains unsupported object %r" % (x,)) + +def flatten(x): + f = cStringIO.StringIO() + _flatten(x, f) + return f.getvalue() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/lock.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/lock.py new file mode 100644 index 000000000..db91b7158 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/lock.py @@ -0,0 +1,30 @@ +import sys + +if sys.version_info < (3,): + try: + from thread import allocate_lock + except ImportError: + from dummy_thread import allocate_lock +else: + try: + from _thread import allocate_lock + except ImportError: + from _dummy_thread import allocate_lock + + +##import sys +##l1 = allocate_lock + +##class allocate_lock(object): +## def __init__(self): +## self._real = l1() +## def __enter__(self): +## for i in range(4, 0, -1): +## print sys._getframe(i).f_code +## print +## return self._real.__enter__() +## def __exit__(self, *args): +## return self._real.__exit__(*args) +## def acquire(self, f): +## assert f is False +## return self._real.acquire(f) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/model.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/model.py new file mode 100644 index 000000000..1708f43df --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/model.py @@ -0,0 +1,618 @@ +import types +import weakref + +from .lock import allocate_lock +from .error import CDefError, VerificationError, VerificationMissing + +# type qualifiers +Q_CONST = 0x01 +Q_RESTRICT = 0x02 +Q_VOLATILE = 0x04 + +def qualify(quals, replace_with): + if quals & Q_CONST: + replace_with = ' const ' + replace_with.lstrip() + if quals & Q_VOLATILE: + replace_with = ' volatile ' + replace_with.lstrip() + if quals & Q_RESTRICT: + # It seems that __restrict is supported by gcc and msvc. + # If you hit some different compiler, add a #define in + # _cffi_include.h for it (and in its copies, documented there) + replace_with = ' __restrict ' + replace_with.lstrip() + return replace_with + + +class BaseTypeByIdentity(object): + is_array_type = False + is_raw_function = False + + def get_c_name(self, replace_with='', context='a C file', quals=0): + result = self.c_name_with_marker + assert result.count('&') == 1 + # some logic duplication with ffi.getctype()... 
:-( + replace_with = replace_with.strip() + if replace_with: + if replace_with.startswith('*') and '&[' in result: + replace_with = '(%s)' % replace_with + elif not replace_with[0] in '[(': + replace_with = ' ' + replace_with + replace_with = qualify(quals, replace_with) + result = result.replace('&', replace_with) + if '$' in result: + raise VerificationError( + "cannot generate '%s' in %s: unknown type name" + % (self._get_c_name(), context)) + return result + + def _get_c_name(self): + return self.c_name_with_marker.replace('&', '') + + def has_c_name(self): + return '$' not in self._get_c_name() + + def is_integer_type(self): + return False + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + try: + BType = ffi._cached_btypes[self] + except KeyError: + BType = self.build_backend_type(ffi, finishlist) + BType2 = ffi._cached_btypes.setdefault(self, BType) + assert BType2 is BType + return BType + + def __repr__(self): + return '<%s>' % (self._get_c_name(),) + + def _get_items(self): + return [(name, getattr(self, name)) for name in self._attrs_] + + +class BaseType(BaseTypeByIdentity): + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self._get_items() == other._get_items()) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.__class__, tuple(self._get_items()))) + + +class VoidType(BaseType): + _attrs_ = () + + def __init__(self): + self.c_name_with_marker = 'void&' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_void_type') + +void_type = VoidType() + + +class BasePrimitiveType(BaseType): + def is_complex_type(self): + return False + + +class PrimitiveType(BasePrimitiveType): + _attrs_ = ('name',) + + ALL_PRIMITIVE_TYPES = { + 'char': 'c', + 'short': 'i', + 'int': 'i', + 'long': 'i', + 'long long': 'i', + 'signed char': 'i', + 'unsigned char': 'i', + 'unsigned short': 'i', + 'unsigned int': 'i', + 'unsigned long': 'i', + 'unsigned long long': 'i', + 'float': 'f', + 'double': 'f', + 'long double': 'f', + 'float _Complex': 'j', + 'double _Complex': 'j', + '_Bool': 'i', + # the following types are not primitive in the C sense + 'wchar_t': 'c', + 'char16_t': 'c', + 'char32_t': 'c', + 'int8_t': 'i', + 'uint8_t': 'i', + 'int16_t': 'i', + 'uint16_t': 'i', + 'int32_t': 'i', + 'uint32_t': 'i', + 'int64_t': 'i', + 'uint64_t': 'i', + 'int_least8_t': 'i', + 'uint_least8_t': 'i', + 'int_least16_t': 'i', + 'uint_least16_t': 'i', + 'int_least32_t': 'i', + 'uint_least32_t': 'i', + 'int_least64_t': 'i', + 'uint_least64_t': 'i', + 'int_fast8_t': 'i', + 'uint_fast8_t': 'i', + 'int_fast16_t': 'i', + 'uint_fast16_t': 'i', + 'int_fast32_t': 'i', + 'uint_fast32_t': 'i', + 'int_fast64_t': 'i', + 'uint_fast64_t': 'i', + 'intptr_t': 'i', + 'uintptr_t': 'i', + 'intmax_t': 'i', + 'uintmax_t': 'i', + 'ptrdiff_t': 'i', + 'size_t': 'i', + 'ssize_t': 'i', + } + + def __init__(self, name): + assert name in self.ALL_PRIMITIVE_TYPES + self.name = name + self.c_name_with_marker = name + '&' + + def is_char_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'c' + def is_integer_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'i' + def is_float_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'f' + def is_complex_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'j' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_primitive_type', self.name) + + +class UnknownIntegerType(BasePrimitiveType): + _attrs_ = ('name',) 
+ + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def is_integer_type(self): + return True + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("integer type '%s' can only be used after " + "compilation" % self.name) + +class UnknownFloatType(BasePrimitiveType): + _attrs_ = ('name', ) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("float type '%s' can only be used after " + "compilation" % self.name) + + +class BaseFunctionType(BaseType): + _attrs_ = ('args', 'result', 'ellipsis', 'abi') + + def __init__(self, args, result, ellipsis, abi=None): + self.args = args + self.result = result + self.ellipsis = ellipsis + self.abi = abi + # + reprargs = [arg._get_c_name() for arg in self.args] + if self.ellipsis: + reprargs.append('...') + reprargs = reprargs or ['void'] + replace_with = self._base_pattern % (', '.join(reprargs),) + if abi is not None: + replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] + self.c_name_with_marker = ( + self.result.c_name_with_marker.replace('&', replace_with)) + + +class RawFunctionType(BaseFunctionType): + # Corresponds to a C type like 'int(int)', which is the C type of + # a function, but not a pointer-to-function. The backend has no + # notion of such a type; it's used temporarily by parsing. + _base_pattern = '(&)(%s)' + is_raw_function = True + + def build_backend_type(self, ffi, finishlist): + raise CDefError("cannot render the type %r: it is a function " + "type, not a pointer-to-function type" % (self,)) + + def as_function_pointer(self): + return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) + + +class FunctionPtrType(BaseFunctionType): + _base_pattern = '(*&)(%s)' + + def build_backend_type(self, ffi, finishlist): + result = self.result.get_cached_btype(ffi, finishlist) + args = [] + for tp in self.args: + args.append(tp.get_cached_btype(ffi, finishlist)) + abi_args = () + if self.abi == "__stdcall": + if not self.ellipsis: # __stdcall ignored for variadic funcs + try: + abi_args = (ffi._backend.FFI_STDCALL,) + except AttributeError: + pass + return global_cache(self, ffi, 'new_function_type', + tuple(args), result, self.ellipsis, *abi_args) + + def as_raw_function(self): + return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) + + +class PointerType(BaseType): + _attrs_ = ('totype', 'quals') + + def __init__(self, totype, quals=0): + self.totype = totype + self.quals = quals + extra = " *&" + if totype.is_array_type: + extra = "(%s)" % (extra.lstrip(),) + extra = qualify(quals, extra) + self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) + + def build_backend_type(self, ffi, finishlist): + BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) + return global_cache(self, ffi, 'new_pointer_type', BItem) + +voidp_type = PointerType(void_type) + +def ConstPointerType(totype): + return PointerType(totype, Q_CONST) + +const_voidp_type = ConstPointerType(void_type) + + +class NamedPointerType(PointerType): + _attrs_ = ('totype', 'name') + + def __init__(self, totype, name, quals=0): + PointerType.__init__(self, totype, quals) + self.name = name + self.c_name_with_marker = name + '&' + + +class ArrayType(BaseType): + _attrs_ = ('item', 'length') + is_array_type = True + + def __init__(self, item, length): + self.item = item + self.length = length + # + if length is None: + brackets = '&[]' + 
elif length == '...': + brackets = '&[/*...*/]' + else: + brackets = '&[%s]' % length + self.c_name_with_marker = ( + self.item.c_name_with_marker.replace('&', brackets)) + + def length_is_unknown(self): + return isinstance(self.length, str) + + def resolve_length(self, newlength): + return ArrayType(self.item, newlength) + + def build_backend_type(self, ffi, finishlist): + if self.length_is_unknown(): + raise CDefError("cannot render the type %r: unknown length" % + (self,)) + self.item.get_cached_btype(ffi, finishlist) # force the item BType + BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) + return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) + +char_array_type = ArrayType(PrimitiveType('char'), None) + + +class StructOrUnionOrEnum(BaseTypeByIdentity): + _attrs_ = ('name',) + forcename = None + + def build_c_name_with_marker(self): + name = self.forcename or '%s %s' % (self.kind, self.name) + self.c_name_with_marker = name + '&' + + def force_the_name(self, forcename): + self.forcename = forcename + self.build_c_name_with_marker() + + def get_official_name(self): + assert self.c_name_with_marker.endswith('&') + return self.c_name_with_marker[:-1] + + +class StructOrUnion(StructOrUnionOrEnum): + fixedlayout = None + completed = 0 + partial = False + packed = 0 + + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): + self.name = name + self.fldnames = fldnames + self.fldtypes = fldtypes + self.fldbitsize = fldbitsize + self.fldquals = fldquals + self.build_c_name_with_marker() + + def anonymous_struct_fields(self): + if self.fldtypes is not None: + for name, type in zip(self.fldnames, self.fldtypes): + if name == '' and isinstance(type, StructOrUnion): + yield type + + def enumfields(self, expand_anonymous_struct_union=True): + fldquals = self.fldquals + if fldquals is None: + fldquals = (0,) * len(self.fldnames) + for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, + self.fldbitsize, fldquals): + if (name == '' and isinstance(type, StructOrUnion) + and expand_anonymous_struct_union): + # nested anonymous struct/union + for result in type.enumfields(): + yield result + else: + yield (name, type, bitsize, quals) + + def force_flatten(self): + # force the struct or union to have a declaration that lists + # directly all fields returned by enumfields(), flattening + # nested anonymous structs/unions. 
+ names = [] + types = [] + bitsizes = [] + fldquals = [] + for name, type, bitsize, quals in self.enumfields(): + names.append(name) + types.append(type) + bitsizes.append(bitsize) + fldquals.append(quals) + self.fldnames = tuple(names) + self.fldtypes = tuple(types) + self.fldbitsize = tuple(bitsizes) + self.fldquals = tuple(fldquals) + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, + can_delay) + if not can_delay: + self.finish_backend_type(ffi, finishlist) + return BType + + def finish_backend_type(self, ffi, finishlist): + if self.completed: + if self.completed != 2: + raise NotImplementedError("recursive structure declaration " + "for '%s'" % (self.name,)) + return + BType = ffi._cached_btypes[self] + # + self.completed = 1 + # + if self.fldtypes is None: + pass # not completing it: it's an opaque struct + # + elif self.fixedlayout is None: + fldtypes = [tp.get_cached_btype(ffi, finishlist) + for tp in self.fldtypes] + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) + extra_flags = () + if self.packed: + if self.packed == 1: + extra_flags = (8,) # SF_PACKED + else: + extra_flags = (0, self.packed) + ffi._backend.complete_struct_or_union(BType, lst, self, + -1, -1, *extra_flags) + # + else: + fldtypes = [] + fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout + for i in range(len(self.fldnames)): + fsize = fieldsize[i] + ftype = self.fldtypes[i] + # + if isinstance(ftype, ArrayType) and ftype.length_is_unknown(): + # fix the length to match the total size + BItemType = ftype.item.get_cached_btype(ffi, finishlist) + nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) + if nrest != 0: + self._verification_error( + "field '%s.%s' has a bogus size?" 
% ( + self.name, self.fldnames[i] or '{}')) + ftype = ftype.resolve_length(nlen) + self.fldtypes = (self.fldtypes[:i] + (ftype,) + + self.fldtypes[i+1:]) + # + BFieldType = ftype.get_cached_btype(ffi, finishlist) + if isinstance(ftype, ArrayType) and ftype.length is None: + assert fsize == 0 + else: + bitemsize = ffi.sizeof(BFieldType) + if bitemsize != fsize: + self._verification_error( + "field '%s.%s' is declared as %d bytes, but is " + "really %d bytes" % (self.name, + self.fldnames[i] or '{}', + bitemsize, fsize)) + fldtypes.append(BFieldType) + # + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) + ffi._backend.complete_struct_or_union(BType, lst, self, + totalsize, totalalignment) + self.completed = 2 + + def _verification_error(self, msg): + raise VerificationError(msg) + + def check_not_partial(self): + if self.partial and self.fixedlayout is None: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + finishlist.append(self) + # + return global_cache(self, ffi, 'new_%s_type' % self.kind, + self.get_official_name(), key=self) + + +class StructType(StructOrUnion): + kind = 'struct' + + +class UnionType(StructOrUnion): + kind = 'union' + + +class EnumType(StructOrUnionOrEnum): + kind = 'enum' + partial = False + partial_resolved = False + + def __init__(self, name, enumerators, enumvalues, baseinttype=None): + self.name = name + self.enumerators = enumerators + self.enumvalues = enumvalues + self.baseinttype = baseinttype + self.build_c_name_with_marker() + + def force_the_name(self, forcename): + StructOrUnionOrEnum.force_the_name(self, forcename) + if self.forcename is None: + name = self.get_official_name() + self.forcename = '$' + name.replace(' ', '_') + + def check_not_partial(self): + if self.partial and not self.partial_resolved: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + base_btype = self.build_baseinttype(ffi, finishlist) + return global_cache(self, ffi, 'new_enum_type', + self.get_official_name(), + self.enumerators, self.enumvalues, + base_btype, key=self) + + def build_baseinttype(self, ffi, finishlist): + if self.baseinttype is not None: + return self.baseinttype.get_cached_btype(ffi, finishlist) + # + if self.enumvalues: + smallest_value = min(self.enumvalues) + largest_value = max(self.enumvalues) + else: + import warnings + try: + # XXX! The goal is to ensure that the warnings.warn() + # will not suppress the warning. We want to get it + # several times if we reach this point several times. 
+ __warningregistry__.clear() + except NameError: + pass + warnings.warn("%r has no values explicitly defined; " + "guessing that it is equivalent to 'unsigned int'" + % self._get_c_name()) + smallest_value = largest_value = 0 + if smallest_value < 0: # needs a signed type + sign = 1 + candidate1 = PrimitiveType("int") + candidate2 = PrimitiveType("long") + else: + sign = 0 + candidate1 = PrimitiveType("unsigned int") + candidate2 = PrimitiveType("unsigned long") + btype1 = candidate1.get_cached_btype(ffi, finishlist) + btype2 = candidate2.get_cached_btype(ffi, finishlist) + size1 = ffi.sizeof(btype1) + size2 = ffi.sizeof(btype2) + if (smallest_value >= ((-1) << (8*size1-1)) and + largest_value < (1 << (8*size1-sign))): + return btype1 + if (smallest_value >= ((-1) << (8*size2-1)) and + largest_value < (1 << (8*size2-sign))): + return btype2 + raise CDefError("%s values don't all fit into either 'long' " + "or 'unsigned long'" % self._get_c_name()) + +def unknown_type(name, structname=None): + if structname is None: + structname = '$%s' % name + tp = StructType(structname, None, None, None) + tp.force_the_name(name) + tp.origin = "unknown_type" + return tp + +def unknown_ptr_type(name, structname=None): + if structname is None: + structname = '$$%s' % name + tp = StructType(structname, None, None, None) + return NamedPointerType(tp, name) + + +global_lock = allocate_lock() +_typecache_cffi_backend = weakref.WeakValueDictionary() + +def get_typecache(backend): + # returns _typecache_cffi_backend if backend is the _cffi_backend + # module, or type(backend).__typecache if backend is an instance of + # CTypesBackend (or some FakeBackend class during tests) + if isinstance(backend, types.ModuleType): + return _typecache_cffi_backend + with global_lock: + if not hasattr(type(backend), '__typecache'): + type(backend).__typecache = weakref.WeakValueDictionary() + return type(backend).__typecache + +def global_cache(srctype, ffi, funcname, *args, **kwds): + key = kwds.pop('key', (funcname, args)) + assert not kwds + try: + return ffi._typecache[key] + except KeyError: + pass + try: + res = getattr(ffi._backend, funcname)(*args) + except NotImplementedError as e: + raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) + # note that setdefault() on WeakValueDictionary is not atomic + # and contains a rare bug (http://bugs.python.org/issue19542); + # we have to use a lock and do it ourselves + cache = ffi._typecache + with global_lock: + res1 = cache.get(key) + if res1 is None: + cache[key] = res + return res + else: + return res1 + +def pointer_cache(ffi, BType): + return global_cache('?', ffi, 'new_pointer_type', BType) + +def attach_exception_info(e, name): + if e.args and type(e.args[0]) is str: + e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/parse_c_type.h b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/parse_c_type.h new file mode 100644 index 000000000..84e4ef856 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/parse_c_type.h @@ -0,0 +1,181 @@ + +/* This part is from file 'cffi/parse_c_type.h'. It is copied at the + beginning of C sources generated by CFFI's ffi.set_source(). 
*/ + +typedef void *_cffi_opcode_t; + +#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) +#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) +#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) + +#define _CFFI_OP_PRIMITIVE 1 +#define _CFFI_OP_POINTER 3 +#define _CFFI_OP_ARRAY 5 +#define _CFFI_OP_OPEN_ARRAY 7 +#define _CFFI_OP_STRUCT_UNION 9 +#define _CFFI_OP_ENUM 11 +#define _CFFI_OP_FUNCTION 13 +#define _CFFI_OP_FUNCTION_END 15 +#define _CFFI_OP_NOOP 17 +#define _CFFI_OP_BITFIELD 19 +#define _CFFI_OP_TYPENAME 21 +#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs +#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs +#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg) +#define _CFFI_OP_CONSTANT 29 +#define _CFFI_OP_CONSTANT_INT 31 +#define _CFFI_OP_GLOBAL_VAR 33 +#define _CFFI_OP_DLOPEN_FUNC 35 +#define _CFFI_OP_DLOPEN_CONST 37 +#define _CFFI_OP_GLOBAL_VAR_F 39 +#define _CFFI_OP_EXTERN_PYTHON 41 + +#define _CFFI_PRIM_VOID 0 +#define _CFFI_PRIM_BOOL 1 +#define _CFFI_PRIM_CHAR 2 +#define _CFFI_PRIM_SCHAR 3 +#define _CFFI_PRIM_UCHAR 4 +#define _CFFI_PRIM_SHORT 5 +#define _CFFI_PRIM_USHORT 6 +#define _CFFI_PRIM_INT 7 +#define _CFFI_PRIM_UINT 8 +#define _CFFI_PRIM_LONG 9 +#define _CFFI_PRIM_ULONG 10 +#define _CFFI_PRIM_LONGLONG 11 +#define _CFFI_PRIM_ULONGLONG 12 +#define _CFFI_PRIM_FLOAT 13 +#define _CFFI_PRIM_DOUBLE 14 +#define _CFFI_PRIM_LONGDOUBLE 15 + +#define _CFFI_PRIM_WCHAR 16 +#define _CFFI_PRIM_INT8 17 +#define _CFFI_PRIM_UINT8 18 +#define _CFFI_PRIM_INT16 19 +#define _CFFI_PRIM_UINT16 20 +#define _CFFI_PRIM_INT32 21 +#define _CFFI_PRIM_UINT32 22 +#define _CFFI_PRIM_INT64 23 +#define _CFFI_PRIM_UINT64 24 +#define _CFFI_PRIM_INTPTR 25 +#define _CFFI_PRIM_UINTPTR 26 +#define _CFFI_PRIM_PTRDIFF 27 +#define _CFFI_PRIM_SIZE 28 +#define _CFFI_PRIM_SSIZE 29 +#define _CFFI_PRIM_INT_LEAST8 30 +#define _CFFI_PRIM_UINT_LEAST8 31 +#define _CFFI_PRIM_INT_LEAST16 32 +#define _CFFI_PRIM_UINT_LEAST16 33 +#define _CFFI_PRIM_INT_LEAST32 34 +#define _CFFI_PRIM_UINT_LEAST32 35 +#define _CFFI_PRIM_INT_LEAST64 36 +#define _CFFI_PRIM_UINT_LEAST64 37 +#define _CFFI_PRIM_INT_FAST8 38 +#define _CFFI_PRIM_UINT_FAST8 39 +#define _CFFI_PRIM_INT_FAST16 40 +#define _CFFI_PRIM_UINT_FAST16 41 +#define _CFFI_PRIM_INT_FAST32 42 +#define _CFFI_PRIM_UINT_FAST32 43 +#define _CFFI_PRIM_INT_FAST64 44 +#define _CFFI_PRIM_UINT_FAST64 45 +#define _CFFI_PRIM_INTMAX 46 +#define _CFFI_PRIM_UINTMAX 47 +#define _CFFI_PRIM_FLOATCOMPLEX 48 +#define _CFFI_PRIM_DOUBLECOMPLEX 49 +#define _CFFI_PRIM_CHAR16 50 +#define _CFFI_PRIM_CHAR32 51 + +#define _CFFI__NUM_PRIM 52 +#define _CFFI__UNKNOWN_PRIM (-1) +#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) +#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) + +#define _CFFI__IO_FILE_STRUCT (-1) + + +struct _cffi_global_s { + const char *name; + void *address; + _cffi_opcode_t type_op; + void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown + // OP_CPYTHON_BLTN_*: addr of direct function +}; + +struct _cffi_getconst_s { + unsigned long long value; + const struct _cffi_type_context_s *ctx; + int gindex; +}; + +struct _cffi_struct_union_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_STRUCT_UNION + int flags; // _CFFI_F_* flags below + size_t size; + int alignment; + int first_field_index; // -> _cffi_fields array + int num_fields; +}; +#define _CFFI_F_UNION 0x01 // is a union, not a struct +#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the + // "standard layout" or if some are missing +#define 
_CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct +#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() +#define _CFFI_F_OPAQUE 0x10 // opaque + +struct _cffi_field_s { + const char *name; + size_t field_offset; + size_t field_size; + _cffi_opcode_t field_type_op; +}; + +struct _cffi_enum_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_ENUM + int type_prim; // _CFFI_PRIM_xxx + const char *enumerators; // comma-delimited string +}; + +struct _cffi_typename_s { + const char *name; + int type_index; /* if opaque, points to a possibly artificial + OP_STRUCT which is itself opaque */ +}; + +struct _cffi_type_context_s { + _cffi_opcode_t *types; + const struct _cffi_global_s *globals; + const struct _cffi_field_s *fields; + const struct _cffi_struct_union_s *struct_unions; + const struct _cffi_enum_s *enums; + const struct _cffi_typename_s *typenames; + int num_globals; + int num_struct_unions; + int num_enums; + int num_typenames; + const char *const *includes; + int num_types; + int flags; /* future extension */ +}; + +struct _cffi_parse_info_s { + const struct _cffi_type_context_s *ctx; + _cffi_opcode_t *output; + unsigned int output_size; + size_t error_location; + const char *error_message; +}; + +struct _cffi_externpy_s { + const char *name; + size_t size_of_result; + void *reserved1, *reserved2; +}; + +#ifdef _CFFI_INTERNAL +static int parse_c_type(struct _cffi_parse_info_s *info, const char *input); +static int search_in_globals(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +static int search_in_struct_unions(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +#endif diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/pkgconfig.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/pkgconfig.py new file mode 100644 index 000000000..5c93f15a6 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/pkgconfig.py @@ -0,0 +1,121 @@ +# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi +import sys, os, subprocess + +from .error import PkgConfigError + + +def merge_flags(cfg1, cfg2): + """Merge values from cffi config flags cfg2 to cf1 + + Example: + merge_flags({"libraries": ["one"]}, {"libraries": ["two"]}) + {"libraries": ["one", "two"]} + """ + for key, value in cfg2.items(): + if key not in cfg1: + cfg1[key] = value + else: + if not isinstance(cfg1[key], list): + raise TypeError("cfg1[%r] should be a list of strings" % (key,)) + if not isinstance(value, list): + raise TypeError("cfg2[%r] should be a list of strings" % (key,)) + cfg1[key].extend(value) + return cfg1 + + +def call(libname, flag, encoding=sys.getfilesystemencoding()): + """Calls pkg-config and returns the output if found + """ + a = ["pkg-config", "--print-errors"] + a.append(flag) + a.append(libname) + try: + pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except EnvironmentError as e: + raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),)) + + bout, berr = pc.communicate() + if pc.returncode != 0: + try: + berr = berr.decode(encoding) + except Exception: + pass + raise PkgConfigError(berr.strip()) + + if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x + try: + bout = bout.decode(encoding) + except UnicodeDecodeError: + raise PkgConfigError("pkg-config %s %s returned bytes that cannot " + "be decoded with encoding %r:\n%r" % + (flag, libname, encoding, bout)) + + if os.altsep != 
'\\' and '\\' in bout: + raise PkgConfigError("pkg-config %s %s returned an unsupported " + "backslash-escaped output:\n%r" % + (flag, libname, bout)) + return bout + + +def flags_from_pkgconfig(libs): + r"""Return compiler line flags for FFI.set_source based on pkg-config output + + Usage + ... + ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"]) + + If pkg-config is installed on build machine, then arguments include_dirs, + library_dirs, libraries, define_macros, extra_compile_args and + extra_link_args are extended with an output of pkg-config for libfoo and + libbar. + + Raises PkgConfigError in case the pkg-config call fails. + """ + + def get_include_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-I")] + + def get_library_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-L")] + + def get_libraries(string): + return [x[2:] for x in string.split() if x.startswith("-l")] + + # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils + def get_macros(string): + def _macro(x): + x = x[2:] # drop "-D" + if '=' in x: + return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar") + else: + return (x, None) # "-Dfoo" => ("foo", None) + return [_macro(x) for x in string.split() if x.startswith("-D")] + + def get_other_cflags(string): + return [x for x in string.split() if not x.startswith("-I") and + not x.startswith("-D")] + + def get_other_libs(string): + return [x for x in string.split() if not x.startswith("-L") and + not x.startswith("-l")] + + # return kwargs for given libname + def kwargs(libname): + fse = sys.getfilesystemencoding() + all_cflags = call(libname, "--cflags") + all_libs = call(libname, "--libs") + return { + "include_dirs": get_include_dirs(all_cflags), + "library_dirs": get_library_dirs(all_libs), + "libraries": get_libraries(all_libs), + "define_macros": get_macros(all_cflags), + "extra_compile_args": get_other_cflags(all_cflags), + "extra_link_args": get_other_libs(all_libs), + } + + # merge all arguments together + ret = {} + for libname in libs: + lib_flags = kwargs(libname) + merge_flags(ret, lib_flags) + return ret diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/recompiler.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/recompiler.py new file mode 100644 index 000000000..4167bc05f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/recompiler.py @@ -0,0 +1,1581 @@ +import os, sys, io +from . 
import ffiplatform, model +from .error import VerificationError +from .cffi_opcode import * + +VERSION_BASE = 0x2601 +VERSION_EMBEDDED = 0x2701 +VERSION_CHAR16CHAR32 = 0x2801 + +USE_LIMITED_API = (sys.platform != 'win32' or sys.version_info < (3, 0) or + sys.version_info >= (3, 5)) + + +class GlobalExpr: + def __init__(self, name, address, type_op, size=0, check_value=0): + self.name = name + self.address = address + self.type_op = type_op + self.size = size + self.check_value = check_value + + def as_c_expr(self): + return ' { "%s", (void *)%s, %s, (void *)%s },' % ( + self.name, self.address, self.type_op.as_c_expr(), self.size) + + def as_python_expr(self): + return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, + self.check_value) + +class FieldExpr: + def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): + self.name = name + self.field_offset = field_offset + self.field_size = field_size + self.fbitsize = fbitsize + self.field_type_op = field_type_op + + def as_c_expr(self): + spaces = " " * len(self.name) + return (' { "%s", %s,\n' % (self.name, self.field_offset) + + ' %s %s,\n' % (spaces, self.field_size) + + ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) + + def as_python_expr(self): + raise NotImplementedError + + def as_field_python_expr(self): + if self.field_type_op.op == OP_NOOP: + size_expr = '' + elif self.field_type_op.op == OP_BITFIELD: + size_expr = format_four_bytes(self.fbitsize) + else: + raise NotImplementedError + return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), + size_expr, + self.name) + +class StructUnionExpr: + def __init__(self, name, type_index, flags, size, alignment, comment, + first_field_index, c_fields): + self.name = name + self.type_index = type_index + self.flags = flags + self.size = size + self.alignment = alignment + self.comment = comment + self.first_field_index = first_field_index + self.c_fields = c_fields + + def as_c_expr(self): + return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags) + + '\n %s, %s, ' % (self.size, self.alignment) + + '%d, %d ' % (self.first_field_index, len(self.c_fields)) + + ('/* %s */ ' % self.comment if self.comment else '') + + '},') + + def as_python_expr(self): + flags = eval(self.flags, G_FLAGS) + fields_expr = [c_field.as_field_python_expr() + for c_field in self.c_fields] + return "(b'%s%s%s',%s)" % ( + format_four_bytes(self.type_index), + format_four_bytes(flags), + self.name, + ','.join(fields_expr)) + +class EnumExpr: + def __init__(self, name, type_index, size, signed, allenums): + self.name = name + self.type_index = type_index + self.size = size + self.signed = signed + self.allenums = allenums + + def as_c_expr(self): + return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' + ' "%s" },' % (self.name, self.type_index, + self.size, self.signed, self.allenums)) + + def as_python_expr(self): + prim_index = { + (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, + (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, + (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, + (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, + }[self.size, self.signed] + return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), + format_four_bytes(prim_index), + self.name, self.allenums) + +class TypenameExpr: + def __init__(self, name, type_index): + self.name = name + self.type_index = type_index + + def as_c_expr(self): + return ' { "%s", %d },' % (self.name, self.type_index) + + def as_python_expr(self): + return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) + + +# 
____________________________________________________________ + + +class Recompiler: + _num_externpy = 0 + + def __init__(self, ffi, module_name, target_is_python=False): + self.ffi = ffi + self.module_name = module_name + self.target_is_python = target_is_python + self._version = VERSION_BASE + + def needs_version(self, ver): + self._version = max(self._version, ver) + + def collect_type_table(self): + self._typesdict = {} + self._generate("collecttype") + # + all_decls = sorted(self._typesdict, key=str) + # + # prepare all FUNCTION bytecode sequences first + self.cffi_types = [] + for tp in all_decls: + if tp.is_raw_function: + assert self._typesdict[tp] is None + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + for tp1 in tp.args: + assert isinstance(tp1, (model.VoidType, + model.BasePrimitiveType, + model.PointerType, + model.StructOrUnionOrEnum, + model.FunctionPtrType)) + if self._typesdict[tp1] is None: + self._typesdict[tp1] = len(self.cffi_types) + self.cffi_types.append(tp1) # placeholder + self.cffi_types.append('END') # placeholder + # + # prepare all OTHER bytecode sequences + for tp in all_decls: + if not tp.is_raw_function and self._typesdict[tp] is None: + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + if tp.is_array_type and tp.length is not None: + self.cffi_types.append('LEN') # placeholder + assert None not in self._typesdict.values() + # + # collect all structs and unions and enums + self._struct_unions = {} + self._enums = {} + for tp in all_decls: + if isinstance(tp, model.StructOrUnion): + self._struct_unions[tp] = None + elif isinstance(tp, model.EnumType): + self._enums[tp] = None + for i, tp in enumerate(sorted(self._struct_unions, + key=lambda tp: tp.name)): + self._struct_unions[tp] = i + for i, tp in enumerate(sorted(self._enums, + key=lambda tp: tp.name)): + self._enums[tp] = i + # + # emit all bytecode sequences now + for tp in all_decls: + method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) + method(tp, self._typesdict[tp]) + # + # consistency check + for op in self.cffi_types: + assert isinstance(op, CffiOp) + self.cffi_types = tuple(self.cffi_types) # don't change any more + + def _enum_fields(self, tp): + # When producing C, expand all anonymous struct/union fields. + # That's necessary to have C code checking the offsets of the + # individual fields contained in them. When producing Python, + # don't do it and instead write it like it is, with the + # corresponding fields having an empty name. Empty names are + # recognized at runtime when we import the generated Python + # file. 
+ expand_anonymous_struct_union = not self.target_is_python + return tp.enumfields(expand_anonymous_struct_union) + + def _do_collect_type(self, tp): + if not isinstance(tp, model.BaseTypeByIdentity): + if isinstance(tp, tuple): + for x in tp: + self._do_collect_type(x) + return + if tp not in self._typesdict: + self._typesdict[tp] = None + if isinstance(tp, model.FunctionPtrType): + self._do_collect_type(tp.as_raw_function()) + elif isinstance(tp, model.StructOrUnion): + if tp.fldtypes is not None and ( + tp not in self.ffi._parser._included_declarations): + for name1, tp1, _, _ in self._enum_fields(tp): + self._do_collect_type(self._field_type(tp, name1, tp1)) + else: + for _, x in tp._get_items(): + self._do_collect_type(x) + + def _generate(self, step_name): + lst = self.ffi._parser._declarations.items() + for name, (tp, quals) in sorted(lst): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in recompile(): %r" % name) + try: + self._current_quals = quals + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + # ---------- + + ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] + + def collect_step_tables(self): + # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. + self._lsts = {} + for step_name in self.ALL_STEPS: + self._lsts[step_name] = [] + self._seen_struct_unions = set() + self._generate("ctx") + self._add_missing_struct_unions() + # + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if step_name != "field": + lst.sort(key=lambda entry: entry.name) + self._lsts[step_name] = tuple(lst) # don't change any more + # + # check for a possible internal inconsistency: _cffi_struct_unions + # should have been generated with exactly self._struct_unions + lst = self._lsts["struct_union"] + for tp, i in self._struct_unions.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._struct_unions) + # same with enums + lst = self._lsts["enum"] + for tp, i in self._enums.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._enums) + + # ---------- + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self, f, preamble): + if self.target_is_python: + assert preamble is None + self.write_py_source_to_f(f) + else: + assert preamble is not None + self.write_c_source_to_f(f, preamble) + + def _rel_readlines(self, filename): + g = open(os.path.join(os.path.dirname(__file__), filename), 'r') + lines = g.readlines() + g.close() + return lines + + def write_c_source_to_f(self, f, preamble): + self._f = f + prnt = self._prnt + if self.ffi._embedding is not None: + prnt('#define _CFFI_USE_EMBEDDING') + if not USE_LIMITED_API: + prnt('#define _CFFI_NO_LIMITED_API') + # + # first the '#include' (actually done by inlining the file's content) + lines = self._rel_readlines('_cffi_include.h') + i = lines.index('#include "parse_c_type.h"\n') + lines[i:i+1] = self._rel_readlines('parse_c_type.h') + prnt(''.join(lines)) + # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {') + 
self._print_string_literal_in_array(self.ffi._embedding) + prnt('0 };') + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + i = lines.index('#include "_cffi_errors.h"\n') + lines[i:i+1] = self._rel_readlines('_cffi_errors.h') + prnt(''.join(lines)) + self.needs_version(VERSION_EMBEDDED) + # + # then paste the C source given by the user, verbatim. + prnt('/************************************************************/') + prnt() + prnt(preamble) + prnt() + prnt('/************************************************************/') + prnt() + # + # the declaration of '_cffi_types' + prnt('static void *_cffi_types[] = {') + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + for i, op in enumerate(self.cffi_types): + comment = '' + if i in typeindex2type: + comment = ' // ' + typeindex2type[i]._get_c_name() + prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) + if not self.cffi_types: + prnt(' 0') + prnt('};') + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._seen_constants = set() + self._generate("decl") + # + # the declaration of '_cffi_globals' and '_cffi_typenames' + nums = {} + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + nums[step_name] = len(lst) + if nums[step_name] > 0: + prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( + step_name, step_name)) + for entry in lst: + prnt(entry.as_c_expr()) + prnt('};') + prnt() + # + # the declaration of '_cffi_includes' + if self.ffi._included_ffis: + prnt('static const char * const _cffi_includes[] = {') + for ffi_to_include in self.ffi._included_ffis: + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is None: + raise VerificationError( + "not implemented yet: ffi.include() of a Python-based " + "ffi inside a C-based ffi") + prnt(' "%s",' % (included_module_name,)) + prnt(' NULL') + prnt('};') + prnt() + # + # the declaration of '_cffi_type_context' + prnt('static const struct _cffi_type_context_s _cffi_type_context = {') + prnt(' _cffi_types,') + for step_name in self.ALL_STEPS: + if nums[step_name] > 0: + prnt(' _cffi_%ss,' % step_name) + else: + prnt(' NULL, /* no %ss */' % step_name) + for step_name in self.ALL_STEPS: + if step_name != "field": + prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) + if self.ffi._included_ffis: + prnt(' _cffi_includes,') + else: + prnt(' NULL, /* no includes */') + prnt(' %d, /* num_types */' % (len(self.cffi_types),)) + flags = 0 + if self._num_externpy > 0 or self.ffi._embedding is not None: + flags |= 1 # set to mean that we use extern "Python" + prnt(' %d, /* flags */' % flags) + prnt('};') + prnt() + # + # the init function + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility push(default) /* for -fvisibility= */') + prnt('#endif') + prnt() + prnt('#ifdef PYPY_VERSION') + prnt('PyMODINIT_FUNC') + prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) + prnt('{') + if flags & 
1: + prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') + prnt(' _cffi_call_python_org = ' + '(void(*)(struct _cffi_externpy_s *, char *))p[1];') + prnt(' }') + prnt(' p[0] = (const void *)0x%x;' % self._version) + prnt(' p[1] = &_cffi_type_context;') + prnt('#if PY_MAJOR_VERSION >= 3') + prnt(' return NULL;') + prnt('#endif') + prnt('}') + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + prnt('# ifdef _MSC_VER') + prnt(' PyMODINIT_FUNC') + prnt('# if PY_MAJOR_VERSION >= 3') + prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) + prnt('# else') + prnt(' init%s(void) { }' % (base_module_name,)) + prnt('# endif') + prnt('# endif') + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % (base_module_name,)) + prnt('{') + prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#else') + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % (base_module_name,)) + prnt('{') + prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#endif') + prnt() + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility pop') + prnt('#endif') + self._version = None + + def _to_py(self, x): + if isinstance(x, str): + return "b'%s'" % (x,) + if isinstance(x, (list, tuple)): + rep = [self._to_py(item) for item in x] + if len(rep) == 1: + rep.append('') + return "(%s)" % (','.join(rep),) + return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. + + def write_py_source_to_f(self, f): + self._f = f + prnt = self._prnt + # + # header + prnt("# auto-generated file") + prnt("import _cffi_backend") + # + # the 'import' of the included ffis + num_includes = len(self.ffi._included_ffis or ()) + for i in range(num_includes): + ffi_to_include = self.ffi._included_ffis[i] + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is not None: + raise VerificationError( + "not implemented yet: ffi.include() of a C-based " + "ffi inside a Python-based ffi") + prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) + prnt() + prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) + prnt(" _version = 0x%x," % (self._version,)) + self._version = None + # + # the '_types' keyword argument + self.cffi_types = tuple(self.cffi_types) # don't change any more + types_lst = [op.as_python_bytes() for op in self.cffi_types] + prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + # + # the keyword arguments from ALL_STEPS + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if len(lst) > 0 and step_name != "field": + prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) + # + # the '_includes' keyword argument + if num_includes > 0: + prnt(' _includes = (%s,),' % ( + ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) + # + # the footer + prnt(')') + + # ---------- + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! 
:-) + return self._typesdict[type] + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type(): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif isinstance(tp, model.UnknownFloatType): + # don't check with is_float_type(): it may be a 'long + # double' here, and _cffi_to_c_double would loose precision + converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) + else: + cname = tp.get_c_name('') + converter = '(%s)_cffi_to_c_%s' % (cname, + tp.name.replace(' ', '_')) + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif (isinstance(tp, model.StructOrUnionOrEnum) or + isinstance(tp, model.BasePrimitiveType)): + # a struct (not a struct pointer) as a function argument; + # or, a complex (the same code works) + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? 
' + '(%s)alloca((size_t)datasize) : NULL;' % ( + tovar, tp.get_c_name(''))) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif isinstance(tp, model.UnknownFloatType): + return '_cffi_from_c_double(%s)' % (var,) + elif tp.name != 'long double' and not tp.is_complex_type(): + cname = tp.name.replace(' ', '_') + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + return '_cffi_from_c_%s(%s)' % (cname, var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs + + def _typedef_type(self, tp, name): + return self._global_type(tp, "(*(%s *)0)" % (name,)) + + def _generate_cpy_typedef_collecttype(self, tp, name): + self._do_collect_type(self._typedef_type(tp, name)) + + def _generate_cpy_typedef_decl(self, tp, name): + pass + + def _typedef_ctx(self, tp, name): + type_index = self._typesdict[tp] + self._lsts["typename"].append(TypenameExpr(name, type_index)) + + def _generate_cpy_typedef_ctx(self, tp, name): + tp = self._typedef_type(tp, name) + self._typedef_ctx(tp, name) + if getattr(tp, "origin", None) == "unknown_type": + self._struct_ctx(tp, tp.name, approxname=None) + elif isinstance(tp, model.NamedPointerType): + self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, + named_ptr=tp) + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + self._do_collect_type(tp.as_raw_function()) + if tp.ellipsis and not self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_function_decl(self, tp, name): + assert not self.target_is_python + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_constant_decl(tp, name) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + # + # ------------------------------ + # the 'd' version of the function, only for addressof(lib, 'func') + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arguments.append(type.get_c_name(' x%d' % i, context)) + call_arguments.append('x%d' % i) + repr_arguments = ', 
'.join(arguments) + repr_arguments = repr_arguments or 'void' + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) + prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) + prnt('{') + call_arguments = ', '.join(call_arguments) + result_code = 'return ' + if isinstance(tp.result, model.VoidType): + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, call_arguments)) + prnt('}') + # + prnt('#ifndef PYPY_VERSION') # ------------------------------ + # + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' x%d' % i, context) + prnt(' %s;' % arg) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + result_decl = ' %s;' % tp.result.get_c_name(' result', context) + prnt(result_decl) + prnt(' PyObject *pyresult;') + else: + result_decl = None + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % ( + name, len(rng), len(rng), + ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + call_arguments = ['x%d' % i for i in range(len(tp.args))] + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + # + prnt('#else') # ------------------------------ + # + # the PyPy version: need to replace struct/union arguments with + # pointers, and if the result is a struct/union, insert a first + # arg that is a pointer to the result. We also do that for + # complex args and return type. 
+ def need_indirection(type): + return (isinstance(type, model.StructOrUnion) or + (isinstance(type, model.PrimitiveType) and + type.is_complex_type())) + difference = False + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + indirection = '' + if need_indirection(type): + indirection = '*' + difference = True + arg = type.get_c_name(' %sx%d' % (indirection, i), context) + arguments.append(arg) + call_arguments.append('%sx%d' % (indirection, i)) + tp_result = tp.result + if need_indirection(tp_result): + context = 'result of %s' % name + arg = tp_result.get_c_name(' *result', context) + arguments.insert(0, arg) + tp_result = model.void_type + result_decl = None + result_code = '*result = ' + difference = True + if difference: + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, + repr_arguments) + prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) + prnt('{') + if result_decl: + prnt(result_decl) + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + if result_decl: + prnt(' return result;') + prnt('}') + else: + prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) + # + prnt('#endif') # ------------------------------ + prnt() + + def _generate_cpy_function_ctx(self, tp, name): + if tp.ellipsis and not self.target_is_python: + self._generate_cpy_constant_ctx(tp, name) + return + type_index = self._typesdict[tp.as_raw_function()] + numargs = len(tp.args) + if self.target_is_python: + meth_kind = OP_DLOPEN_FUNC + elif numargs == 0: + meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' + elif numargs == 1: + meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' + else: + meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' + self._lsts["global"].append( + GlobalExpr(name, '_cffi_f_%s' % name, + CffiOp(meth_kind, type_index), + size='_cffi_d_%s' % name)) + + # ---------- + # named structs or unions + + def _field_type(self, tp_struct, field_name, tp_field): + if isinstance(tp_field, model.ArrayType): + actual_length = tp_field.length + if actual_length == '...': + ptr_struct_name = tp_struct.get_c_name('*') + actual_length = '_cffi_array_len(((%s)0)->%s)' % ( + ptr_struct_name, field_name) + tp_item = self._field_type(tp_struct, '%s[0]' % field_name, + tp_field.item) + tp_field = model.ArrayType(tp_item, actual_length) + return tp_field + + def _struct_collecttype(self, tp): + self._do_collect_type(tp) + if self.target_is_python: + # also requires nested anon struct/unions in ABI mode, recursively + for fldtype in tp.anonymous_struct_fields(): + self._struct_collecttype(fldtype) + + def _struct_decl(self, tp, cname, approxname): + if tp.fldtypes is None: + return + prnt = self._prnt + checkfuncname = '_cffi_checkfld_%s' % (approxname,) + prnt('_CFFI_UNUSED_FN') + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in self._enum_fields(tp): + try: + if ftype.is_integer_type() or fbitsize >= 0: + # accept all integers, but complain on float or double + if fname != '': + prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " + "an integer */" % (fname, cname, fname)) + continue + # only accept exactly the type declared, except that '[]' + # is interpreted as a '*' and so will match any array length. + # (It would also match '*', but that's harder to detect...) 
+ while (isinstance(ftype, model.ArrayType) + and (ftype.length is None or ftype.length == '...')): + ftype = ftype.item + fname = fname + '[0]' + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) + prnt() + + def _struct_ctx(self, tp, cname, approxname, named_ptr=None): + type_index = self._typesdict[tp] + reason_for_not_expanding = None + flags = [] + if isinstance(tp, model.UnionType): + flags.append("_CFFI_F_UNION") + if tp.fldtypes is None: + flags.append("_CFFI_F_OPAQUE") + reason_for_not_expanding = "opaque" + if (tp not in self.ffi._parser._included_declarations and + (named_ptr is None or + named_ptr not in self.ffi._parser._included_declarations)): + if tp.fldtypes is None: + pass # opaque + elif tp.partial or any(tp.anonymous_struct_fields()): + pass # field layout obtained silently from the C compiler + else: + flags.append("_CFFI_F_CHECK_FIELDS") + if tp.packed: + if tp.packed > 1: + raise NotImplementedError( + "%r is declared with 'pack=%r'; only 0 or 1 are " + "supported in API mode (try to use \"...;\", which " + "does not require a 'pack' declaration)" % + (tp, tp.packed)) + flags.append("_CFFI_F_PACKED") + else: + flags.append("_CFFI_F_EXTERNAL") + reason_for_not_expanding = "external" + flags = '|'.join(flags) or '0' + c_fields = [] + if reason_for_not_expanding is None: + enumfields = list(self._enum_fields(tp)) + for fldname, fldtype, fbitsize, fqual in enumfields: + fldtype = self._field_type(tp, fldname, fldtype) + self._check_not_opaque(fldtype, + "field '%s.%s'" % (tp.name, fldname)) + # cname is None for _add_missing_struct_unions() only + op = OP_NOOP + if fbitsize >= 0: + op = OP_BITFIELD + size = '%d /* bits */' % fbitsize + elif cname is None or ( + isinstance(fldtype, model.ArrayType) and + fldtype.length is None): + size = '(size_t)-1' + else: + size = 'sizeof(((%s)0)->%s)' % ( + tp.get_c_name('*') if named_ptr is None + else named_ptr.name, + fldname) + if cname is None or fbitsize >= 0: + offset = '(size_t)-1' + elif named_ptr is not None: + offset = '((char *)&((%s)0)->%s) - (char *)0' % ( + named_ptr.name, fldname) + else: + offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) + c_fields.append( + FieldExpr(fldname, offset, size, fbitsize, + CffiOp(op, self._typesdict[fldtype]))) + first_field_index = len(self._lsts["field"]) + self._lsts["field"].extend(c_fields) + # + if cname is None: # unknown name, for _add_missing_struct_unions + size = '(size_t)-2' + align = -2 + comment = "unnamed" + else: + if named_ptr is not None: + size = 'sizeof(*(%s)0)' % (named_ptr.name,) + align = '-1 /* unknown alignment */' + else: + size = 'sizeof(%s)' % (cname,) + align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) + comment = None + else: + size = '(size_t)-1' + align = -1 + first_field_index = -1 + comment = reason_for_not_expanding + self._lsts["struct_union"].append( + StructUnionExpr(tp.name, type_index, flags, size, align, comment, + first_field_index, c_fields)) + self._seen_struct_unions.add(tp) + + def _check_not_opaque(self, tp, location): + while isinstance(tp, model.ArrayType): + tp = tp.item + if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None: + raise TypeError( + "%s is of an opaque type (not declared in cdef())" % location) + + def _add_missing_struct_unions(self): + # not very nice, but some 
struct declarations might be missing + # because they don't have any known C name. Check that they are + # not partial (we can't complete or verify them!) and emit them + # anonymously. + lst = list(self._struct_unions.items()) + lst.sort(key=lambda tp_order: tp_order[1]) + for tp, order in lst: + if tp not in self._seen_struct_unions: + if tp.partial: + raise NotImplementedError("internal inconsistency: %r is " + "partial but was not seen at " + "this point" % (tp,)) + if tp.name.startswith('$') and tp.name[1:].isdigit(): + approxname = tp.name[1:] + elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': + approxname = 'FILE' + self._typedef_ctx(tp, 'FILE') + else: + raise NotImplementedError("internal inconsistency: %r" % + (tp,)) + self._struct_ctx(tp, None, approxname) + + def _generate_cpy_struct_collecttype(self, tp, name): + self._struct_collecttype(tp) + _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype + + def _struct_names(self, tp): + cname = tp.get_c_name('') + if ' ' in cname: + return cname, cname.replace(' ', '_') + else: + return cname, '_' + cname + + def _generate_cpy_struct_decl(self, tp, name): + self._struct_decl(tp, *self._struct_names(tp)) + _generate_cpy_union_decl = _generate_cpy_struct_decl + + def _generate_cpy_struct_ctx(self, tp, name): + self._struct_ctx(tp, *self._struct_names(tp)) + _generate_cpy_union_ctx = _generate_cpy_struct_ctx + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + def _generate_cpy_anonymous_collecttype(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_collecttype(tp, name) + else: + self._struct_collecttype(tp) + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp) + else: + self._struct_decl(tp, name, 'typedef_' + name) + + def _generate_cpy_anonymous_ctx(self, tp, name): + if isinstance(tp, model.EnumType): + self._enum_ctx(tp, name) + else: + self._struct_ctx(tp, name, 'typedef_' + name) + + # ---------- + # constants, declared with "static const ..." 
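# Illustration only, not part of cffi: a hedged sketch of the user-facing side
# that feeds the constant machinery below, using a hypothetical module name
# "_example" and constant FORTY_TWO.
from cffi import FFI

ffibuilder = FFI()
ffibuilder.cdef("static const int FORTY_TWO;")              # declared as a constant
ffibuilder.set_source("_example", "#define FORTY_TWO 42")
# ffibuilder.compile() would emit the checking/loading C code via
# _generate_cpy_const() below, and the built module would expose
# lib.FORTY_TWO == 42.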
+ + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + check_value=None): + if (category, name) in self._seen_constants: + raise VerificationError( + "duplicate declaration of %s '%s'" % (category, name)) + self._seen_constants.add((category, name)) + # + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + if is_int: + prnt('static int %s(unsigned long long *o)' % funcname) + prnt('{') + prnt(' int n = (%s) <= 0;' % (name,)) + prnt(' *o = (unsigned long long)((%s) | 0);' + ' /* check that %s is an integer */' % (name, name)) + if check_value is not None: + if check_value > 0: + check_value = '%dU' % (check_value,) + prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) + prnt(' n |= 2;') + prnt(' return n;') + prnt('}') + else: + assert check_value is None + prnt('static void %s(char *o)' % funcname) + prnt('{') + prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = tp.is_integer_type() + if not is_int or self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + def _generate_cpy_constant_ctx(self, tp, name): + if not self.target_is_python and tp.is_integer_type(): + type_op = CffiOp(OP_CONSTANT_INT, -1) + else: + if self.target_is_python: + const_kind = OP_DLOPEN_CONST + else: + const_kind = OP_CONSTANT + type_index = self._typesdict[tp] + type_op = CffiOp(const_kind, type_index) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op)) + + # ---------- + # enums + + def _generate_cpy_enum_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_enum_decl(self, tp, name=None): + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator) + + def _enum_ctx(self, tp, cname): + type_index = self._typesdict[tp] + type_op = CffiOp(OP_ENUM, -1) + if self.target_is_python: + tp.check_not_partial() + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._lsts["global"].append( + GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, + check_value=enumvalue)) + # + if cname is not None and '$' not in cname and not self.target_is_python: + size = "sizeof(%s)" % cname + signed = "((%s)-1) <= 0" % cname + else: + basetp = tp.build_baseinttype(self.ffi, []) + size = self.ffi.sizeof(basetp) + signed = int(int(self.ffi.cast(basetp, -1)) < 0) + allenums = ",".join(tp.enumerators) + self._lsts["enum"].append( + EnumExpr(tp.name, type_index, size, signed, allenums)) + + def _generate_cpy_enum_ctx(self, tp, name): + self._enum_ctx(tp, tp._get_c_name()) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_collecttype(self, tp, name): + pass + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + def _generate_cpy_macro_ctx(self, tp, name): + if tp == '...': + if self.target_is_python: + raise VerificationError( + "cannot use the syntax '...' in '#define %s ...' 
when " + "using the ABI mode" % (name,)) + check_value = None + else: + check_value = tp # an integer + type_op = CffiOp(OP_CONSTANT_INT, -1) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op, + check_value=check_value)) + + # ---------- + # global variables + + def _global_type(self, tp, global_name): + if isinstance(tp, model.ArrayType): + actual_length = tp.length + if actual_length == '...': + actual_length = '_cffi_array_len(%s)' % (global_name,) + tp_item = self._global_type(tp.item, '%s[0]' % global_name) + tp = model.ArrayType(tp_item, actual_length) + return tp + + def _generate_cpy_variable_collecttype(self, tp, name): + self._do_collect_type(self._global_type(tp, name)) + + def _generate_cpy_variable_decl(self, tp, name): + prnt = self._prnt + tp = self._global_type(tp, name) + if isinstance(tp, model.ArrayType) and tp.length is None: + tp = tp.item + ampersand = '' + else: + ampersand = '&' + # This code assumes that casts from "tp *" to "void *" is a + # no-op, i.e. a function that returns a "tp *" can be called + # as if it returned a "void *". This should be generally true + # on any modern machine. The only exception to that rule (on + # uncommon architectures, and as far as I can tell) might be + # if 'tp' were a function type, but that is not possible here. + # (If 'tp' is a function _pointer_ type, then casts from "fn_t + # **" to "void *" are again no-ops, as far as I can tell.) + decl = '*_cffi_var_%s(void)' % (name,) + prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) + prnt('{') + prnt(' return %s(%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_cpy_variable_ctx(self, tp, name): + tp = self._global_type(tp, name) + type_index = self._typesdict[tp] + if self.target_is_python: + op = OP_GLOBAL_VAR + else: + op = OP_GLOBAL_VAR_F + self._lsts["global"].append( + GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) + + # ---------- + # extern "Python" + + def _generate_cpy_extern_python_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + self._do_collect_type(tp) + _generate_cpy_dllexport_python_collecttype = \ + _generate_cpy_extern_python_plus_c_collecttype = \ + _generate_cpy_extern_python_collecttype + + def _extern_python_decl(self, tp, name, tag_and_space): + prnt = self._prnt + if isinstance(tp.result, model.VoidType): + size_of_result = '0' + else: + context = 'result of %s' % name + size_of_result = '(int)sizeof(%s)' % ( + tp.result.get_c_name('', context),) + prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) + prnt(' { "%s.%s", %s, 0, 0 };' % ( + self.module_name, name, size_of_result)) + prnt() + # + arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' a%d' % i, context) + arguments.append(arg) + # + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments + # + def may_need_128_bits(tp): + return (isinstance(tp, model.PrimitiveType) and + tp.name == 'long double') + # + size_of_a = max(len(tp.args)*8, 8) + if may_need_128_bits(tp.result): + size_of_a = max(size_of_a, 16) + if isinstance(tp.result, model.StructOrUnion): + size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( + tp.result.get_c_name(''), size_of_a, + tp.result.get_c_name(''), size_of_a) + prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments))) + prnt('{') + prnt(' char a[%s];' % size_of_a) + prnt(' char *p = a;') + for i, type in enumerate(tp.args): + arg = 'a%d' % i + if (isinstance(type, model.StructOrUnion) or + may_need_128_bits(type)): + arg = '&' + arg + type = model.PointerType(type) + prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) + prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) + if not isinstance(tp.result, model.VoidType): + prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) + prnt('}') + prnt() + self._num_externpy += 1 + + def _generate_cpy_extern_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'static ') + + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ') + + def _generate_cpy_extern_python_plus_c_decl(self, tp, name): + self._extern_python_decl(tp, name, '') + + def _generate_cpy_extern_python_ctx(self, tp, name): + if self.target_is_python: + raise VerificationError( + "cannot use 'extern \"Python\"' in the ABI mode") + if tp.ellipsis: + raise NotImplementedError("a vararg function is extern \"Python\"") + type_index = self._typesdict[tp] + type_op = CffiOp(OP_EXTERN_PYTHON, type_index) + self._lsts["global"].append( + GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + + _generate_cpy_dllexport_python_ctx = \ + _generate_cpy_extern_python_plus_c_ctx = \ + _generate_cpy_extern_python_ctx + + def _print_string_literal_in_array(self, s): + prnt = self._prnt + prnt('// # NB. this is not a string because of a size limit in MSVC') + if not isinstance(s, bytes): # unicode + s = s.encode('utf-8') # -> bytes + else: + s.decode('utf-8') # got bytes, check for valid utf-8 + try: + s.decode('ascii') + except UnicodeDecodeError: + s = b'# -*- encoding: utf8 -*-\n' + s + for line in s.splitlines(True): + comment = line + if type('//') is bytes: # python2 + line = map(ord, line) # make a list of integers + else: # python3 + # type(line) is bytes, which enumerates like a list of integers + comment = ascii(comment)[1:-1] + prnt(('// ' + comment).rstrip()) + printed_line = '' + for c in line: + if len(printed_line) >= 76: + prnt(printed_line) + printed_line = '' + printed_line += '%d,' % (c,) + prnt(printed_line) + + # ---------- + # emitting the opcodes for individual types + + def _emit_bytecode_VoidType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) + + def _emit_bytecode_PrimitiveType(self, tp, index): + prim_index = PRIMITIVE_TO_INDEX[tp.name] + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) + + def _emit_bytecode_UnknownIntegerType(self, tp, index): + s = ('_cffi_prim_int(sizeof(%s), (\n' + ' ((%s)-1) | 0 /* check that %s is an integer type */\n' + ' ) <= 0)' % (tp.name, tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_UnknownFloatType(self, tp, index): + s = ('_cffi_prim_float(sizeof(%s) *\n' + ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' + ' )' % (tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_RawFunctionType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) + index += 1 + for tp1 in tp.args: + realindex = self._typesdict[tp1] + if index != realindex: + if isinstance(tp1, model.PrimitiveType): + self._emit_bytecode_PrimitiveType(tp1, index) + 
else: + self.cffi_types[index] = CffiOp(OP_NOOP, realindex) + index += 1 + flags = int(tp.ellipsis) + if tp.abi is not None: + if tp.abi == '__stdcall': + flags |= 2 + else: + raise NotImplementedError("abi=%r" % (tp.abi,)) + self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) + + def _emit_bytecode_PointerType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) + + _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType + _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType + + def _emit_bytecode_FunctionPtrType(self, tp, index): + raw = tp.as_raw_function() + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) + + def _emit_bytecode_ArrayType(self, tp, index): + item_index = self._typesdict[tp.item] + if tp.length is None: + self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) + elif tp.length == '...': + raise VerificationError( + "type %s badly placed: the '...' array length can only be " + "used on global arrays or on fields of structures" % ( + str(tp).replace('/*...*/', '...'),)) + else: + assert self.cffi_types[index + 1] == 'LEN' + self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) + self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) + + def _emit_bytecode_StructType(self, tp, index): + struct_index = self._struct_unions[tp] + self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) + _emit_bytecode_UnionType = _emit_bytecode_StructType + + def _emit_bytecode_EnumType(self, tp, index): + enum_index = self._enums[tp] + self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose): + if verbose: + print("generating %s" % (target_file,)) + recompiler = Recompiler(ffi, module_name, + target_is_python=(preamble is None)) + recompiler.collect_type_table() + recompiler.collect_step_tables() + f = NativeIO() + recompiler.write_source_to_f(f, preamble) + output = f.getvalue() + try: + with open(target_file, 'r') as f1: + if f1.read(len(output) + 1) != output: + raise IOError + if verbose: + print("(already up-to-date)") + return False # already up-to-date + except IOError: + tmp_file = '%s.~%d' % (target_file, os.getpid()) + with open(tmp_file, 'w') as f1: + f1.write(output) + try: + os.rename(tmp_file, target_file) + except OSError: + os.unlink(target_file) + os.rename(tmp_file, target_file) + return True + +def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): + assert preamble is not None + return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, + verbose) + +def make_py_source(ffi, module_name, target_py_file, verbose=False): + return _make_c_or_py_source(ffi, module_name, None, target_py_file, + verbose) + +def _modname_to_file(outputdir, modname, extension): + parts = modname.split('.') + try: + os.makedirs(os.path.join(outputdir, *parts[:-1])) + except OSError: + pass + parts[-1] += extension + return os.path.join(outputdir, *parts), parts + + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... 
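# Illustration only, not part of cffi: the _patch_meth()/_unpatch_meths()
# helpers below follow a plain save-and-restore monkey-patching pattern;
# a self-contained sketch of the same idea, with made-up names:
class _Demo(object):
    def greet(self):
        return "hi"

_patchlist = []

def _patch(cls, name, new_meth):
    _patchlist.append((cls, name, getattr(cls, name)))      # remember the old one
    setattr(cls, name, new_meth)

_patch(_Demo, 'greet', lambda self: "patched")
assert _Demo().greet() == "patched"
for cls, name, old_meth in reversed(_patchlist):            # restore in reverse order
    setattr(cls, name, old_meth)
assert _Demo().greet() == "hi"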
+ +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! + from cffi._shimmed_dist_utils import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from cffi._shimmed_dist_utils import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from cffi._shimmed_dist_utils import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, + c_file=None, source_extension='.c', extradir=None, + compiler_verbose=1, target=None, debug=None, **kwds): + if not isinstance(module_name, str): + module_name = module_name.encode('ascii') + if ffi._windows_unicode: + ffi._apply_windows_unicode(kwds) + if preamble is not None: + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) + if c_file is None: + c_file, parts = _modname_to_file(tmpdir, module_name, + source_extension) + if extradir: + parts = [extradir] + parts + ext_c_file = os.path.join(*parts) + else: + ext_c_file = c_file + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + # + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) + updated = make_c_source(ffi, module_name, preamble, c_file, + verbose=compiler_verbose) + if call_c_compiler: + patchlist = [] + cwd = os.getcwd() + try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) + if compiler_verbose: + if tmpdir == '.': + msg = 'the current directory is' + else: + msg = 'setting the current directory to' + print('%s %r' % (msg, os.path.abspath(tmpdir))) + os.chdir(tmpdir) + outputfilename = ffiplatform.compile('.', ext, + compiler_verbose, debug) + finally: + os.chdir(cwd) + _unpatch_meths(patchlist) + return outputfilename + else: + return ext, updated + else: + if c_file is None: + c_file, _ = _modname_to_file(tmpdir, module_name, '.py') + updated = make_py_source(ffi, module_name, c_file, + verbose=compiler_verbose) + if call_c_compiler: + return c_file + else: + return None, updated + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/setuptools_ext.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/setuptools_ext.py new file mode 100644 index 000000000..681b49d7a --- /dev/null +++ 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/setuptools_ext.py @@ -0,0 +1,216 @@ +import os +import sys + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +def error(msg): + from cffi._shimmed_dist_utils import DistutilsSetupError + raise DistutilsSetupError(msg) + + +def execfile(filename, glob): + # We use execfile() (here rewritten for Python 3) instead of + # __import__() to load the build script. The problem with + # a normal import is that in some packages, the intermediate + # __init__.py files may already try to import the file that + # we are generating. + with open(filename) as f: + src = f.read() + src += '\n' # Python 2.6 compatibility + code = compile(src, filename, 'exec') + exec(code, glob, glob) + + +def add_cffi_module(dist, mod_spec): + from cffi.api import FFI + + if not isinstance(mod_spec, basestring): + error("argument to 'cffi_modules=...' must be a str or a list of str," + " not %r" % (type(mod_spec).__name__,)) + mod_spec = str(mod_spec) + try: + build_file_name, ffi_var_name = mod_spec.split(':') + except ValueError: + error("%r must be of the form 'path/build.py:ffi_variable'" % + (mod_spec,)) + if not os.path.exists(build_file_name): + ext = '' + rewritten = build_file_name.replace('.', '/') + '.py' + if os.path.exists(rewritten): + ext = ' (rewrite cffi_modules to [%r])' % ( + rewritten + ':' + ffi_var_name,) + error("%r does not name an existing file%s" % (build_file_name, ext)) + + mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} + execfile(build_file_name, mod_vars) + + try: + ffi = mod_vars[ffi_var_name] + except KeyError: + error("%r: object %r not found in module" % (mod_spec, + ffi_var_name)) + if not isinstance(ffi, FFI): + ffi = ffi() # maybe it's a function instead of directly an ffi + if not isinstance(ffi, FFI): + error("%r is not an FFI instance (got %r)" % (mod_spec, + type(ffi).__name__)) + if not hasattr(ffi, '_assigned_source'): + error("%r: the set_source() method was not called" % (mod_spec,)) + module_name, source, source_extension, kwds = ffi._assigned_source + if ffi._windows_unicode: + kwds = kwds.copy() + ffi._apply_windows_unicode(kwds) + + if source is None: + _add_py_module(dist, ffi, module_name) + else: + _add_c_module(dist, ffi, module_name, source, source_extension, kwds) + +def _set_py_limited_api(Extension, kwds): + """ + Add py_limited_api to kwds if setuptools >= 26 is in use. + Do not alter the setting if it already exists. + Setuptools takes care of ignoring the flag on Python 2 and PyPy. + + CPython itself should ignore the flag in a debugging version + (by not listing .abi3.so in the extensions it supports), but + it doesn't so far, creating troubles. That's why we check + for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent + of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401) + + On Windows, with CPython <= 3.4, it's better not to use py_limited_api + because virtualenv *still* doesn't copy PYTHON3.DLL on these versions. + Recently (2020) we started shipping only >= 3.5 wheels, though. So + we'll give it another try and set py_limited_api on Windows >= 3.5. 
+ """ + from cffi import recompiler + + if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount') + and recompiler.USE_LIMITED_API): + import setuptools + try: + setuptools_major_version = int(setuptools.__version__.partition('.')[0]) + if setuptools_major_version >= 26: + kwds['py_limited_api'] = True + except ValueError: # certain development versions of setuptools + # If we don't know the version number of setuptools, we + # try to set 'py_limited_api' anyway. At worst, we get a + # warning. + kwds['py_limited_api'] = True + return kwds + +def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): + # We are a setuptools extension. Need this build_ext for py_limited_api. + from setuptools.command.build_ext import build_ext + from cffi._shimmed_dist_utils import Extension, log, mkpath + from cffi import recompiler + + allsources = ['$PLACEHOLDER'] + allsources.extend(kwds.pop('sources', [])) + kwds = _set_py_limited_api(Extension, kwds) + ext = Extension(name=module_name, sources=allsources, **kwds) + + def make_mod(tmpdir, pre_run=None): + c_file = os.path.join(tmpdir, module_name + source_extension) + log.info("generating cffi module %r" % c_file) + mkpath(tmpdir) + # a setuptools-only, API-only hook: called with the "ext" and "ffi" + # arguments just before we turn the ffi into C code. To use it, + # subclass the 'distutils.command.build_ext.build_ext' class and + # add a method 'def pre_run(self, ext, ffi)'. + if pre_run is not None: + pre_run(ext, ffi) + updated = recompiler.make_c_source(ffi, module_name, source, c_file) + if not updated: + log.info("already up-to-date") + return c_file + + if dist.ext_modules is None: + dist.ext_modules = [] + dist.ext_modules.append(ext) + + base_class = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class): + def run(self): + if ext.sources[0] == '$PLACEHOLDER': + pre_run = getattr(self, 'pre_run', None) + ext.sources[0] = make_mod(self.build_temp, pre_run) + base_class.run(self) + dist.cmdclass['build_ext'] = build_ext_make_mod + # NB. multiple runs here will create multiple 'build_ext_make_mod' + # classes. Even in this case the 'build_ext' command should be + # run once; but just in case, the logic above does nothing if + # called again. + + +def _add_py_module(dist, ffi, module_name): + from setuptools.command.build_py import build_py + from setuptools.command.build_ext import build_ext + from cffi._shimmed_dist_utils import log, mkpath + from cffi import recompiler + + def generate_mod(py_file): + log.info("generating cffi module %r" % py_file) + mkpath(os.path.dirname(py_file)) + updated = recompiler.make_py_source(ffi, module_name, py_file) + if not updated: + log.info("already up-to-date") + + base_class = dist.cmdclass.get('build_py', build_py) + class build_py_make_mod(base_class): + def run(self): + base_class.run(self) + module_path = module_name.split('.') + module_path[-1] += '.py' + generate_mod(os.path.join(self.build_lib, *module_path)) + def get_source_files(self): + # This is called from 'setup.py sdist' only. Exclude + # the generate .py module in this case. + saved_py_modules = self.py_modules + try: + if saved_py_modules: + self.py_modules = [m for m in saved_py_modules + if m != module_name] + return base_class.get_source_files(self) + finally: + self.py_modules = saved_py_modules + dist.cmdclass['build_py'] = build_py_make_mod + + # distutils and setuptools have no notion I could find of a + # generated python module. 
If we don't add module_name to + # dist.py_modules, then things mostly work but there are some + # combination of options (--root and --record) that will miss + # the module. So we add it here, which gives a few apparently + # harmless warnings about not finding the file outside the + # build directory. + # Then we need to hack more in get_source_files(); see above. + if dist.py_modules is None: + dist.py_modules = [] + dist.py_modules.append(module_name) + + # the following is only for "build_ext -i" + base_class_2 = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class_2): + def run(self): + base_class_2.run(self) + if self.inplace: + # from get_ext_fullpath() in distutils/command/build_ext.py + module_path = module_name.split('.') + package = '.'.join(module_path[:-1]) + build_py = self.get_finalized_command('build_py') + package_dir = build_py.get_package_dir(package) + file_name = module_path[-1] + '.py' + generate_mod(os.path.join(package_dir, file_name)) + dist.cmdclass['build_ext'] = build_ext_make_mod + +def cffi_modules(dist, attr, value): + assert attr == 'cffi_modules' + if isinstance(value, basestring): + value = [value] + + for cffi_module in value: + add_cffi_module(dist, cffi_module) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/vengine_cpy.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/vengine_cpy.py new file mode 100644 index 000000000..49727d36e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/vengine_cpy.py @@ -0,0 +1,1077 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys +from . import model +from .error import VerificationError +from . import _imp_emulation as imp + + +class VCPythonEngine(object): + _class_key = 'x' + _gen_python_module = True + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self._struct_pending_verification = {} + self._types_of_builtin_functions = {} + + def patch_extension_kwds(self, kwds): + pass + + def find_module(self, module_name, path, so_suffixes): + try: + f, filename, descr = imp.find_module(module_name, path) + except ImportError: + return None + if f is not None: + f.close() + # Note that after a setuptools installation, there are both .py + # and .so files with the same basename. The code here relies on + # imp.find_module() locating the .so in priority. + if descr[0] not in so_suffixes: + return None + return filename + + def collect_types(self): + self._typesdict = {} + self._generate("collecttype") + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! :-) + return self._typesdict[type] + + def _do_collect_type(self, tp): + if ((not isinstance(tp, model.PrimitiveType) + or tp.name == 'long double') + and tp not in self._typesdict): + num = len(self._typesdict) + self._typesdict[tp] = num + + def write_source_to_f(self): + self.collect_types() + # + # The new module will have a _cffi_setup() function that receives + # objects from the ffi world, and that calls some setup code in + # the module. This setup code is split in several independent + # functions, e.g. one per constant. The functions are "chained" + # by ending in a tail call to each other. + # + # This is further split in two chained lists, depending on if we + # can do it at import-time or if we must wait for _cffi_setup() to + # provide us with the objects. 
This is needed because we + # need the values of the enum constants in order to build the + # that we may have to pass to _cffi_setup(). + # + # The following two 'chained_list_constants' items contains + # the head of these two chained lists, as a string that gives the + # call to do, if any. + self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)'] + # + prnt = self._prnt + # first paste some standard set of lines that are mostly '#define' + prnt(cffimod_header) + prnt() + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._generate("decl") + # + # implement the function _cffi_setup_custom() as calling the + # head of the chained list. + self._generate_setup_custom() + prnt() + # + # produce the method table, including the entries for the + # generated Python->C function wrappers, which are done + # by generate_cpy_function_method(). + prnt('static PyMethodDef _cffi_methods[] = {') + self._generate("method") + prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},') + prnt(' {NULL, NULL, 0, NULL} /* Sentinel */') + prnt('};') + prnt() + # + # standard init. + modname = self.verifier.get_module_name() + constants = self._chained_list_constants[False] + prnt('#if PY_MAJOR_VERSION >= 3') + prnt() + prnt('static struct PyModuleDef _cffi_module_def = {') + prnt(' PyModuleDef_HEAD_INIT,') + prnt(' "%s",' % modname) + prnt(' NULL,') + prnt(' -1,') + prnt(' _cffi_methods,') + prnt(' NULL, NULL, NULL, NULL') + prnt('};') + prnt() + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = PyModule_Create(&_cffi_module_def);') + prnt(' if (lib == NULL)') + prnt(' return NULL;') + prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,)) + prnt(' Py_DECREF(lib);') + prnt(' return NULL;') + prnt(' }') + prnt(' return lib;') + prnt('}') + prnt() + prnt('#else') + prnt() + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname) + prnt(' if (lib == NULL)') + prnt(' return;') + prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,)) + prnt(' return;') + prnt(' return;') + prnt('}') + prnt() + prnt('#endif') + + def load_library(self, flags=None): + # XXX review all usages of 'self' here! + # import it as a new extension module + imp.acquire_lock() + try: + if hasattr(sys, "getdlopenflags"): + previous_flags = sys.getdlopenflags() + try: + if hasattr(sys, "setdlopenflags") and flags is not None: + sys.setdlopenflags(flags) + module = imp.load_dynamic(self.verifier.get_module_name(), + self.verifier.modulefilename) + except ImportError as e: + error = "importing %r: %s" % (self.verifier.modulefilename, e) + raise VerificationError(error) + finally: + if hasattr(sys, "setdlopenflags"): + sys.setdlopenflags(previous_flags) + finally: + imp.release_lock() + # + # call loading_cpy_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + # + # the C code will need the objects. Collect them in + # order in a list. + revmapping = dict([(value, key) + for (key, value) in self._typesdict.items()]) + lst = [revmapping[i] for i in range(len(revmapping))] + lst = list(map(self.ffi._get_cached_btype, lst)) + # + # build the FFILibrary class and instance and call _cffi_setup(). 
+ # this will set up some fields like '_cffi_types', and only then + # it will invoke the chained list of functions that will really + # build (notably) the constant objects, as if they are + # pointers, and store them as attributes on the 'library' object. + class FFILibrary(object): + _cffi_python_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + list(self.__dict__) + library = FFILibrary() + if module._cffi_setup(lst, VerificationError, library): + import warnings + warnings.warn("reimporting %r might overwrite older definitions" + % (self.verifier.get_module_name())) + # + # finally, call the loaded_cpy_xxx() functions. This will perform + # the final adjustments, like copying the Python->C wrapper + # functions from the module to the 'library' object, and setting + # up the FFILibrary class with properties for the global C variables. + self._load(module, 'loaded', library=library) + module._cffi_original_ffi = self.ffi + module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + 
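# Illustration only, not part of cffi: _generate() and _load() above pick their
# per-declaration handlers by building the method name out of the declaration
# kind and the current step; the same dispatch in isolation, with an invented
# class and data:
class _Walker(object):
    def _generate_cpy_typedef_decl(self, tp, name):
        print("would emit the decl for typedef %r" % (name,))

    def run(self, step_name, declarations):
        for kind, realname, tp in declarations:
            method = getattr(self, '_generate_cpy_%s_%s' % (kind, step_name))
            method(tp, realname)

_Walker().run('decl', [('typedef', 'my_int_t', None)])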
def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? ' + 'alloca((size_t)datasize) : NULL;' % (tovar,)) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs: generates no code so far + + _generate_cpy_typedef_collecttype = _generate_nothing + _generate_cpy_typedef_decl = _generate_nothing + _generate_cpy_typedef_method = _generate_nothing + _loading_cpy_typedef = _loaded_noop + _loaded_cpy_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + self._do_collect_type(tp) + else: + # don't call _do_collect_type(tp) in this common case, + # otherwise test_autofilled_struct_as_argument fails + for type in tp.args: + self._do_collect_type(type) + self._do_collect_type(tp.result) + + def _generate_cpy_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + prnt(' %s;' % type.get_c_name(' x%d' % i, context)) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + prnt(' %s;' % tp.result.get_c_name(' result', context)) + prnt(' PyObject 
*pyresult;') + else: + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( + 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + prnt(' { %s%s(%s); }' % ( + result_code, name, + ', '.join(['x%d' % i for i in range(len(tp.args))]))) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + prnt() + + def _generate_cpy_function_method(self, tp, name): + if tp.ellipsis: + return + numargs = len(tp.args) + if numargs == 0: + meth = 'METH_NOARGS' + elif numargs == 1: + meth = 'METH_O' + else: + meth = 'METH_VARARGS' + self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) + + _loading_cpy_function = _loaded_noop + + def _loaded_cpy_function(self, tp, name, module, library): + if tp.ellipsis: + return + func = getattr(module, name) + setattr(library, name, func) + self._types_of_builtin_functions[func] = tp + + # ---------- + # named structs + + _generate_cpy_struct_collecttype = _generate_nothing + def _generate_cpy_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + def _generate_cpy_struct_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'struct', name) + def _loading_cpy_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + def _loaded_cpy_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + _generate_cpy_union_collecttype = _generate_nothing + def _generate_cpy_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + def _generate_cpy_union_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'union', name) + def _loading_cpy_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + def _loaded_cpy_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('static PyObject *') + prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static Py_ssize_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' (void)self; /* unused */') + prnt(' (void)noarg; /* unused */') + prnt(' return _cffi_get_struct_layout(nums);') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _generate_struct_or_union_method(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, + layoutfuncname)) + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + function = getattr(module, layoutfuncname) + layout = function() + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
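# Illustration only, not part of cffi: the 'anonymous' handlers below are
# reached by cdef'd structs or unions that carry no tag and are named only
# through a typedef, roughly like this (the name point_t is made up):
from cffi import FFI

_ffi_example = FFI()
_ffi_example.cdef("typedef struct { int x, y; } point_t;")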
+ + _generate_cpy_anonymous_collecttype = _generate_nothing + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _generate_cpy_anonymous_method(self, tp, name): + if not isinstance(tp, model.EnumType): + self._generate_struct_or_union_method(tp, '', name) + + def _loading_cpy_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_cpy_enum(tp, name, module) + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_cpy_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_cpy_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + vartp=None, delayed=True, size_too=False, + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + prnt(' PyObject *o;') + prnt(' int res;') + if not is_int: + prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) + else: + assert category == 'const' + # + if check_value is not None: + self._check_int_constant_value(name, check_value) + # + if not is_int: + if category == 'var': + realexpr = '&' + name + else: + realexpr = name + prnt(' i = (%s);' % (realexpr,)) + prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', + 'variable type'),)) + assert delayed + else: + prnt(' o = _cffi_from_c_int_const(%s);' % name) + prnt(' if (o == NULL)') + prnt(' return -1;') + if size_too: + prnt(' {') + prnt(' PyObject *o1 = o;') + prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' + % (name,)) + prnt(' Py_DECREF(o1);') + prnt(' if (o == NULL)') + prnt(' return -1;') + prnt(' }') + prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) + prnt(' Py_DECREF(o);') + prnt(' if (res < 0)') + prnt(' return -1;') + prnt(' return %s;' % self._chained_list_constants[delayed]) + self._chained_list_constants[delayed] = funcname + '(lib)' + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + if not is_int: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + _generate_cpy_constant_method = _generate_nothing + _loading_cpy_constant = _loaded_noop + _loaded_cpy_constant = _loaded_noop + + # ---------- + # enums + + def _check_int_constant_value(self, name, value, err_prefix=''): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + name) + prnt(' PyErr_Format(_cffi_VerificationError,') + prnt(' "%s%s has the real value %s, not %s",') + prnt(' "%s", "%s", buf, "%d");' % ( + err_prefix, name, value)) + prnt(' return -1;') + prnt(' }') + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def 
_generate_cpy_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator, delayed=False) + return + # + funcname = self._enum_funcname(prefix, name) + prnt = self._prnt + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue, + "enum %s: " % name) + prnt(' return %s;' % self._chained_list_constants[True]) + self._chained_list_constants[True] = funcname + '(lib)' + prnt('}') + prnt() + + _generate_cpy_enum_collecttype = _generate_nothing + _generate_cpy_enum_method = _generate_nothing + + def _loading_cpy_enum(self, tp, name, module): + if tp.partial: + enumvalues = [getattr(module, enumerator) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + + def _loaded_cpy_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + _generate_cpy_macro_collecttype = _generate_nothing + _generate_cpy_macro_method = _generate_nothing + _loading_cpy_macro = _loaded_noop + _loaded_cpy_macro = _loaded_noop + + # ---------- + # global variables + + def _generate_cpy_variable_collecttype(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + else: + tp_ptr = model.PointerType(tp) + self._do_collect_type(tp_ptr) + + def _generate_cpy_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + self._generate_cpy_const(False, name, tp, vartp=tp_ptr, + size_too = tp.length_is_unknown()) + else: + tp_ptr = model.PointerType(tp) + self._generate_cpy_const(False, name, tp_ptr, category='var') + + _generate_cpy_variable_method = _generate_nothing + _loading_cpy_variable = _loaded_noop + + def _loaded_cpy_variable(self, tp, name, module, library): + value = getattr(library, name) + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." is forbidden + if tp.length_is_unknown(): + assert isinstance(value, tuple) + (value, size) = value + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + # 'value' is a which we have to replace with + # a if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + return + # remove ptr= from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. 
+ ptr = value + delattr(library, name) + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + + # ---------- + + def _generate_setup_custom(self): + prnt = self._prnt + prnt('static int _cffi_setup_custom(PyObject *lib)') + prnt('{') + prnt(' return %s;' % self._chained_list_constants[True]) + prnt('}') + +cffimod_header = r''' +#include +#include + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif + +#if PY_MAJOR_VERSION < 3 +# undef PyCapsule_CheckExact +# undef PyCapsule_GetPointer +# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule)) +# define PyCapsule_GetPointer(capsule, name) \ + (PyCObject_AsVoidPtr(capsule)) +#endif + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int_const(x) \ + (((x) > 0) ? \ + ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \ + ((long long)(x) >= (long long)LONG_MIN) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromLongLong((long long)(x))) + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? 
(((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_NUM_EXPORTS 25 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; +static PyObject *_cffi_types, *_cffi_VerificationError; + +static int _cffi_setup_custom(PyObject *lib); /* forward */ + +static PyObject *_cffi_setup(PyObject *self, PyObject *args) +{ + PyObject *library; + int was_alive = (_cffi_types != NULL); + (void)self; /* unused */ + if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, + &library)) + return NULL; + Py_INCREF(_cffi_types); + Py_INCREF(_cffi_VerificationError); + if (_cffi_setup_custom(library) < 0) + return NULL; + return PyBool_FromLong(was_alive); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +#ifdef __GNUC__ + __attribute__((unused)) +#endif +static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + 
p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +#ifdef __GNUC__ + __attribute__((unused)) +#endif +static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +static int _cffi_init(void) +{ + PyObject *module, *c_api_object = NULL; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + c_api_object = PyObject_GetAttrString(module, "_C_API"); + if (c_api_object == NULL) + goto failure; + if (!PyCapsule_CheckExact(c_api_object)) { + PyErr_SetNone(PyExc_ImportError); + goto failure; + } + memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), + _CFFI_NUM_EXPORTS * sizeof(void *)); + + Py_DECREF(module); + Py_DECREF(c_api_object); + return 0; + + failure: + Py_XDECREF(module); + Py_XDECREF(c_api_object); + return -1; +} + +#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) + +/**********/ +''' diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/vengine_gen.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/vengine_gen.py new file mode 100644 index 000000000..26421526f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/vengine_gen.py @@ -0,0 +1,675 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os +import types + +from . import model +from .error import VerificationError + + +class VGenericEngine(object): + _class_key = 'g' + _gen_python_module = False + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self.export_symbols = [] + self._struct_pending_verification = {} + + def patch_extension_kwds(self, kwds): + # add 'export_symbols' to the dictionary. Note that we add the + # list before filling it. When we fill it, it will thus also show + # up in kwds['export_symbols']. + kwds.setdefault('export_symbols', self.export_symbols) + + def find_module(self, module_name, path, so_suffixes): + for so_suffix in so_suffixes: + basename = module_name + so_suffix + if path is None: + path = sys.path + for dirname in path: + filename = os.path.join(dirname, basename) + if os.path.isfile(filename): + return filename + + def collect_types(self): + pass # not needed in the generic engine + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self): + prnt = self._prnt + # first paste some standard set of lines that are mostly '#include' + prnt(cffimod_header) + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + # + # call generate_gen_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
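+        # (note: self._generate('decl') below dispatches on the declaration
+        #  kind and calls the matching '_generate_gen_<kind>_decl' method)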
+ self._generate('decl') + # + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + if sys.platform == 'win32': + if sys.version_info >= (3,): + prefix = 'PyInit_' + else: + prefix = 'init' + modname = self.verifier.get_module_name() + prnt("void %s%s(void) { }\n" % (prefix, modname)) + + def load_library(self, flags=0): + # import it with the CFFI backend + backend = self.ffi._backend + # needs to make a path that contains '/', on Posix + filename = os.path.join(os.curdir, self.verifier.modulefilename) + module = backend.load_library(filename, flags) + # + # call loading_gen_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + + # build the FFILibrary class and instance, this is a module subclass + # because modules are expected to have usually-constant-attributes and + # in PyPy this means the JIT is able to treat attributes as constant, + # which we want. + class FFILibrary(types.ModuleType): + _cffi_generic_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + library = FFILibrary("") + # + # finally, call the loaded_gen_xxx() functions. This will set + # up the 'library' object. + self._load(module, 'loaded', library=library) + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_gen_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_gen_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + # typedefs: generates no code so far + + _generate_gen_typedef_decl = _generate_nothing + _loading_gen_typedef = _loaded_noop + _loaded_gen_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_gen_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no _cffi_f_%s wrapper) + self._generate_gen_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + argnames = [] + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + argnames.append('%sx%d' % (indirection, i)) + context = 'argument of %s' % name + arglist = [type.get_c_name(' %s' % arg, context) + for type, arg in zip(tp.args, argnames)] + tpresult = tp.result + if isinstance(tpresult, model.StructOrUnion): + arglist.insert(0, tpresult.get_c_name(' *r', context)) + tpresult = model.void_type + arglist = ', '.join(arglist) or 'void' + wrappername = '_cffi_f_%s' % name + self.export_symbols.append(wrappername) + if tp.abi: + abi = 
tp.abi + ' ' + else: + abi = '' + funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) + context = 'result of %s' % name + prnt(tpresult.get_c_name(funcdecl, context)) + prnt('{') + # + if isinstance(tp.result, model.StructOrUnion): + result_code = '*r = ' + elif not isinstance(tp.result, model.VoidType): + result_code = 'return ' + else: + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) + prnt('}') + prnt() + + _loading_gen_function = _loaded_noop + + def _loaded_gen_function(self, tp, name, module, library): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + newfunction = self._load_constant(False, tp, name, module) + else: + indirections = [] + base_tp = tp + if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) + or isinstance(tp.result, model.StructOrUnion)): + indirect_args = [] + for i, typ in enumerate(tp.args): + if isinstance(typ, model.StructOrUnion): + typ = model.PointerType(typ) + indirections.append((i, typ)) + indirect_args.append(typ) + indirect_result = tp.result + if isinstance(indirect_result, model.StructOrUnion): + if indirect_result.fldtypes is None: + raise TypeError("'%s' is used as result type, " + "but is opaque" % ( + indirect_result._get_c_name(),)) + indirect_result = model.PointerType(indirect_result) + indirect_args.insert(0, indirect_result) + indirections.insert(0, ("result", indirect_result)) + indirect_result = model.void_type + tp = model.FunctionPtrType(tuple(indirect_args), + indirect_result, tp.ellipsis) + BFunc = self.ffi._get_cached_btype(tp) + wrappername = '_cffi_f_%s' % name + newfunction = module.load_function(BFunc, wrappername) + for i, typ in indirections: + newfunction = self._make_struct_wrapper(newfunction, i, typ, + base_tp) + setattr(library, name, newfunction) + type(library)._cffi_dir.append(name) + + def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): + backend = self.ffi._backend + BType = self.ffi._get_cached_btype(tp) + if i == "result": + ffi = self.ffi + def newfunc(*args): + res = ffi.new(BType) + oldfunc(res, *args) + return res[0] + else: + def newfunc(*args): + args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] + return oldfunc(*args) + newfunc._cffi_base_type = base_tp + return newfunc + + # ---------- + # named structs + + def _generate_gen_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + + def _loading_gen_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + + def _loaded_gen_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_gen_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + + def _loading_gen_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + + def _loaded_gen_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + 
and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. + try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + self.export_symbols.append(layoutfuncname) + prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static intptr_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' return nums[i];') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] + function = module.load_function(BFunc, layoutfuncname) + layout = [] + num = 0 + while True: + x = function(num) + if x < 0: break + layout.append(x) + num += 1 + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
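+    # (for instance, "typedef struct { int x; } foo_t;" produces an anonymous
+    #  struct that is only reachable through the typedef name 'foo_t')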
+ + def _generate_gen_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_gen_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _loading_gen_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_gen_enum(tp, name, module, '') + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_gen_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_gen_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_gen_const(self, is_int, name, tp=None, category='const', + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + self.export_symbols.append(funcname) + if check_value is not None: + assert is_int + assert category == 'const' + prnt('int %s(char *out_error)' % funcname) + prnt('{') + self._check_int_constant_value(name, check_value) + prnt(' return 0;') + prnt('}') + elif is_int: + assert category == 'const' + prnt('int %s(long long *out_value)' % funcname) + prnt('{') + prnt(' *out_value = (long long)(%s);' % (name,)) + prnt(' return (%s) <= 0;' % (name,)) + prnt('}') + else: + assert tp is not None + assert check_value is None + if category == 'var': + ampersand = '&' + else: + ampersand = '' + extra = '' + if category == 'const' and isinstance(tp, model.StructOrUnion): + extra = 'const *' + ampersand = '&' + prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) + prnt('{') + prnt(' return (%s%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_gen_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_gen_const(is_int, name, tp) + + _loading_gen_constant = _loaded_noop + + def _load_constant(self, is_int, tp, name, module, check_value=None): + funcname = '_cffi_const_%s' % name + if check_value is not None: + assert is_int + self._load_known_int_constant(module, funcname) + value = check_value + elif is_int: + BType = self.ffi._typeof_locked("long long*")[0] + BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType) + negative = function(p) + value = int(p[0]) + if value < 0 and not negative: + BLongLong = self.ffi._typeof_locked("long long")[0] + value += (1 << (8*self.ffi.sizeof(BLongLong))) + else: + assert check_value is None + fntypeextra = '(*)(void)' + if isinstance(tp, model.StructOrUnion): + fntypeextra = '*' + fntypeextra + BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] + function = module.load_function(BFunc, funcname) + value = function() + if isinstance(tp, model.StructOrUnion): + value = value[0] + return value + + def _loaded_gen_constant(self, tp, name, module, library): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + value = self._load_constant(is_int, tp, name, module) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # enums + + def _check_int_constant_value(self, name, value): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' sprintf(buf, "%%lu", 
(unsigned long)(%s));' % + name) + prnt(' sprintf(out_error, "%s has the real value %s, not %s",') + prnt(' "%s", buf, "%d");' % (name[:100], value)) + prnt(' return -1;') + prnt(' }') + + def _load_known_int_constant(self, module, funcname): + BType = self.ffi._typeof_locked("char[]")[0] + BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType, 256) + if function(p) < 0: + error = self.ffi.string(p) + if sys.version_info >= (3,): + error = str(error, 'utf-8') + raise VerificationError(error) + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_gen_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_gen_const(True, enumerator) + return + # + funcname = self._enum_funcname(prefix, name) + self.export_symbols.append(funcname) + prnt = self._prnt + prnt('int %s(char *out_error)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue) + prnt(' return 0;') + prnt('}') + prnt() + + def _loading_gen_enum(self, tp, name, module, prefix='enum'): + if tp.partial: + enumvalues = [self._load_constant(True, tp, enumerator, module) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + else: + funcname = self._enum_funcname(prefix, name) + self._load_known_int_constant(module, funcname) + + def _loaded_gen_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + type(library)._cffi_dir.append(enumerator) + + # ---------- + # macros: for now only for integers + + def _generate_gen_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_gen_const(True, name, check_value=check_value) + + _loading_gen_macro = _loaded_noop + + def _loaded_gen_macro(self, tp, name, module, library): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + value = self._load_constant(True, tp, name, module, + check_value=check_value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # global variables + + def _generate_gen_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + if tp.length_is_unknown(): + prnt = self._prnt + funcname = '_cffi_sizeof_%s' % (name,) + self.export_symbols.append(funcname) + prnt("size_t %s(void)" % funcname) + prnt("{") + prnt(" return sizeof(%s);" % (name,)) + prnt("}") + tp_ptr = model.PointerType(tp.item) + self._generate_gen_const(False, name, tp_ptr) + else: + tp_ptr = model.PointerType(tp) + self._generate_gen_const(False, name, tp_ptr, category='var') + + _loading_gen_variable = _loaded_noop + + def _loaded_gen_variable(self, tp, name, module, library): + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." 
is forbidden + if tp.length_is_unknown(): + funcname = '_cffi_sizeof_%s' % (name,) + BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0] + function = module.load_function(BFunc, funcname) + size = function() + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + tp_ptr = model.PointerType(tp.item) + value = self._load_constant(False, tp_ptr, name, module) + # 'value' is a which we have to replace with + # a if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + return + # remove ptr= from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. + funcname = '_cffi_var_%s' % name + BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0] + function = module.load_function(BFunc, funcname) + ptr = function() + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + +cffimod_header = r''' +#include +#include +#include +#include +#include /* XXX for ssize_t on some platforms */ + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif +''' diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/verifier.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/verifier.py new file mode 100644 index 000000000..e392a2b7f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/cffi/verifier.py @@ -0,0 +1,306 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os, binascii, shutil, io +from . import __version_verifier_modules__ +from . 
import ffiplatform +from .error import VerificationError + +if sys.version_info >= (3, 3): + import importlib.machinery + def _extension_suffixes(): + return importlib.machinery.EXTENSION_SUFFIXES[:] +else: + import imp + def _extension_suffixes(): + return [suffix for suffix, _, type in imp.get_suffixes() + if type == imp.C_EXTENSION] + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + + +class Verifier(object): + + def __init__(self, ffi, preamble, tmpdir=None, modulename=None, + ext_package=None, tag='', force_generic_engine=False, + source_extension='.c', flags=None, relative_to=None, **kwds): + if ffi._parser._uses_new_feature: + raise VerificationError( + "feature not supported with ffi.verify(), but only " + "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) + self.ffi = ffi + self.preamble = preamble + if not modulename: + flattened_kwds = ffiplatform.flatten(kwds) + vengine_class = _locate_engine_class(ffi, force_generic_engine) + self._vengine = vengine_class(self) + self._vengine.patch_extension_kwds(kwds) + self.flags = flags + self.kwds = self.make_relative_to(kwds, relative_to) + # + if modulename: + if tag: + raise TypeError("can't specify both 'modulename' and 'tag'") + else: + key = '\x00'.join(['%d.%d' % sys.version_info[:2], + __version_verifier_modules__, + preamble, flattened_kwds] + + ffi._cdefsources) + if sys.version_info >= (3,): + key = key.encode('utf-8') + k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) + k1 = k1.lstrip('0x').rstrip('L') + k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) + k2 = k2.lstrip('0').rstrip('L') + modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, + k1, k2) + suffix = _get_so_suffixes()[0] + self.tmpdir = tmpdir or _caller_dir_pycache() + self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) + self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) + self.ext_package = ext_package + self._has_source = False + self._has_module = False + + def write_source(self, file=None): + """Write the C source code. It is produced in 'self.sourcefilename', + which can be tweaked beforehand.""" + with self.ffi._lock: + if self._has_source and file is None: + raise VerificationError( + "source code already written") + self._write_source(file) + + def compile_module(self): + """Write the C source code (if not done already) and compile it. + This produces a dynamic link library in 'self.modulefilename'.""" + with self.ffi._lock: + if self._has_module: + raise VerificationError("module already compiled") + if not self._has_source: + self._write_source() + self._compile_module() + + def load_library(self): + """Get a C module from this Verifier instance. + Returns an instance of a FFILibrary class that behaves like the + objects returned by ffi.dlopen(), but that delegates all + operations to the C module. If necessary, the C code is written + and compiled first. 
+ """ + with self.ffi._lock: + if not self._has_module: + self._locate_module() + if not self._has_module: + if not self._has_source: + self._write_source() + self._compile_module() + return self._load_library() + + def get_module_name(self): + basename = os.path.basename(self.modulefilename) + # kill both the .so extension and the other .'s, as introduced + # by Python 3: 'basename.cpython-33m.so' + basename = basename.split('.', 1)[0] + # and the _d added in Python 2 debug builds --- but try to be + # conservative and not kill a legitimate _d + if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): + basename = basename[:-2] + return basename + + def get_extension(self): + if not self._has_source: + with self.ffi._lock: + if not self._has_source: + self._write_source() + sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) + modname = self.get_module_name() + return ffiplatform.get_extension(sourcename, modname, **self.kwds) + + def generates_python_module(self): + return self._vengine._gen_python_module + + def make_relative_to(self, kwds, relative_to): + if relative_to and os.path.dirname(relative_to): + dirname = os.path.dirname(relative_to) + kwds = kwds.copy() + for key in ffiplatform.LIST_OF_FILE_NAMES: + if key in kwds: + lst = kwds[key] + if not isinstance(lst, (list, tuple)): + raise TypeError("keyword '%s' should be a list or tuple" + % (key,)) + lst = [os.path.join(dirname, fn) for fn in lst] + kwds[key] = lst + return kwds + + # ---------- + + def _locate_module(self): + if not os.path.isfile(self.modulefilename): + if self.ext_package: + try: + pkg = __import__(self.ext_package, None, None, ['__doc__']) + except ImportError: + return # cannot import the package itself, give up + # (e.g. it might be called differently before installation) + path = pkg.__path__ + else: + path = None + filename = self._vengine.find_module(self.get_module_name(), path, + _get_so_suffixes()) + if filename is None: + return + self.modulefilename = filename + self._vengine.collect_types() + self._has_module = True + + def _write_source_to(self, file): + self._vengine._f = file + try: + self._vengine.write_source_to_f() + finally: + del self._vengine._f + + def _write_source(self, file=None): + if file is not None: + self._write_source_to(file) + else: + # Write our source file to an in memory file. 
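+            # (the in-memory copy is compared against any existing file below,
+            #  so the source on disk is only rewritten when its content changed)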
+ f = NativeIO() + self._write_source_to(f) + source_data = f.getvalue() + + # Determine if this matches the current file + if os.path.exists(self.sourcefilename): + with open(self.sourcefilename, "r") as fp: + needs_written = not (fp.read() == source_data) + else: + needs_written = True + + # Actually write the file out if it doesn't match + if needs_written: + _ensure_dir(self.sourcefilename) + with open(self.sourcefilename, "w") as fp: + fp.write(source_data) + + # Set this flag + self._has_source = True + + def _compile_module(self): + # compile this C source + tmpdir = os.path.dirname(self.sourcefilename) + outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) + try: + same = ffiplatform.samefile(outputfilename, self.modulefilename) + except OSError: + same = False + if not same: + _ensure_dir(self.modulefilename) + shutil.move(outputfilename, self.modulefilename) + self._has_module = True + + def _load_library(self): + assert self._has_module + if self.flags is not None: + return self._vengine.load_library(self.flags) + else: + return self._vengine.load_library() + +# ____________________________________________________________ + +_FORCE_GENERIC_ENGINE = False # for tests + +def _locate_engine_class(ffi, force_generic_engine): + if _FORCE_GENERIC_ENGINE: + force_generic_engine = True + if not force_generic_engine: + if '__pypy__' in sys.builtin_module_names: + force_generic_engine = True + else: + try: + import _cffi_backend + except ImportError: + _cffi_backend = '?' + if ffi._backend is not _cffi_backend: + force_generic_engine = True + if force_generic_engine: + from . import vengine_gen + return vengine_gen.VGenericEngine + else: + from . import vengine_cpy + return vengine_cpy.VCPythonEngine + +# ____________________________________________________________ + +_TMPDIR = None + +def _caller_dir_pycache(): + if _TMPDIR: + return _TMPDIR + result = os.environ.get('CFFI_TMPDIR') + if result: + return result + filename = sys._getframe(2).f_code.co_filename + return os.path.abspath(os.path.join(os.path.dirname(filename), + '__pycache__')) + +def set_tmpdir(dirname): + """Set the temporary directory to use instead of __pycache__.""" + global _TMPDIR + _TMPDIR = dirname + +def cleanup_tmpdir(tmpdir=None, keep_so=False): + """Clean up the temporary directory by removing all files in it + called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" + tmpdir = tmpdir or _caller_dir_pycache() + try: + filelist = os.listdir(tmpdir) + except OSError: + return + if keep_so: + suffix = '.c' # only remove .c files + else: + suffix = _get_so_suffixes()[0].lower() + for fn in filelist: + if fn.lower().startswith('_cffi_') and ( + fn.lower().endswith(suffix) or fn.lower().endswith('.c')): + try: + os.unlink(os.path.join(tmpdir, fn)) + except OSError: + pass + clean_dir = [os.path.join(tmpdir, 'build')] + for dir in clean_dir: + try: + for fn in os.listdir(dir): + fn = os.path.join(dir, fn) + if os.path.isdir(fn): + clean_dir.append(fn) + else: + os.unlink(fn) + except OSError: + pass + +def _get_so_suffixes(): + suffixes = _extension_suffixes() + if not suffixes: + # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext + if sys.platform == 'win32': + suffixes = [".pyd"] + else: + suffixes = [".so"] + + return suffixes + +def _ensure_dir(filename): + dirname = os.path.dirname(filename) + if dirname and not os.path.isdir(dirname): + os.makedirs(dirname) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/INSTALLER b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/INSTALLER new file mode 100644 index 000000000..ce3133dc1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/INSTALLER @@ -0,0 +1 @@ +Poetry 2.2.1 \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/LICENSE b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/LICENSE new file mode 100644 index 000000000..ad82355b8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 TAHRI Ahmed R. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/METADATA b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/METADATA new file mode 100644 index 000000000..822550e36 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/METADATA @@ -0,0 +1,683 @@ +Metadata-Version: 2.1 +Name: charset-normalizer +Version: 3.3.2 +Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet. 
+Home-page: https://github.com/Ousret/charset_normalizer +Author: Ahmed TAHRI +Author-email: ahmed.tahri@cloudnursery.dev +License: MIT +Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues +Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest +Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Intended Audience :: Developers +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Text Processing :: Linguistic +Classifier: Topic :: Utilities +Classifier: Typing :: Typed +Requires-Python: >=3.7.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: unicode_backport + +

+# Charset Detection, for Everyone 👋
+
+The Real First Universal Charset Detector
+
+(Badge images omitted: Download Count Total, Featured Packages, community ports)
+
+> A library that helps you read text from an unknown charset encoding. Motivated by `chardet`,
+> I'm trying to resolve the issue by taking a new approach.
+> All IANA character set names for which the Python core library provides codecs are supported.
+
+>>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<<
+
+This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
+
+| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
+|--------------------------------------------------|:---------------------------------------------:|:------------------:|:-----------------------------------------------:|
+| `Fast` | ❌ | ✅ | ✅ |
+| `Universal**` | ❌ | ✅ | ❌ |
+| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
+| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
+| `License` | LGPL-2.1 _restrictive_ | MIT | MPL-1.1 _restrictive_ |
+| `Native Python` | ✅ | ✅ | ❌ |
+| `Detect spoken language` | ❌ | ✅ | N/A |
+| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ |
+| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |
+| `Supported Encoding` | 33 | 🎉 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |

+(Illustration images omitted: "Reading Normalized Text", "Cat Reading Text")
+
+*\*\* : They are clearly using specific code for a specific encoding even if covering most of used one*
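+As a minimal sketch (not part of the packaged README, shown here only as an
+illustration): the `from_bytes()` helper, the bytes-oriented counterpart of the
+`from_path()` example further down, can be used roughly like this.
+
+```python
+from charset_normalizer import from_bytes
+
+# Some non-ASCII text encoded with a legacy single-byte code page.
+payload = "Déjà vu : café, naïveté, crème brûlée.".encode("cp1252")
+
+matches = from_bytes(payload)   # a CharsetMatches container
+best_guess = matches.best()     # best CharsetMatch, or None if nothing fit
+if best_guess is not None:
+    print(best_guess.encoding)  # e.g. 'cp1252' or a compatible encoding
+    print(str(best_guess))      # the decoded, readable text
+```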
    +Did you got there because of the logs? See [https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html](https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html) + +## ⚡ Performance + +This package offer better performance than its counterpart Chardet. Here are some numbers. + +| Package | Accuracy | Mean per file (ms) | File per sec (est) | +|-----------------------------------------------|:--------:|:------------------:|:------------------:| +| [chardet](https://github.com/chardet/chardet) | 86 % | 200 ms | 5 file/sec | +| charset-normalizer | **98 %** | **10 ms** | 100 file/sec | + +| Package | 99th percentile | 95th percentile | 50th percentile | +|-----------------------------------------------|:---------------:|:---------------:|:---------------:| +| [chardet](https://github.com/chardet/chardet) | 1200 ms | 287 ms | 23 ms | +| charset-normalizer | 100 ms | 50 ms | 5 ms | + +Chardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload. + +> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows. +> And yes, these results might change at any time. The dataset can be updated to include more files. +> The actual delays heavily depends on your CPU capabilities. The factors should remain the same. +> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability +> (eg. Supported Encoding) Challenge-them if you want. + +## ✨ Installation + +Using pip: + +```sh +pip install charset-normalizer -U +``` + +## 🚀 Basic Usage + +### CLI +This package comes with a CLI. + +``` +usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD] + file [file ...] + +The Real First Universal Charset Detector. Discover originating encoding used +on text file. Normalize text to unicode. + +positional arguments: + files File(s) to be analysed + +optional arguments: + -h, --help show this help message and exit + -v, --verbose Display complementary information about file if any. + Stdout will contain logs about the detection process. + -a, --with-alternative + Output complementary possibilities if any. Top-level + JSON WILL be a list. + -n, --normalize Permit to normalize input file. If not set, program + does not write anything. + -m, --minimal Only output the charset detected to STDOUT. Disabling + JSON output. + -r, --replace Replace file when trying to normalize it instead of + creating a new one. + -f, --force Replace file without asking if you are sure, use this + flag with caution. + -t THRESHOLD, --threshold THRESHOLD + Define a custom maximum amount of chaos allowed in + decoded content. 0. <= chaos <= 1. + --version Show version information and exit. +``` + +```bash +normalizer ./data/sample.1.fr.srt +``` + +or + +```bash +python -m charset_normalizer ./data/sample.1.fr.srt +``` + +🎉 Since version 1.4.0 the CLI produce easily usable stdout result in JSON format. 
+ +```json +{ + "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt", + "encoding": "cp1252", + "encoding_aliases": [ + "1252", + "windows_1252" + ], + "alternative_encodings": [ + "cp1254", + "cp1256", + "cp1258", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + "mbcs" + ], + "language": "French", + "alphabets": [ + "Basic Latin", + "Latin-1 Supplement" + ], + "has_sig_or_bom": false, + "chaos": 0.149, + "coherence": 97.152, + "unicode_path": null, + "is_preferred": true +} +``` + +### Python +*Just print out normalized text* +```python +from charset_normalizer import from_path + +results = from_path('./my_subtitle.srt') + +print(str(results.best())) +``` + +*Upgrade your code without effort* +```python +from charset_normalizer import detect +``` + +The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible. + +See the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/) + +## 😇 Why + +When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a +reliable alternative using a completely different method. Also! I never back down on a good challenge! + +I **don't care** about the **originating charset** encoding, because **two different tables** can +produce **two identical rendered string.** +What I want is to get readable text, the best I can. + +In a way, **I'm brute forcing text decoding.** How cool is that ? 😎 + +Don't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode. + +## 🍰 How + + - Discard all charset encoding table that could not fit the binary content. + - Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding. + - Extract matches with the lowest mess detected. + - Additionally, we measure coherence / probe for a language. + +**Wait a minute**, what is noise/mess and coherence according to **YOU ?** + +*Noise :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then +**I established** some ground rules about **what is obvious** when **it seems like** a mess. + I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to + improve or rewrite it. + +*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought +that intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design. + +## ⚡ Known limitations + + - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters)) + - Every charset detector heavily depends on sufficient content. In common cases, do not bother run detection on very tiny content. + +## ⚠️ About Python EOLs + +**If you are running:** + +- Python >=2.7,<3.5: Unsupported +- Python 3.5: charset-normalizer < 2.1 +- Python 3.6: charset-normalizer < 3.1 +- Python 3.7: charset-normalizer < 4.0 + +Upgrade your Python interpreter as soon as possible. + +## 👤 Contributing + +Contributions, issues and feature requests are very much welcome.
    +Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute. + +## 📝 License + +Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).
    +This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed. + +Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/) + +## 💼 For Enterprise + +Professional support for charset-normalizer is available as part of the [Tidelift +Subscription][1]. Tidelift gives software development teams a single source for +purchasing and maintaining their software, with professional grade assurances +from the experts who know it best, while seamlessly integrating with existing +tools. + +[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme + +# Changelog +All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). + +## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31) + +### Fixed +- Unintentional memory usage regression when using large payload that match several encoding (#376) +- Regression on some detection case showcased in the documentation (#371) + +### Added +- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife) + +## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22) + +### Changed +- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8 +- Improved the general detection reliability based on reports from the community + +## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30) + +### Added +- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer` +- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323) + +### Removed +- (internal) Redundant utils.is_ascii function and unused function is_private_use_only +- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant + +### Changed +- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection +- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8 + +### Fixed +- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \_\_lt\_\_ (#350) + +## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07) + +### Changed +- Typehint for function `from_path` no longer enforce `PathLike` as its first argument +- Minor improvement over the global detection reliability + +### Added +- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries +- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True) +- Explicit support for Python 3.12 + +### Fixed +- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289) + +## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06) + +### Added +- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262) + +### Removed +- Support for Python 3.6 (PR #260) + +### Changed +- Optional speedup provided by mypy/c 1.0.1 + 
+## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18) + +### Fixed +- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233) + +### Changed +- Speedup provided by mypy/c 0.990 on Python >= 3.7 + +## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20) + +### Added +- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results +- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES +- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio +- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) + +### Changed +- Build with static metadata using 'build' frontend +- Make the language detection stricter +- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 + +### Fixed +- CLI with opt --normalize fail when using full path for files +- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it +- Sphinx warnings when generating the documentation + +### Removed +- Coherence detector no longer return 'Simple English' instead return 'English' +- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' +- Breaking: Method `first()` and `best()` from CharsetMatch +- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) +- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches +- Breaking: Top-level function `normalize` +- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch +- Support for the backport `unicodedata2` + +## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18) + +### Added +- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results +- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES +- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio + +### Changed +- Build with static metadata using 'build' frontend +- Make the language detection stricter + +### Fixed +- CLI with opt --normalize fail when using full path for files +- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it + +### Removed +- Coherence detector no longer return 'Simple English' instead return 'English' +- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' + +## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21) + +### Added +- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) + +### Removed +- Breaking: Method `first()` and `best()` from CharsetMatch +- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) + +### Fixed +- Sphinx warnings when generating the documentation + +## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15) + +### Changed +- Optional: Module `md.py` can 
be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 + +### Removed +- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches +- Breaking: Top-level function `normalize` +- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch +- Support for the backport `unicodedata2` + +## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19) + +### Deprecated +- Function `normalize` scheduled for removal in 3.0 + +### Changed +- Removed useless call to decode in fn is_unprintable (#206) + +### Fixed +- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204) + +## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19) + +### Added +- Output the Unicode table version when running the CLI with `--version` (PR #194) + +### Changed +- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175) +- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183) + +### Fixed +- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175) +- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181) + +### Removed +- Support for Python 3.5 (PR #192) + +### Deprecated +- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194) + +## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12) + +### Fixed +- ASCII miss-detection on rare cases (PR #170) + +## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30) + +### Added +- Explicit support for Python 3.11 (PR #164) + +### Changed +- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165) + +## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04) + +### Fixed +- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154) + +### Changed +- Skipping the language-detection (CD) on ASCII (PR #155) + +## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03) + +### Changed +- Moderating the logging impact (since 2.0.8) for specific environments (PR #147) + +### Fixed +- Wrong logging level applied when setting kwarg `explain` to True (PR #146) + +## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24) +### Changed +- Improvement over Vietnamese detection (PR #126) +- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124) +- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122) +- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129) +- Code style as refactored by Sourcery-AI (PR #131) +- Minor adjustment on the MD around european words (PR #133) +- Remove and replace SRTs from assets / tests (PR #139) +- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135) +- Setting kwarg `explain` to True will add provisionally (bounded to 
function lifespan) a specific stream handler (PR #135) + +### Fixed +- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137) +- Avoid using too insignificant chunk (PR #137) + +### Added +- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135) +- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141) + +## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11) +### Added +- Add support for Kazakh (Cyrillic) language detection (PR #109) + +### Changed +- Further, improve inferring the language from a given single-byte code page (PR #112) +- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116) +- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113) +- Various detection improvement (MD+CD) (PR #117) + +### Removed +- Remove redundant logging entry about detected language(s) (PR #115) + +### Fixed +- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102) + +## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18) +### Fixed +- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100) +- Fix CLI crash when using --minimal output in certain cases (PR #103) + +### Changed +- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101) + +## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14) +### Changed +- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81) +- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82) +- The Unicode detection is slightly improved (PR #93) +- Add syntax sugar \_\_bool\_\_ for results CharsetMatches list-container (PR #91) + +### Removed +- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92) + +### Fixed +- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95) +- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96) +- The MANIFEST.in was not exhaustive (PR #78) + +## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30) +### Fixed +- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70) +- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68) +- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72) +- Submatch factoring could be wrong in rare edge cases (PR #72) +- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72) +- Fix line endings from CRLF to LF for certain project files (PR #67) + +### Changed +- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76) +- Allow fallback on specified encoding if any (PR #71) + +## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16) +### Changed +- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. 
(PR #63) +- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64) + +## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15) +### Fixed +- Empty/Too small JSON payload miss-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59) + +### Changed +- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57) + +## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13) +### Fixed +- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55) +- Using explain=False permanently disable the verbose output in the current runtime (PR #47) +- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47) +- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52) + +### Changed +- Public function normalize default args values were not aligned with from_bytes (PR #53) + +### Added +- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47) + +## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02) +### Changed +- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet. +- Accent has been made on UTF-8 detection, should perform rather instantaneous. +- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible. +- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time) +- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+ +- utf_7 detection has been reinstated. + +### Removed +- This package no longer require anything when used with Python 3.5 (Dropped cached_property) +- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian. +- The exception hook on UnicodeDecodeError has been removed. + +### Deprecated +- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0 + +### Fixed +- The CLI output used the relative path of the file(s). Should be absolute. + +## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28) +### Fixed +- Logger configuration/usage no longer conflict with others (PR #44) + +## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21) +### Removed +- Using standard logging instead of using the package loguru. +- Dropping nose test framework in favor of the maintained pytest. +- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text. +- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version. +- Stop support for UTF-7 that does not contain a SIG. +- Dropping PrettyTable, replaced with pure JSON output in CLI. + +### Fixed +- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process. +- Not searching properly for the BOM when trying utf32/16 parent codec. + +### Changed +- Improving the package final size by compressing frequencies.json. +- Huge improvement over the larges payload. 
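Editorial aside, not part of the vendored files in this diff: the 2.0.0 entry above stresses backward compatibility with Chardet, with the legacy `detect` function intended as a drop-in replacement. A small illustrative sketch of that swap; the sample bytes are placeholders.

# Before, with chardet:
#   import chardet
#   result = chardet.detect(raw_bytes)
# After, with charset-normalizer: same call shape, same result keys
# ('encoding', 'language', 'confidence').
from charset_normalizer import detect

raw_bytes = "héllo wörld".encode("cp1252")
result = detect(raw_bytes)
print(result["encoding"], result["confidence"])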
+ +### Added +- CLI now produces JSON consumable output. +- Return ASCII if given sequences fit. Given reasonable confidence. + +## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13) + +### Fixed +- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40) + +## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12) + +### Fixed +- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39) + +## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12) + +### Fixed +- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38) + +## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09) + +### Changed +- Amend the previous release to allow prettytable 2.0 (PR #35) + +## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08) + +### Fixed +- Fix error while using the package with a python pre-release interpreter (PR #33) + +### Changed +- Dependencies refactoring, constraints revised. + +### Added +- Add python 3.9 and 3.10 to the supported interpreters + +MIT License + +Copyright (c) 2019 TAHRI Ahmed R. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
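Editorial aside, placed between the packaging metadata above and the vendored sources below and not part of either: a brief illustrative sketch of the primary API these files provide (`from_bytes`, `best()`, `explain`), relying only on behaviour stated in the module's own docstrings; printed values in comments are examples, not guaranteed output.

from charset_normalizer import from_bytes

payload = "Bсеки човек има право на образование. Oбразованието!".encode("utf_8")

# from_bytes returns a CharsetMatches container; best() picks the most
# plausible CharsetMatch, or None when nothing fits.
best_guess = from_bytes(payload).best()
if best_guess is not None:
    print(best_guess.encoding)   # e.g. 'utf_8'
    print(best_guess.language)   # detected language label
    print(str(best_guess))       # payload decoded with the winning encoding

# explain=True temporarily attaches a verbose StreamHandler to the
# 'charset_normalizer' logger, bounded to the lifespan of the call.
from_bytes(payload, explain=True)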
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/RECORD b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/RECORD new file mode 100644 index 000000000..eeb7fbc49 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/RECORD @@ -0,0 +1,23 @@ +../../../bin/normalizer,sha256=CcSYfwxtuixeUSamdJecGDrDukPT45jbtCgbEpGIFDM,293 +charset_normalizer/constant.py,sha256=p0IsOVcEbPWYPOdWhnhRbjK1YVBy6fs05C5vKC-zoxU,40481 +charset_normalizer/md.cpython-38-x86_64-linux-gnu.so,sha256=Y7QSLD5QLoSFAWys0-tL7R6QB7oi5864zM6zr7RWek4,16064 +charset_normalizer/md__mypyc.cpython-38-x86_64-linux-gnu.so,sha256=d8E0dOlnRXncnIFlWCUfjlbyQODPyHD44RJzcK1E5AU,268752 +charset_normalizer/__init__.py,sha256=UzI3xC8PhmcLRMzSgPb6minTmRq0kWznnCBJ8ZCc2XI,1577 +charset_normalizer/__main__.py,sha256=JxY8bleaENOFlLRb9HfoeZCzAMnn2A1oGR5Xm2eyqg0,73 +charset_normalizer/cd.py,sha256=xwZliZcTQFA3jU0c00PRiu9MNxXTFxQkFLWmMW24ZzI,12560 +charset_normalizer/legacy.py,sha256=T-QuVMsMeDiQEk8WSszMrzVJg_14AMeSkmHdRYhdl1k,2071 +charset_normalizer/md.py,sha256=NkSuVLK13_a8c7BxZ4cGIQ5vOtGIWOdh22WZEvjp-7U,19624 +charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +charset_normalizer/api.py,sha256=WOlWjy6wT8SeMYFpaGbXZFN1TMXa-s8vZYfkL4G29iQ,21097 +charset_normalizer/version.py,sha256=iHKUfHD3kDRSyrh_BN2ojh43TA5-UZQjvbVIEFfpHDs,79 +charset_normalizer/models.py,sha256=I5i0s4aKCCgLPY2tUY3pwkgFA-BUbbNxQ7hVkVTt62s,11624 +charset_normalizer/utils.py,sha256=teiosMqzKjXyAHXnGdjSBOgnBZwx-SkBbCLrx0UXy8M,11894 +charset_normalizer/cli/__init__.py,sha256=D5ERp8P62llm2FuoMzydZ7d9rs8cvvLXqE-1_6oViPc,100 +charset_normalizer/cli/__main__.py,sha256=2F-xURZJzo063Ye-2RLJ2wcmURpbKeAzKwpiws65dAs,9744 +charset_normalizer-3.3.2.dist-info/entry_points.txt,sha256=ADSTKrkXZ3hhdOVFi6DcUEHQRS0xfxDIE_pEz4wLIXA,65 +charset_normalizer-3.3.2.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070 +charset_normalizer-3.3.2.dist-info/WHEEL,sha256=CzyHWKXay4N1oFds0wxFofK9MEs-L6SZ6gHGZF6-4Co,148 +charset_normalizer-3.3.2.dist-info/METADATA,sha256=cfLhl5A6SI-F0oclm8w8ux9wshL1nipdeCdVnYb4AaA,33550 +charset_normalizer-3.3.2.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 +charset_normalizer-3.3.2.dist-info/INSTALLER,sha256=sz0Tnp99msIbbLB51q7U9nA6AvRKcsOeUKhrHH0Ulg4,12 +charset_normalizer-3.3.2.dist-info/RECORD,, diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/WHEEL b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/WHEEL new file mode 100644 index 000000000..e8d19a232 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.2) +Root-Is-Purelib: false +Tag: cp38-cp38-manylinux_2_17_x86_64 +Tag: cp38-cp38-manylinux2014_x86_64 + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/entry_points.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/entry_points.txt new file mode 100644 index 000000000..65619e73e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +normalizer = charset_normalizer.cli:cli_detect diff --git 
a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/top_level.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/top_level.txt new file mode 100644 index 000000000..66958f0a0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer-3.3.2.dist-info/top_level.txt @@ -0,0 +1 @@ +charset_normalizer diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/__init__.py new file mode 100644 index 000000000..55991fc38 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/__init__.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +""" +Charset-Normalizer +~~~~~~~~~~~~~~ +The Real First Universal Charset Detector. +A library that helps you read text from an unknown charset encoding. +Motivated by chardet, This package is trying to resolve the issue by taking a new approach. +All IANA character set names for which the Python core library provides codecs are supported. + +Basic usage: + >>> from charset_normalizer import from_bytes + >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8')) + >>> best_guess = results.best() + >>> str(best_guess) + 'Bсеки човек има право на образование. Oбразованието!' + +Others methods and usages are available - see the full documentation +at . +:copyright: (c) 2021 by Ahmed TAHRI +:license: MIT, see LICENSE for more details. +""" +import logging + +from .api import from_bytes, from_fp, from_path, is_binary +from .legacy import detect +from .models import CharsetMatch, CharsetMatches +from .utils import set_logging_handler +from .version import VERSION, __version__ + +__all__ = ( + "from_fp", + "from_path", + "from_bytes", + "is_binary", + "detect", + "CharsetMatch", + "CharsetMatches", + "__version__", + "VERSION", + "set_logging_handler", +) + +# Attach a NullHandler to the top level logger by default +# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library + +logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/__main__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/__main__.py new file mode 100644 index 000000000..beae2ef77 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/__main__.py @@ -0,0 +1,4 @@ +from .cli import cli_detect + +if __name__ == "__main__": + cli_detect() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/api.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/api.py new file mode 100644 index 000000000..0ba08e3a5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/api.py @@ -0,0 +1,626 @@ +import logging +from os import PathLike +from typing import BinaryIO, List, Optional, Set, Union + +from .cd import ( + coherence_ratio, + encoding_languages, + mb_encoding_languages, + merge_coherence_ratios, +) +from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE +from .md import mess_ratio +from .models import CharsetMatch, CharsetMatches +from .utils import ( + any_specified_encoding, + cut_sequence_chunks, + iana_name, + identify_sig_or_bom, + is_cp_similar, + is_multi_byte_encoding, + should_strip_sig_or_bom, +) + +# Will most likely be controversial +# 
logging.addLevelName(TRACE, "TRACE") +logger = logging.getLogger("charset_normalizer") +explain_handler = logging.StreamHandler() +explain_handler.setFormatter( + logging.Formatter("%(asctime)s | %(levelname)s | %(message)s") +) + + +def from_bytes( + sequences: Union[bytes, bytearray], + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.2, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, + language_threshold: float = 0.1, + enable_fallback: bool = True, +) -> CharsetMatches: + """ + Given a raw bytes sequence, return the best possibles charset usable to render str objects. + If there is no results, it is a strong indicator that the source is binary/not text. + By default, the process will extract 5 blocks of 512o each to assess the mess and coherence of a given sequence. + And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will. + + The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page + but never take it for granted. Can improve the performance. + + You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that + purpose. + + This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32. + By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain' + toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging. + Custom logging format and handler can be set manually. + """ + + if not isinstance(sequences, (bytearray, bytes)): + raise TypeError( + "Expected object of type bytes or bytearray, got: {0}".format( + type(sequences) + ) + ) + + if explain: + previous_logger_level: int = logger.level + logger.addHandler(explain_handler) + logger.setLevel(TRACE) + + length: int = len(sequences) + + if length == 0: + logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.") + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level or logging.WARNING) + return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")]) + + if cp_isolation is not None: + logger.log( + TRACE, + "cp_isolation is set. use this flag for debugging purpose. " + "limited list of encoding allowed : %s.", + ", ".join(cp_isolation), + ) + cp_isolation = [iana_name(cp, False) for cp in cp_isolation] + else: + cp_isolation = [] + + if cp_exclusion is not None: + logger.log( + TRACE, + "cp_exclusion is set. use this flag for debugging purpose. 
" + "limited list of encoding excluded : %s.", + ", ".join(cp_exclusion), + ) + cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion] + else: + cp_exclusion = [] + + if length <= (chunk_size * steps): + logger.log( + TRACE, + "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.", + steps, + chunk_size, + length, + ) + steps = 1 + chunk_size = length + + if steps > 1 and length / steps < chunk_size: + chunk_size = int(length / steps) + + is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE + is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE + + if is_too_small_sequence: + logger.log( + TRACE, + "Trying to detect encoding from a tiny portion of ({}) byte(s).".format( + length + ), + ) + elif is_too_large_sequence: + logger.log( + TRACE, + "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format( + length + ), + ) + + prioritized_encodings: List[str] = [] + + specified_encoding: Optional[str] = ( + any_specified_encoding(sequences) if preemptive_behaviour else None + ) + + if specified_encoding is not None: + prioritized_encodings.append(specified_encoding) + logger.log( + TRACE, + "Detected declarative mark in sequence. Priority +1 given for %s.", + specified_encoding, + ) + + tested: Set[str] = set() + tested_but_hard_failure: List[str] = [] + tested_but_soft_failure: List[str] = [] + + fallback_ascii: Optional[CharsetMatch] = None + fallback_u8: Optional[CharsetMatch] = None + fallback_specified: Optional[CharsetMatch] = None + + results: CharsetMatches = CharsetMatches() + + sig_encoding, sig_payload = identify_sig_or_bom(sequences) + + if sig_encoding is not None: + prioritized_encodings.append(sig_encoding) + logger.log( + TRACE, + "Detected a SIG or BOM mark on first %i byte(s). Priority +1 given for %s.", + len(sig_payload), + sig_encoding, + ) + + prioritized_encodings.append("ascii") + + if "utf_8" not in prioritized_encodings: + prioritized_encodings.append("utf_8") + + for encoding_iana in prioritized_encodings + IANA_SUPPORTED: + if cp_isolation and encoding_iana not in cp_isolation: + continue + + if cp_exclusion and encoding_iana in cp_exclusion: + continue + + if encoding_iana in tested: + continue + + tested.add(encoding_iana) + + decoded_payload: Optional[str] = None + bom_or_sig_available: bool = sig_encoding == encoding_iana + strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom( + encoding_iana + ) + + if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available: + logger.log( + TRACE, + "Encoding %s won't be tested as-is because it require a BOM. 
Will try some sub-encoder LE/BE.", + encoding_iana, + ) + continue + if encoding_iana in {"utf_7"} and not bom_or_sig_available: + logger.log( + TRACE, + "Encoding %s won't be tested as-is because detection is unreliable without BOM/SIG.", + encoding_iana, + ) + continue + + try: + is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana) + except (ModuleNotFoundError, ImportError): + logger.log( + TRACE, + "Encoding %s does not provide an IncrementalDecoder", + encoding_iana, + ) + continue + + try: + if is_too_large_sequence and is_multi_byte_decoder is False: + str( + sequences[: int(50e4)] + if strip_sig_or_bom is False + else sequences[len(sig_payload) : int(50e4)], + encoding=encoding_iana, + ) + else: + decoded_payload = str( + sequences + if strip_sig_or_bom is False + else sequences[len(sig_payload) :], + encoding=encoding_iana, + ) + except (UnicodeDecodeError, LookupError) as e: + if not isinstance(e, LookupError): + logger.log( + TRACE, + "Code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + similar_soft_failure_test: bool = False + + for encoding_soft_failed in tested_but_soft_failure: + if is_cp_similar(encoding_iana, encoding_soft_failed): + similar_soft_failure_test = True + break + + if similar_soft_failure_test: + logger.log( + TRACE, + "%s is deemed too similar to code page %s and was consider unsuited already. Continuing!", + encoding_iana, + encoding_soft_failed, + ) + continue + + r_ = range( + 0 if not bom_or_sig_available else len(sig_payload), + length, + int(length / steps), + ) + + multi_byte_bonus: bool = ( + is_multi_byte_decoder + and decoded_payload is not None + and len(decoded_payload) < length + ) + + if multi_byte_bonus: + logger.log( + TRACE, + "Code page %s is a multi byte encoding table and it appear that at least one character " + "was encoded using n-bytes.", + encoding_iana, + ) + + max_chunk_gave_up: int = int(len(r_) / 4) + + max_chunk_gave_up = max(max_chunk_gave_up, 2) + early_stop_count: int = 0 + lazy_str_hard_failure = False + + md_chunks: List[str] = [] + md_ratios = [] + + try: + for chunk in cut_sequence_chunks( + sequences, + encoding_iana, + r_, + chunk_size, + bom_or_sig_available, + strip_sig_or_bom, + sig_payload, + is_multi_byte_decoder, + decoded_payload, + ): + md_chunks.append(chunk) + + md_ratios.append( + mess_ratio( + chunk, + threshold, + explain is True and 1 <= len(cp_isolation) <= 2, + ) + ) + + if md_ratios[-1] >= threshold: + early_stop_count += 1 + + if (early_stop_count >= max_chunk_gave_up) or ( + bom_or_sig_available and strip_sig_or_bom is False + ): + break + except ( + UnicodeDecodeError + ) as e: # Lazy str loading may have missed something there + logger.log( + TRACE, + "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + early_stop_count = max_chunk_gave_up + lazy_str_hard_failure = True + + # We might want to check the sequence again with the whole content + # Only if initial MD tests passes + if ( + not lazy_str_hard_failure + and is_too_large_sequence + and not is_multi_byte_decoder + ): + try: + sequences[int(50e3) :].decode(encoding_iana, errors="strict") + except UnicodeDecodeError as e: + logger.log( + TRACE, + "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. 
%s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0 + if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up: + tested_but_soft_failure.append(encoding_iana) + logger.log( + TRACE, + "%s was excluded because of initial chaos probing. Gave up %i time(s). " + "Computed mean chaos is %f %%.", + encoding_iana, + early_stop_count, + round(mean_mess_ratio * 100, ndigits=3), + ) + # Preparing those fallbacks in case we got nothing. + if ( + enable_fallback + and encoding_iana in ["ascii", "utf_8", specified_encoding] + and not lazy_str_hard_failure + ): + fallback_entry = CharsetMatch( + sequences, encoding_iana, threshold, False, [], decoded_payload + ) + if encoding_iana == specified_encoding: + fallback_specified = fallback_entry + elif encoding_iana == "ascii": + fallback_ascii = fallback_entry + else: + fallback_u8 = fallback_entry + continue + + logger.log( + TRACE, + "%s passed initial chaos probing. Mean measured chaos is %f %%", + encoding_iana, + round(mean_mess_ratio * 100, ndigits=3), + ) + + if not is_multi_byte_decoder: + target_languages: List[str] = encoding_languages(encoding_iana) + else: + target_languages = mb_encoding_languages(encoding_iana) + + if target_languages: + logger.log( + TRACE, + "{} should target any language(s) of {}".format( + encoding_iana, str(target_languages) + ), + ) + + cd_ratios = [] + + # We shall skip the CD when its about ASCII + # Most of the time its not relevant to run "language-detection" on it. + if encoding_iana != "ascii": + for chunk in md_chunks: + chunk_languages = coherence_ratio( + chunk, + language_threshold, + ",".join(target_languages) if target_languages else None, + ) + + cd_ratios.append(chunk_languages) + + cd_ratios_merged = merge_coherence_ratios(cd_ratios) + + if cd_ratios_merged: + logger.log( + TRACE, + "We detected language {} using {}".format( + cd_ratios_merged, encoding_iana + ), + ) + + results.append( + CharsetMatch( + sequences, + encoding_iana, + mean_mess_ratio, + bom_or_sig_available, + cd_ratios_merged, + decoded_payload, + ) + ) + + if ( + encoding_iana in [specified_encoding, "ascii", "utf_8"] + and mean_mess_ratio < 0.1 + ): + logger.debug( + "Encoding detection: %s is most likely the one.", encoding_iana + ) + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([results[encoding_iana]]) + + if encoding_iana == sig_encoding: + logger.debug( + "Encoding detection: %s is most likely the one as we detected a BOM or SIG within " + "the beginning of the sequence.", + encoding_iana, + ) + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([results[encoding_iana]]) + + if len(results) == 0: + if fallback_u8 or fallback_ascii or fallback_specified: + logger.log( + TRACE, + "Nothing got out of the detection process. 
Using ASCII/UTF-8/Specified fallback.", + ) + + if fallback_specified: + logger.debug( + "Encoding detection: %s will be used as a fallback match", + fallback_specified.encoding, + ) + results.append(fallback_specified) + elif ( + (fallback_u8 and fallback_ascii is None) + or ( + fallback_u8 + and fallback_ascii + and fallback_u8.fingerprint != fallback_ascii.fingerprint + ) + or (fallback_u8 is not None) + ): + logger.debug("Encoding detection: utf_8 will be used as a fallback match") + results.append(fallback_u8) + elif fallback_ascii: + logger.debug("Encoding detection: ascii will be used as a fallback match") + results.append(fallback_ascii) + + if results: + logger.debug( + "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.", + results.best().encoding, # type: ignore + len(results) - 1, + ) + else: + logger.debug("Encoding detection: Unable to determine any suitable charset.") + + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + + return results + + +def from_fp( + fp: BinaryIO, + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, + language_threshold: float = 0.1, + enable_fallback: bool = True, +) -> CharsetMatches: + """ + Same thing than the function from_bytes but using a file pointer that is already ready. + Will not close the file pointer. + """ + return from_bytes( + fp.read(), + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + explain, + language_threshold, + enable_fallback, + ) + + +def from_path( + path: Union[str, bytes, PathLike], # type: ignore[type-arg] + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, + language_threshold: float = 0.1, + enable_fallback: bool = True, +) -> CharsetMatches: + """ + Same thing than the function from_bytes but with one extra step. Opening and reading given file path in binary mode. + Can raise IOError. + """ + with open(path, "rb") as fp: + return from_fp( + fp, + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + explain, + language_threshold, + enable_fallback, + ) + + +def is_binary( + fp_or_path_or_payload: Union[PathLike, str, BinaryIO, bytes], # type: ignore[type-arg] + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, + language_threshold: float = 0.1, + enable_fallback: bool = False, +) -> bool: + """ + Detect if the given input (file, bytes, or path) points to a binary file. aka. not a string. + Based on the same main heuristic algorithms and default kwargs at the sole exception that fallbacks match + are disabled to be stricter around ASCII-compatible but unlikely to be a string. 
+ """ + if isinstance(fp_or_path_or_payload, (str, PathLike)): + guesses = from_path( + fp_or_path_or_payload, + steps=steps, + chunk_size=chunk_size, + threshold=threshold, + cp_isolation=cp_isolation, + cp_exclusion=cp_exclusion, + preemptive_behaviour=preemptive_behaviour, + explain=explain, + language_threshold=language_threshold, + enable_fallback=enable_fallback, + ) + elif isinstance( + fp_or_path_or_payload, + ( + bytes, + bytearray, + ), + ): + guesses = from_bytes( + fp_or_path_or_payload, + steps=steps, + chunk_size=chunk_size, + threshold=threshold, + cp_isolation=cp_isolation, + cp_exclusion=cp_exclusion, + preemptive_behaviour=preemptive_behaviour, + explain=explain, + language_threshold=language_threshold, + enable_fallback=enable_fallback, + ) + else: + guesses = from_fp( + fp_or_path_or_payload, + steps=steps, + chunk_size=chunk_size, + threshold=threshold, + cp_isolation=cp_isolation, + cp_exclusion=cp_exclusion, + preemptive_behaviour=preemptive_behaviour, + explain=explain, + language_threshold=language_threshold, + enable_fallback=enable_fallback, + ) + + return not guesses diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/cd.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/cd.py new file mode 100644 index 000000000..4ea6760c4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/cd.py @@ -0,0 +1,395 @@ +import importlib +from codecs import IncrementalDecoder +from collections import Counter +from functools import lru_cache +from typing import Counter as TypeCounter, Dict, List, Optional, Tuple + +from .constant import ( + FREQUENCIES, + KO_NAMES, + LANGUAGE_SUPPORTED_COUNT, + TOO_SMALL_SEQUENCE, + ZH_NAMES, +) +from .md import is_suspiciously_successive_range +from .models import CoherenceMatches +from .utils import ( + is_accentuated, + is_latin, + is_multi_byte_encoding, + is_unicode_range_secondary, + unicode_range, +) + + +def encoding_unicode_range(iana_name: str) -> List[str]: + """ + Return associated unicode ranges in a single byte code page. + """ + if is_multi_byte_encoding(iana_name): + raise IOError("Function not supported on multi-byte code page") + + decoder = importlib.import_module( + "encodings.{}".format(iana_name) + ).IncrementalDecoder + + p: IncrementalDecoder = decoder(errors="ignore") + seen_ranges: Dict[str, int] = {} + character_count: int = 0 + + for i in range(0x40, 0xFF): + chunk: str = p.decode(bytes([i])) + + if chunk: + character_range: Optional[str] = unicode_range(chunk) + + if character_range is None: + continue + + if is_unicode_range_secondary(character_range) is False: + if character_range not in seen_ranges: + seen_ranges[character_range] = 0 + seen_ranges[character_range] += 1 + character_count += 1 + + return sorted( + [ + character_range + for character_range in seen_ranges + if seen_ranges[character_range] / character_count >= 0.15 + ] + ) + + +def unicode_range_languages(primary_range: str) -> List[str]: + """ + Return inferred languages used with a unicode range. + """ + languages: List[str] = [] + + for language, characters in FREQUENCIES.items(): + for character in characters: + if unicode_range(character) == primary_range: + languages.append(language) + break + + return languages + + +@lru_cache() +def encoding_languages(iana_name: str) -> List[str]: + """ + Single-byte encoding language association. Some code page are heavily linked to particular language(s). + This function does the correspondence. 
+ """ + unicode_ranges: List[str] = encoding_unicode_range(iana_name) + primary_range: Optional[str] = None + + for specified_range in unicode_ranges: + if "Latin" not in specified_range: + primary_range = specified_range + break + + if primary_range is None: + return ["Latin Based"] + + return unicode_range_languages(primary_range) + + +@lru_cache() +def mb_encoding_languages(iana_name: str) -> List[str]: + """ + Multi-byte encoding language association. Some code page are heavily linked to particular language(s). + This function does the correspondence. + """ + if ( + iana_name.startswith("shift_") + or iana_name.startswith("iso2022_jp") + or iana_name.startswith("euc_j") + or iana_name == "cp932" + ): + return ["Japanese"] + if iana_name.startswith("gb") or iana_name in ZH_NAMES: + return ["Chinese"] + if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES: + return ["Korean"] + + return [] + + +@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT) +def get_target_features(language: str) -> Tuple[bool, bool]: + """ + Determine main aspects from a supported language if it contains accents and if is pure Latin. + """ + target_have_accents: bool = False + target_pure_latin: bool = True + + for character in FREQUENCIES[language]: + if not target_have_accents and is_accentuated(character): + target_have_accents = True + if target_pure_latin and is_latin(character) is False: + target_pure_latin = False + + return target_have_accents, target_pure_latin + + +def alphabet_languages( + characters: List[str], ignore_non_latin: bool = False +) -> List[str]: + """ + Return associated languages associated to given characters. + """ + languages: List[Tuple[str, float]] = [] + + source_have_accents = any(is_accentuated(character) for character in characters) + + for language, language_characters in FREQUENCIES.items(): + target_have_accents, target_pure_latin = get_target_features(language) + + if ignore_non_latin and target_pure_latin is False: + continue + + if target_have_accents is False and source_have_accents: + continue + + character_count: int = len(language_characters) + + character_match_count: int = len( + [c for c in language_characters if c in characters] + ) + + ratio: float = character_match_count / character_count + + if ratio >= 0.2: + languages.append((language, ratio)) + + languages = sorted(languages, key=lambda x: x[1], reverse=True) + + return [compatible_language[0] for compatible_language in languages] + + +def characters_popularity_compare( + language: str, ordered_characters: List[str] +) -> float: + """ + Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language. + The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit). + Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.) 
+ """ + if language not in FREQUENCIES: + raise ValueError("{} not available".format(language)) + + character_approved_count: int = 0 + FREQUENCIES_language_set = set(FREQUENCIES[language]) + + ordered_characters_count: int = len(ordered_characters) + target_language_characters_count: int = len(FREQUENCIES[language]) + + large_alphabet: bool = target_language_characters_count > 26 + + for character, character_rank in zip( + ordered_characters, range(0, ordered_characters_count) + ): + if character not in FREQUENCIES_language_set: + continue + + character_rank_in_language: int = FREQUENCIES[language].index(character) + expected_projection_ratio: float = ( + target_language_characters_count / ordered_characters_count + ) + character_rank_projection: int = int(character_rank * expected_projection_ratio) + + if ( + large_alphabet is False + and abs(character_rank_projection - character_rank_in_language) > 4 + ): + continue + + if ( + large_alphabet is True + and abs(character_rank_projection - character_rank_in_language) + < target_language_characters_count / 3 + ): + character_approved_count += 1 + continue + + characters_before_source: List[str] = FREQUENCIES[language][ + 0:character_rank_in_language + ] + characters_after_source: List[str] = FREQUENCIES[language][ + character_rank_in_language: + ] + characters_before: List[str] = ordered_characters[0:character_rank] + characters_after: List[str] = ordered_characters[character_rank:] + + before_match_count: int = len( + set(characters_before) & set(characters_before_source) + ) + + after_match_count: int = len( + set(characters_after) & set(characters_after_source) + ) + + if len(characters_before_source) == 0 and before_match_count <= 4: + character_approved_count += 1 + continue + + if len(characters_after_source) == 0 and after_match_count <= 4: + character_approved_count += 1 + continue + + if ( + before_match_count / len(characters_before_source) >= 0.4 + or after_match_count / len(characters_after_source) >= 0.4 + ): + character_approved_count += 1 + continue + + return character_approved_count / len(ordered_characters) + + +def alpha_unicode_split(decoded_sequence: str) -> List[str]: + """ + Given a decoded text sequence, return a list of str. Unicode range / alphabet separation. + Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list; + One containing the latin letters and the other hebrew. + """ + layers: Dict[str, str] = {} + + for character in decoded_sequence: + if character.isalpha() is False: + continue + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + continue + + layer_target_range: Optional[str] = None + + for discovered_range in layers: + if ( + is_suspiciously_successive_range(discovered_range, character_range) + is False + ): + layer_target_range = discovered_range + break + + if layer_target_range is None: + layer_target_range = character_range + + if layer_target_range not in layers: + layers[layer_target_range] = character.lower() + continue + + layers[layer_target_range] += character.lower() + + return list(layers.values()) + + +def merge_coherence_ratios(results: List[CoherenceMatches]) -> CoherenceMatches: + """ + This function merge results previously given by the function coherence_ratio. + The return type is the same as coherence_ratio. 
+ """ + per_language_ratios: Dict[str, List[float]] = {} + for result in results: + for sub_result in result: + language, ratio = sub_result + if language not in per_language_ratios: + per_language_ratios[language] = [ratio] + continue + per_language_ratios[language].append(ratio) + + merge = [ + ( + language, + round( + sum(per_language_ratios[language]) / len(per_language_ratios[language]), + 4, + ), + ) + for language in per_language_ratios + ] + + return sorted(merge, key=lambda x: x[1], reverse=True) + + +def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches: + """ + We shall NOT return "English—" in CoherenceMatches because it is an alternative + of "English". This function only keeps the best match and remove the em-dash in it. + """ + index_results: Dict[str, List[float]] = dict() + + for result in results: + language, ratio = result + no_em_name: str = language.replace("—", "") + + if no_em_name not in index_results: + index_results[no_em_name] = [] + + index_results[no_em_name].append(ratio) + + if any(len(index_results[e]) > 1 for e in index_results): + filtered_results: CoherenceMatches = [] + + for language in index_results: + filtered_results.append((language, max(index_results[language]))) + + return filtered_results + + return results + + +@lru_cache(maxsize=2048) +def coherence_ratio( + decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None +) -> CoherenceMatches: + """ + Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. + A layer = Character extraction by alphabets/ranges. + """ + + results: List[Tuple[str, float]] = [] + ignore_non_latin: bool = False + + sufficient_match_count: int = 0 + + lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else [] + if "Latin Based" in lg_inclusion_list: + ignore_non_latin = True + lg_inclusion_list.remove("Latin Based") + + for layer in alpha_unicode_split(decoded_sequence): + sequence_frequencies: TypeCounter[str] = Counter(layer) + most_common = sequence_frequencies.most_common() + + character_count: int = sum(o for c, o in most_common) + + if character_count <= TOO_SMALL_SEQUENCE: + continue + + popular_character_ordered: List[str] = [c for c, o in most_common] + + for language in lg_inclusion_list or alphabet_languages( + popular_character_ordered, ignore_non_latin + ): + ratio: float = characters_popularity_compare( + language, popular_character_ordered + ) + + if ratio < threshold: + continue + elif ratio >= 0.8: + sufficient_match_count += 1 + + results.append((language, round(ratio, 4))) + + if sufficient_match_count >= 3: + break + + return sorted( + filter_alt_coherence_matches(results), key=lambda x: x[1], reverse=True + ) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/cli/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/cli/__init__.py new file mode 100644 index 000000000..d95fedfe5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/cli/__init__.py @@ -0,0 +1,6 @@ +from .__main__ import cli_detect, query_yes_no + +__all__ = ( + "cli_detect", + "query_yes_no", +) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/cli/__main__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/cli/__main__.py new file mode 100644 index 000000000..f4bcbaac0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/cli/__main__.py 
@@ -0,0 +1,296 @@ +import argparse +import sys +from json import dumps +from os.path import abspath, basename, dirname, join, realpath +from platform import python_version +from typing import List, Optional +from unicodedata import unidata_version + +import charset_normalizer.md as md_module +from charset_normalizer import from_fp +from charset_normalizer.models import CliDetectionResult +from charset_normalizer.version import __version__ + + +def query_yes_no(question: str, default: str = "yes") -> bool: + """Ask a yes/no question via input() and return their answer. + + "question" is a string that is presented to the user. + "default" is the presumed answer if the user just hits . + It must be "yes" (the default), "no" or None (meaning + an answer is required of the user). + + The "answer" return value is True for "yes" or False for "no". + + Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input + """ + valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} + if default is None: + prompt = " [y/n] " + elif default == "yes": + prompt = " [Y/n] " + elif default == "no": + prompt = " [y/N] " + else: + raise ValueError("invalid default answer: '%s'" % default) + + while True: + sys.stdout.write(question + prompt) + choice = input().lower() + if default is not None and choice == "": + return valid[default] + elif choice in valid: + return valid[choice] + else: + sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") + + +def cli_detect(argv: Optional[List[str]] = None) -> int: + """ + CLI assistant using ARGV and ArgumentParser + :param argv: + :return: 0 if everything is fine, anything else equal trouble + """ + parser = argparse.ArgumentParser( + description="The Real First Universal Charset Detector. " + "Discover originating encoding used on text file. " + "Normalize text to unicode." + ) + + parser.add_argument( + "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed" + ) + parser.add_argument( + "-v", + "--verbose", + action="store_true", + default=False, + dest="verbose", + help="Display complementary information about file if any. " + "Stdout will contain logs about the detection process.", + ) + parser.add_argument( + "-a", + "--with-alternative", + action="store_true", + default=False, + dest="alternatives", + help="Output complementary possibilities if any. Top-level JSON WILL be a list.", + ) + parser.add_argument( + "-n", + "--normalize", + action="store_true", + default=False, + dest="normalize", + help="Permit to normalize input file. If not set, program does not write anything.", + ) + parser.add_argument( + "-m", + "--minimal", + action="store_true", + default=False, + dest="minimal", + help="Only output the charset detected to STDOUT. Disabling JSON output.", + ) + parser.add_argument( + "-r", + "--replace", + action="store_true", + default=False, + dest="replace", + help="Replace file when trying to normalize it instead of creating a new one.", + ) + parser.add_argument( + "-f", + "--force", + action="store_true", + default=False, + dest="force", + help="Replace file without asking if you are sure, use this flag with caution.", + ) + parser.add_argument( + "-t", + "--threshold", + action="store", + default=0.2, + type=float, + dest="threshold", + help="Define a custom maximum amount of chaos allowed in decoded content. 0. 
<= chaos <= 1.", + ) + parser.add_argument( + "--version", + action="version", + version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format( + __version__, + python_version(), + unidata_version, + "OFF" if md_module.__file__.lower().endswith(".py") else "ON", + ), + help="Show version information and exit.", + ) + + args = parser.parse_args(argv) + + if args.replace is True and args.normalize is False: + print("Use --replace in addition of --normalize only.", file=sys.stderr) + return 1 + + if args.force is True and args.replace is False: + print("Use --force in addition of --replace only.", file=sys.stderr) + return 1 + + if args.threshold < 0.0 or args.threshold > 1.0: + print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr) + return 1 + + x_ = [] + + for my_file in args.files: + matches = from_fp(my_file, threshold=args.threshold, explain=args.verbose) + + best_guess = matches.best() + + if best_guess is None: + print( + 'Unable to identify originating encoding for "{}". {}'.format( + my_file.name, + "Maybe try increasing maximum amount of chaos." + if args.threshold < 1.0 + else "", + ), + file=sys.stderr, + ) + x_.append( + CliDetectionResult( + abspath(my_file.name), + None, + [], + [], + "Unknown", + [], + False, + 1.0, + 0.0, + None, + True, + ) + ) + else: + x_.append( + CliDetectionResult( + abspath(my_file.name), + best_guess.encoding, + best_guess.encoding_aliases, + [ + cp + for cp in best_guess.could_be_from_charset + if cp != best_guess.encoding + ], + best_guess.language, + best_guess.alphabets, + best_guess.bom, + best_guess.percent_chaos, + best_guess.percent_coherence, + None, + True, + ) + ) + + if len(matches) > 1 and args.alternatives: + for el in matches: + if el != best_guess: + x_.append( + CliDetectionResult( + abspath(my_file.name), + el.encoding, + el.encoding_aliases, + [ + cp + for cp in el.could_be_from_charset + if cp != el.encoding + ], + el.language, + el.alphabets, + el.bom, + el.percent_chaos, + el.percent_coherence, + None, + False, + ) + ) + + if args.normalize is True: + if best_guess.encoding.startswith("utf") is True: + print( + '"{}" file does not need to be normalized, as it already came from unicode.'.format( + my_file.name + ), + file=sys.stderr, + ) + if my_file.closed is False: + my_file.close() + continue + + dir_path = dirname(realpath(my_file.name)) + file_name = basename(realpath(my_file.name)) + + o_: List[str] = file_name.split(".") + + if args.replace is False: + o_.insert(-1, best_guess.encoding) + if my_file.closed is False: + my_file.close() + elif ( + args.force is False + and query_yes_no( + 'Are you sure to normalize "{}" by replacing it ?'.format( + my_file.name + ), + "no", + ) + is False + ): + if my_file.closed is False: + my_file.close() + continue + + try: + x_[0].unicode_path = join(dir_path, ".".join(o_)) + + with open(x_[0].unicode_path, "w", encoding="utf-8") as fp: + fp.write(str(best_guess)) + except IOError as e: + print(str(e), file=sys.stderr) + if my_file.closed is False: + my_file.close() + return 2 + + if my_file.closed is False: + my_file.close() + + if args.minimal is False: + print( + dumps( + [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__, + ensure_ascii=True, + indent=4, + ) + ) + else: + for my_file in args.files: + print( + ", ".join( + [ + el.encoding or "undefined" + for el in x_ + if el.path == abspath(my_file.name) + ] + ) + ) + + return 0 + + +if __name__ == "__main__": + cli_detect() diff --git 
a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/constant.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/constant.py new file mode 100644 index 000000000..863490461 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/constant.py @@ -0,0 +1,1995 @@ +# -*- coding: utf-8 -*- +from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE +from encodings.aliases import aliases +from re import IGNORECASE, compile as re_compile +from typing import Dict, List, Set, Union + +# Contain for each eligible encoding a list of/item bytes SIG/BOM +ENCODING_MARKS: Dict[str, Union[bytes, List[bytes]]] = { + "utf_8": BOM_UTF8, + "utf_7": [ + b"\x2b\x2f\x76\x38", + b"\x2b\x2f\x76\x39", + b"\x2b\x2f\x76\x2b", + b"\x2b\x2f\x76\x2f", + b"\x2b\x2f\x76\x38\x2d", + ], + "gb18030": b"\x84\x31\x95\x33", + "utf_32": [BOM_UTF32_BE, BOM_UTF32_LE], + "utf_16": [BOM_UTF16_BE, BOM_UTF16_LE], +} + +TOO_SMALL_SEQUENCE: int = 32 +TOO_BIG_SEQUENCE: int = int(10e6) + +UTF8_MAXIMAL_ALLOCATION: int = 1_112_064 + +# Up-to-date Unicode ucd/15.0.0 +UNICODE_RANGES_COMBINED: Dict[str, range] = { + "Control character": range(32), + "Basic Latin": range(32, 128), + "Latin-1 Supplement": range(128, 256), + "Latin Extended-A": range(256, 384), + "Latin Extended-B": range(384, 592), + "IPA Extensions": range(592, 688), + "Spacing Modifier Letters": range(688, 768), + "Combining Diacritical Marks": range(768, 880), + "Greek and Coptic": range(880, 1024), + "Cyrillic": range(1024, 1280), + "Cyrillic Supplement": range(1280, 1328), + "Armenian": range(1328, 1424), + "Hebrew": range(1424, 1536), + "Arabic": range(1536, 1792), + "Syriac": range(1792, 1872), + "Arabic Supplement": range(1872, 1920), + "Thaana": range(1920, 1984), + "NKo": range(1984, 2048), + "Samaritan": range(2048, 2112), + "Mandaic": range(2112, 2144), + "Syriac Supplement": range(2144, 2160), + "Arabic Extended-B": range(2160, 2208), + "Arabic Extended-A": range(2208, 2304), + "Devanagari": range(2304, 2432), + "Bengali": range(2432, 2560), + "Gurmukhi": range(2560, 2688), + "Gujarati": range(2688, 2816), + "Oriya": range(2816, 2944), + "Tamil": range(2944, 3072), + "Telugu": range(3072, 3200), + "Kannada": range(3200, 3328), + "Malayalam": range(3328, 3456), + "Sinhala": range(3456, 3584), + "Thai": range(3584, 3712), + "Lao": range(3712, 3840), + "Tibetan": range(3840, 4096), + "Myanmar": range(4096, 4256), + "Georgian": range(4256, 4352), + "Hangul Jamo": range(4352, 4608), + "Ethiopic": range(4608, 4992), + "Ethiopic Supplement": range(4992, 5024), + "Cherokee": range(5024, 5120), + "Unified Canadian Aboriginal Syllabics": range(5120, 5760), + "Ogham": range(5760, 5792), + "Runic": range(5792, 5888), + "Tagalog": range(5888, 5920), + "Hanunoo": range(5920, 5952), + "Buhid": range(5952, 5984), + "Tagbanwa": range(5984, 6016), + "Khmer": range(6016, 6144), + "Mongolian": range(6144, 6320), + "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6400), + "Limbu": range(6400, 6480), + "Tai Le": range(6480, 6528), + "New Tai Lue": range(6528, 6624), + "Khmer Symbols": range(6624, 6656), + "Buginese": range(6656, 6688), + "Tai Tham": range(6688, 6832), + "Combining Diacritical Marks Extended": range(6832, 6912), + "Balinese": range(6912, 7040), + "Sundanese": range(7040, 7104), + "Batak": range(7104, 7168), + "Lepcha": range(7168, 7248), + "Ol Chiki": range(7248, 7296), + "Cyrillic Extended-C": range(7296, 7312), + "Georgian Extended": range(7312, 7360), + 
"Sundanese Supplement": range(7360, 7376), + "Vedic Extensions": range(7376, 7424), + "Phonetic Extensions": range(7424, 7552), + "Phonetic Extensions Supplement": range(7552, 7616), + "Combining Diacritical Marks Supplement": range(7616, 7680), + "Latin Extended Additional": range(7680, 7936), + "Greek Extended": range(7936, 8192), + "General Punctuation": range(8192, 8304), + "Superscripts and Subscripts": range(8304, 8352), + "Currency Symbols": range(8352, 8400), + "Combining Diacritical Marks for Symbols": range(8400, 8448), + "Letterlike Symbols": range(8448, 8528), + "Number Forms": range(8528, 8592), + "Arrows": range(8592, 8704), + "Mathematical Operators": range(8704, 8960), + "Miscellaneous Technical": range(8960, 9216), + "Control Pictures": range(9216, 9280), + "Optical Character Recognition": range(9280, 9312), + "Enclosed Alphanumerics": range(9312, 9472), + "Box Drawing": range(9472, 9600), + "Block Elements": range(9600, 9632), + "Geometric Shapes": range(9632, 9728), + "Miscellaneous Symbols": range(9728, 9984), + "Dingbats": range(9984, 10176), + "Miscellaneous Mathematical Symbols-A": range(10176, 10224), + "Supplemental Arrows-A": range(10224, 10240), + "Braille Patterns": range(10240, 10496), + "Supplemental Arrows-B": range(10496, 10624), + "Miscellaneous Mathematical Symbols-B": range(10624, 10752), + "Supplemental Mathematical Operators": range(10752, 11008), + "Miscellaneous Symbols and Arrows": range(11008, 11264), + "Glagolitic": range(11264, 11360), + "Latin Extended-C": range(11360, 11392), + "Coptic": range(11392, 11520), + "Georgian Supplement": range(11520, 11568), + "Tifinagh": range(11568, 11648), + "Ethiopic Extended": range(11648, 11744), + "Cyrillic Extended-A": range(11744, 11776), + "Supplemental Punctuation": range(11776, 11904), + "CJK Radicals Supplement": range(11904, 12032), + "Kangxi Radicals": range(12032, 12256), + "Ideographic Description Characters": range(12272, 12288), + "CJK Symbols and Punctuation": range(12288, 12352), + "Hiragana": range(12352, 12448), + "Katakana": range(12448, 12544), + "Bopomofo": range(12544, 12592), + "Hangul Compatibility Jamo": range(12592, 12688), + "Kanbun": range(12688, 12704), + "Bopomofo Extended": range(12704, 12736), + "CJK Strokes": range(12736, 12784), + "Katakana Phonetic Extensions": range(12784, 12800), + "Enclosed CJK Letters and Months": range(12800, 13056), + "CJK Compatibility": range(13056, 13312), + "CJK Unified Ideographs Extension A": range(13312, 19904), + "Yijing Hexagram Symbols": range(19904, 19968), + "CJK Unified Ideographs": range(19968, 40960), + "Yi Syllables": range(40960, 42128), + "Yi Radicals": range(42128, 42192), + "Lisu": range(42192, 42240), + "Vai": range(42240, 42560), + "Cyrillic Extended-B": range(42560, 42656), + "Bamum": range(42656, 42752), + "Modifier Tone Letters": range(42752, 42784), + "Latin Extended-D": range(42784, 43008), + "Syloti Nagri": range(43008, 43056), + "Common Indic Number Forms": range(43056, 43072), + "Phags-pa": range(43072, 43136), + "Saurashtra": range(43136, 43232), + "Devanagari Extended": range(43232, 43264), + "Kayah Li": range(43264, 43312), + "Rejang": range(43312, 43360), + "Hangul Jamo Extended-A": range(43360, 43392), + "Javanese": range(43392, 43488), + "Myanmar Extended-B": range(43488, 43520), + "Cham": range(43520, 43616), + "Myanmar Extended-A": range(43616, 43648), + "Tai Viet": range(43648, 43744), + "Meetei Mayek Extensions": range(43744, 43776), + "Ethiopic Extended-A": range(43776, 43824), + "Latin Extended-E": range(43824, 
43888), + "Cherokee Supplement": range(43888, 43968), + "Meetei Mayek": range(43968, 44032), + "Hangul Syllables": range(44032, 55216), + "Hangul Jamo Extended-B": range(55216, 55296), + "High Surrogates": range(55296, 56192), + "High Private Use Surrogates": range(56192, 56320), + "Low Surrogates": range(56320, 57344), + "Private Use Area": range(57344, 63744), + "CJK Compatibility Ideographs": range(63744, 64256), + "Alphabetic Presentation Forms": range(64256, 64336), + "Arabic Presentation Forms-A": range(64336, 65024), + "Variation Selectors": range(65024, 65040), + "Vertical Forms": range(65040, 65056), + "Combining Half Marks": range(65056, 65072), + "CJK Compatibility Forms": range(65072, 65104), + "Small Form Variants": range(65104, 65136), + "Arabic Presentation Forms-B": range(65136, 65280), + "Halfwidth and Fullwidth Forms": range(65280, 65520), + "Specials": range(65520, 65536), + "Linear B Syllabary": range(65536, 65664), + "Linear B Ideograms": range(65664, 65792), + "Aegean Numbers": range(65792, 65856), + "Ancient Greek Numbers": range(65856, 65936), + "Ancient Symbols": range(65936, 66000), + "Phaistos Disc": range(66000, 66048), + "Lycian": range(66176, 66208), + "Carian": range(66208, 66272), + "Coptic Epact Numbers": range(66272, 66304), + "Old Italic": range(66304, 66352), + "Gothic": range(66352, 66384), + "Old Permic": range(66384, 66432), + "Ugaritic": range(66432, 66464), + "Old Persian": range(66464, 66528), + "Deseret": range(66560, 66640), + "Shavian": range(66640, 66688), + "Osmanya": range(66688, 66736), + "Osage": range(66736, 66816), + "Elbasan": range(66816, 66864), + "Caucasian Albanian": range(66864, 66928), + "Vithkuqi": range(66928, 67008), + "Linear A": range(67072, 67456), + "Latin Extended-F": range(67456, 67520), + "Cypriot Syllabary": range(67584, 67648), + "Imperial Aramaic": range(67648, 67680), + "Palmyrene": range(67680, 67712), + "Nabataean": range(67712, 67760), + "Hatran": range(67808, 67840), + "Phoenician": range(67840, 67872), + "Lydian": range(67872, 67904), + "Meroitic Hieroglyphs": range(67968, 68000), + "Meroitic Cursive": range(68000, 68096), + "Kharoshthi": range(68096, 68192), + "Old South Arabian": range(68192, 68224), + "Old North Arabian": range(68224, 68256), + "Manichaean": range(68288, 68352), + "Avestan": range(68352, 68416), + "Inscriptional Parthian": range(68416, 68448), + "Inscriptional Pahlavi": range(68448, 68480), + "Psalter Pahlavi": range(68480, 68528), + "Old Turkic": range(68608, 68688), + "Old Hungarian": range(68736, 68864), + "Hanifi Rohingya": range(68864, 68928), + "Rumi Numeral Symbols": range(69216, 69248), + "Yezidi": range(69248, 69312), + "Arabic Extended-C": range(69312, 69376), + "Old Sogdian": range(69376, 69424), + "Sogdian": range(69424, 69488), + "Old Uyghur": range(69488, 69552), + "Chorasmian": range(69552, 69600), + "Elymaic": range(69600, 69632), + "Brahmi": range(69632, 69760), + "Kaithi": range(69760, 69840), + "Sora Sompeng": range(69840, 69888), + "Chakma": range(69888, 69968), + "Mahajani": range(69968, 70016), + "Sharada": range(70016, 70112), + "Sinhala Archaic Numbers": range(70112, 70144), + "Khojki": range(70144, 70224), + "Multani": range(70272, 70320), + "Khudawadi": range(70320, 70400), + "Grantha": range(70400, 70528), + "Newa": range(70656, 70784), + "Tirhuta": range(70784, 70880), + "Siddham": range(71040, 71168), + "Modi": range(71168, 71264), + "Mongolian Supplement": range(71264, 71296), + "Takri": range(71296, 71376), + "Ahom": range(71424, 71504), + "Dogra": range(71680, 
71760), + "Warang Citi": range(71840, 71936), + "Dives Akuru": range(71936, 72032), + "Nandinagari": range(72096, 72192), + "Zanabazar Square": range(72192, 72272), + "Soyombo": range(72272, 72368), + "Unified Canadian Aboriginal Syllabics Extended-A": range(72368, 72384), + "Pau Cin Hau": range(72384, 72448), + "Devanagari Extended-A": range(72448, 72544), + "Bhaiksuki": range(72704, 72816), + "Marchen": range(72816, 72896), + "Masaram Gondi": range(72960, 73056), + "Gunjala Gondi": range(73056, 73136), + "Makasar": range(73440, 73472), + "Kawi": range(73472, 73568), + "Lisu Supplement": range(73648, 73664), + "Tamil Supplement": range(73664, 73728), + "Cuneiform": range(73728, 74752), + "Cuneiform Numbers and Punctuation": range(74752, 74880), + "Early Dynastic Cuneiform": range(74880, 75088), + "Cypro-Minoan": range(77712, 77824), + "Egyptian Hieroglyphs": range(77824, 78896), + "Egyptian Hieroglyph Format Controls": range(78896, 78944), + "Anatolian Hieroglyphs": range(82944, 83584), + "Bamum Supplement": range(92160, 92736), + "Mro": range(92736, 92784), + "Tangsa": range(92784, 92880), + "Bassa Vah": range(92880, 92928), + "Pahawh Hmong": range(92928, 93072), + "Medefaidrin": range(93760, 93856), + "Miao": range(93952, 94112), + "Ideographic Symbols and Punctuation": range(94176, 94208), + "Tangut": range(94208, 100352), + "Tangut Components": range(100352, 101120), + "Khitan Small Script": range(101120, 101632), + "Tangut Supplement": range(101632, 101760), + "Kana Extended-B": range(110576, 110592), + "Kana Supplement": range(110592, 110848), + "Kana Extended-A": range(110848, 110896), + "Small Kana Extension": range(110896, 110960), + "Nushu": range(110960, 111360), + "Duployan": range(113664, 113824), + "Shorthand Format Controls": range(113824, 113840), + "Znamenny Musical Notation": range(118528, 118736), + "Byzantine Musical Symbols": range(118784, 119040), + "Musical Symbols": range(119040, 119296), + "Ancient Greek Musical Notation": range(119296, 119376), + "Kaktovik Numerals": range(119488, 119520), + "Mayan Numerals": range(119520, 119552), + "Tai Xuan Jing Symbols": range(119552, 119648), + "Counting Rod Numerals": range(119648, 119680), + "Mathematical Alphanumeric Symbols": range(119808, 120832), + "Sutton SignWriting": range(120832, 121520), + "Latin Extended-G": range(122624, 122880), + "Glagolitic Supplement": range(122880, 122928), + "Cyrillic Extended-D": range(122928, 123024), + "Nyiakeng Puachue Hmong": range(123136, 123216), + "Toto": range(123536, 123584), + "Wancho": range(123584, 123648), + "Nag Mundari": range(124112, 124160), + "Ethiopic Extended-B": range(124896, 124928), + "Mende Kikakui": range(124928, 125152), + "Adlam": range(125184, 125280), + "Indic Siyaq Numbers": range(126064, 126144), + "Ottoman Siyaq Numbers": range(126208, 126288), + "Arabic Mathematical Alphabetic Symbols": range(126464, 126720), + "Mahjong Tiles": range(126976, 127024), + "Domino Tiles": range(127024, 127136), + "Playing Cards": range(127136, 127232), + "Enclosed Alphanumeric Supplement": range(127232, 127488), + "Enclosed Ideographic Supplement": range(127488, 127744), + "Miscellaneous Symbols and Pictographs": range(127744, 128512), + "Emoticons range(Emoji)": range(128512, 128592), + "Ornamental Dingbats": range(128592, 128640), + "Transport and Map Symbols": range(128640, 128768), + "Alchemical Symbols": range(128768, 128896), + "Geometric Shapes Extended": range(128896, 129024), + "Supplemental Arrows-C": range(129024, 129280), + "Supplemental Symbols and Pictographs": 
range(129280, 129536), + "Chess Symbols": range(129536, 129648), + "Symbols and Pictographs Extended-A": range(129648, 129792), + "Symbols for Legacy Computing": range(129792, 130048), + "CJK Unified Ideographs Extension B": range(131072, 173792), + "CJK Unified Ideographs Extension C": range(173824, 177984), + "CJK Unified Ideographs Extension D": range(177984, 178208), + "CJK Unified Ideographs Extension E": range(178208, 183984), + "CJK Unified Ideographs Extension F": range(183984, 191472), + "CJK Compatibility Ideographs Supplement": range(194560, 195104), + "CJK Unified Ideographs Extension G": range(196608, 201552), + "CJK Unified Ideographs Extension H": range(201552, 205744), + "Tags": range(917504, 917632), + "Variation Selectors Supplement": range(917760, 918000), + "Supplementary Private Use Area-A": range(983040, 1048576), + "Supplementary Private Use Area-B": range(1048576, 1114112), +} + + +UNICODE_SECONDARY_RANGE_KEYWORD: List[str] = [ + "Supplement", + "Extended", + "Extensions", + "Modifier", + "Marks", + "Punctuation", + "Symbols", + "Forms", + "Operators", + "Miscellaneous", + "Drawing", + "Block", + "Shapes", + "Supplemental", + "Tags", +] + +RE_POSSIBLE_ENCODING_INDICATION = re_compile( + r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)", + IGNORECASE, +) + +IANA_NO_ALIASES = [ + "cp720", + "cp737", + "cp856", + "cp874", + "cp875", + "cp1006", + "koi8_r", + "koi8_t", + "koi8_u", +] + +IANA_SUPPORTED: List[str] = sorted( + filter( + lambda x: x.endswith("_codec") is False + and x not in {"rot_13", "tactis", "mbcs"}, + list(set(aliases.values())) + IANA_NO_ALIASES, + ) +) + +IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED) + +# pre-computed code page that are similar using the function cp_similarity. 
+IANA_SUPPORTED_SIMILAR: Dict[str, List[str]] = { + "cp037": ["cp1026", "cp1140", "cp273", "cp500"], + "cp1026": ["cp037", "cp1140", "cp273", "cp500"], + "cp1125": ["cp866"], + "cp1140": ["cp037", "cp1026", "cp273", "cp500"], + "cp1250": ["iso8859_2"], + "cp1251": ["kz1048", "ptcp154"], + "cp1252": ["iso8859_15", "iso8859_9", "latin_1"], + "cp1253": ["iso8859_7"], + "cp1254": ["iso8859_15", "iso8859_9", "latin_1"], + "cp1257": ["iso8859_13"], + "cp273": ["cp037", "cp1026", "cp1140", "cp500"], + "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"], + "cp500": ["cp037", "cp1026", "cp1140", "cp273"], + "cp850": ["cp437", "cp857", "cp858", "cp865"], + "cp857": ["cp850", "cp858", "cp865"], + "cp858": ["cp437", "cp850", "cp857", "cp865"], + "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"], + "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"], + "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"], + "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"], + "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"], + "cp866": ["cp1125"], + "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"], + "iso8859_11": ["tis_620"], + "iso8859_13": ["cp1257"], + "iso8859_14": [ + "iso8859_10", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_15": [ + "cp1252", + "cp1254", + "iso8859_10", + "iso8859_14", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_16": [ + "iso8859_14", + "iso8859_15", + "iso8859_2", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"], + "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"], + "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"], + "iso8859_7": ["cp1253"], + "iso8859_9": [ + "cp1252", + "cp1254", + "cp1258", + "iso8859_10", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_4", + "latin_1", + ], + "kz1048": ["cp1251", "ptcp154"], + "latin_1": [ + "cp1252", + "cp1254", + "cp1258", + "iso8859_10", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_4", + "iso8859_9", + ], + "mac_iceland": ["mac_roman", "mac_turkish"], + "mac_roman": ["mac_iceland", "mac_turkish"], + "mac_turkish": ["mac_iceland", "mac_roman"], + "ptcp154": ["cp1251", "kz1048"], + "tis_620": ["iso8859_11"], +} + + +CHARDET_CORRESPONDENCE: Dict[str, str] = { + "iso2022_kr": "ISO-2022-KR", + "iso2022_jp": "ISO-2022-JP", + "euc_kr": "EUC-KR", + "tis_620": "TIS-620", + "utf_32": "UTF-32", + "euc_jp": "EUC-JP", + "koi8_r": "KOI8-R", + "iso8859_1": "ISO-8859-1", + "iso8859_2": "ISO-8859-2", + "iso8859_5": "ISO-8859-5", + "iso8859_6": "ISO-8859-6", + "iso8859_7": "ISO-8859-7", + "iso8859_8": "ISO-8859-8", + "utf_16": "UTF-16", + "cp855": "IBM855", + "mac_cyrillic": "MacCyrillic", + "gb2312": "GB2312", + "gb18030": "GB18030", + "cp932": "CP932", + "cp866": "IBM866", + "utf_8": "utf-8", + "utf_8_sig": "UTF-8-SIG", + "shift_jis": "SHIFT_JIS", + "big5": "Big5", + "cp1250": "windows-1250", + "cp1251": "windows-1251", + "cp1252": "Windows-1252", + "cp1253": "windows-1253", + "cp1255": "windows-1255", + "cp1256": "windows-1256", + "cp1254": "Windows-1254", + "cp949": "CP949", +} + + +COMMON_SAFE_ASCII_CHARACTERS: Set[str] = { + "<", + ">", + "=", + ":", + "/", + "&", + ";", + "{", + "}", + "[", + "]", + ",", + "|", + '"', + "-", +} + + +KO_NAMES: Set[str] = {"johab", "cp949", "euc_kr"} +ZH_NAMES: Set[str] = {"big5", "cp950", "big5hkscs", "hz"} 
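# ---------------------------------------------------------------------------
# Illustrative sketch only -- not part of the upstream charset_normalizer
# module. It shows how the ENCODING_MARKS table defined above is typically
# consumed: check whether a raw payload starts with one of the known BOM/SIG
# byte marks. The real library implements this in utils.identify_sig_or_bom;
# the helper name below is made up for this example.
def _example_sniff_bom(sequence: bytes):
    """Return the codec whose BOM/SIG prefixes `sequence`, or None."""
    for encoding, marks in ENCODING_MARKS.items():
        # A value is either a single mark (bytes) or a list of candidate marks.
        candidates = marks if isinstance(marks, list) else [marks]
        for mark in candidates:
            if sequence.startswith(mark):
                return encoding
    return None

# Example: _example_sniff_bom("\ufeffhello".encode("utf-8")) returns "utf_8",
# while _example_sniff_bom(b"hello") returns None.
# ---------------------------------------------------------------------------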
+ +# Logging LEVEL below DEBUG +TRACE: int = 5 + + +# Language label that contain the em dash "—" +# character are to be considered alternative seq to origin +FREQUENCIES: Dict[str, List[str]] = { + "English": [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "u", + "m", + "f", + "p", + "g", + "w", + "y", + "b", + "v", + "k", + "x", + "j", + "z", + "q", + ], + "English—": [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "m", + "u", + "f", + "p", + "g", + "w", + "b", + "y", + "v", + "k", + "j", + "x", + "z", + "q", + ], + "German": [ + "e", + "n", + "i", + "r", + "s", + "t", + "a", + "d", + "h", + "u", + "l", + "g", + "o", + "c", + "m", + "b", + "f", + "k", + "w", + "z", + "p", + "v", + "ü", + "ä", + "ö", + "j", + ], + "French": [ + "e", + "a", + "s", + "n", + "i", + "t", + "r", + "l", + "u", + "o", + "d", + "c", + "p", + "m", + "é", + "v", + "g", + "f", + "b", + "h", + "q", + "à", + "x", + "è", + "y", + "j", + ], + "Dutch": [ + "e", + "n", + "a", + "i", + "r", + "t", + "o", + "d", + "s", + "l", + "g", + "h", + "v", + "m", + "u", + "k", + "c", + "p", + "b", + "w", + "j", + "z", + "f", + "y", + "x", + "ë", + ], + "Italian": [ + "e", + "i", + "a", + "o", + "n", + "l", + "t", + "r", + "s", + "c", + "d", + "u", + "p", + "m", + "g", + "v", + "f", + "b", + "z", + "h", + "q", + "è", + "à", + "k", + "y", + "ò", + ], + "Polish": [ + "a", + "i", + "o", + "e", + "n", + "r", + "z", + "w", + "s", + "c", + "t", + "k", + "y", + "d", + "p", + "m", + "u", + "l", + "j", + "ł", + "g", + "b", + "h", + "ą", + "ę", + "ó", + ], + "Spanish": [ + "e", + "a", + "o", + "n", + "s", + "r", + "i", + "l", + "d", + "t", + "c", + "u", + "m", + "p", + "b", + "g", + "v", + "f", + "y", + "ó", + "h", + "q", + "í", + "j", + "z", + "á", + ], + "Russian": [ + "о", + "а", + "е", + "и", + "н", + "с", + "т", + "р", + "в", + "л", + "к", + "м", + "д", + "п", + "у", + "г", + "я", + "ы", + "з", + "б", + "й", + "ь", + "ч", + "х", + "ж", + "ц", + ], + # Jap-Kanji + "Japanese": [ + "人", + "一", + "大", + "亅", + "丁", + "丨", + "竹", + "笑", + "口", + "日", + "今", + "二", + "彳", + "行", + "十", + "土", + "丶", + "寸", + "寺", + "時", + "乙", + "丿", + "乂", + "气", + "気", + "冂", + "巾", + "亠", + "市", + "目", + "儿", + "見", + "八", + "小", + "凵", + "県", + "月", + "彐", + "門", + "間", + "木", + "東", + "山", + "出", + "本", + "中", + "刀", + "分", + "耳", + "又", + "取", + "最", + "言", + "田", + "心", + "思", + "刂", + "前", + "京", + "尹", + "事", + "生", + "厶", + "云", + "会", + "未", + "来", + "白", + "冫", + "楽", + "灬", + "馬", + "尸", + "尺", + "駅", + "明", + "耂", + "者", + "了", + "阝", + "都", + "高", + "卜", + "占", + "厂", + "广", + "店", + "子", + "申", + "奄", + "亻", + "俺", + "上", + "方", + "冖", + "学", + "衣", + "艮", + "食", + "自", + ], + # Jap-Katakana + "Japanese—": [ + "ー", + "ン", + "ス", + "・", + "ル", + "ト", + "リ", + "イ", + "ア", + "ラ", + "ッ", + "ク", + "ド", + "シ", + "レ", + "ジ", + "タ", + "フ", + "ロ", + "カ", + "テ", + "マ", + "ィ", + "グ", + "バ", + "ム", + "プ", + "オ", + "コ", + "デ", + "ニ", + "ウ", + "メ", + "サ", + "ビ", + "ナ", + "ブ", + "ャ", + "エ", + "ュ", + "チ", + "キ", + "ズ", + "ダ", + "パ", + "ミ", + "ェ", + "ョ", + "ハ", + "セ", + "ベ", + "ガ", + "モ", + "ツ", + "ネ", + "ボ", + "ソ", + "ノ", + "ァ", + "ヴ", + "ワ", + "ポ", + "ペ", + "ピ", + "ケ", + "ゴ", + "ギ", + "ザ", + "ホ", + "ゲ", + "ォ", + "ヤ", + "ヒ", + "ユ", + "ヨ", + "ヘ", + "ゼ", + "ヌ", + "ゥ", + "ゾ", + "ヶ", + "ヂ", + "ヲ", + "ヅ", + "ヵ", + "ヱ", + "ヰ", + "ヮ", + "ヽ", + "゠", + "ヾ", + "ヷ", + "ヿ", + "ヸ", + "ヹ", + "ヺ", + ], + # Jap-Hiragana + "Japanese——": [ + "の", + "に", + "る", + "た", + "と", + "は", + "し", + "い", + "を", 
+ "で", + "て", + "が", + "な", + "れ", + "か", + "ら", + "さ", + "っ", + "り", + "す", + "あ", + "も", + "こ", + "ま", + "う", + "く", + "よ", + "き", + "ん", + "め", + "お", + "け", + "そ", + "つ", + "だ", + "や", + "え", + "ど", + "わ", + "ち", + "み", + "せ", + "じ", + "ば", + "へ", + "び", + "ず", + "ろ", + "ほ", + "げ", + "む", + "べ", + "ひ", + "ょ", + "ゆ", + "ぶ", + "ご", + "ゃ", + "ね", + "ふ", + "ぐ", + "ぎ", + "ぼ", + "ゅ", + "づ", + "ざ", + "ぞ", + "ぬ", + "ぜ", + "ぱ", + "ぽ", + "ぷ", + "ぴ", + "ぃ", + "ぁ", + "ぇ", + "ぺ", + "ゞ", + "ぢ", + "ぉ", + "ぅ", + "ゐ", + "ゝ", + "ゑ", + "゛", + "゜", + "ゎ", + "ゔ", + "゚", + "ゟ", + "゙", + "ゕ", + "ゖ", + ], + "Portuguese": [ + "a", + "e", + "o", + "s", + "i", + "r", + "d", + "n", + "t", + "m", + "u", + "c", + "l", + "p", + "g", + "v", + "b", + "f", + "h", + "ã", + "q", + "é", + "ç", + "á", + "z", + "í", + ], + "Swedish": [ + "e", + "a", + "n", + "r", + "t", + "s", + "i", + "l", + "d", + "o", + "m", + "k", + "g", + "v", + "h", + "f", + "u", + "p", + "ä", + "c", + "b", + "ö", + "å", + "y", + "j", + "x", + ], + "Chinese": [ + "的", + "一", + "是", + "不", + "了", + "在", + "人", + "有", + "我", + "他", + "这", + "个", + "们", + "中", + "来", + "上", + "大", + "为", + "和", + "国", + "地", + "到", + "以", + "说", + "时", + "要", + "就", + "出", + "会", + "可", + "也", + "你", + "对", + "生", + "能", + "而", + "子", + "那", + "得", + "于", + "着", + "下", + "自", + "之", + "年", + "过", + "发", + "后", + "作", + "里", + "用", + "道", + "行", + "所", + "然", + "家", + "种", + "事", + "成", + "方", + "多", + "经", + "么", + "去", + "法", + "学", + "如", + "都", + "同", + "现", + "当", + "没", + "动", + "面", + "起", + "看", + "定", + "天", + "分", + "还", + "进", + "好", + "小", + "部", + "其", + "些", + "主", + "样", + "理", + "心", + "她", + "本", + "前", + "开", + "但", + "因", + "只", + "从", + "想", + "实", + ], + "Ukrainian": [ + "о", + "а", + "н", + "і", + "и", + "р", + "в", + "т", + "е", + "с", + "к", + "л", + "у", + "д", + "м", + "п", + "з", + "я", + "ь", + "б", + "г", + "й", + "ч", + "х", + "ц", + "ї", + ], + "Norwegian": [ + "e", + "r", + "n", + "t", + "a", + "s", + "i", + "o", + "l", + "d", + "g", + "k", + "m", + "v", + "f", + "p", + "u", + "b", + "h", + "å", + "y", + "j", + "ø", + "c", + "æ", + "w", + ], + "Finnish": [ + "a", + "i", + "n", + "t", + "e", + "s", + "l", + "o", + "u", + "k", + "ä", + "m", + "r", + "v", + "j", + "h", + "p", + "y", + "d", + "ö", + "g", + "c", + "b", + "f", + "w", + "z", + ], + "Vietnamese": [ + "n", + "h", + "t", + "i", + "c", + "g", + "a", + "o", + "u", + "m", + "l", + "r", + "à", + "đ", + "s", + "e", + "v", + "p", + "b", + "y", + "ư", + "d", + "á", + "k", + "ộ", + "ế", + ], + "Czech": [ + "o", + "e", + "a", + "n", + "t", + "s", + "i", + "l", + "v", + "r", + "k", + "d", + "u", + "m", + "p", + "í", + "c", + "h", + "z", + "á", + "y", + "j", + "b", + "ě", + "é", + "ř", + ], + "Hungarian": [ + "e", + "a", + "t", + "l", + "s", + "n", + "k", + "r", + "i", + "o", + "z", + "á", + "é", + "g", + "m", + "b", + "y", + "v", + "d", + "h", + "u", + "p", + "j", + "ö", + "f", + "c", + ], + "Korean": [ + "이", + "다", + "에", + "의", + "는", + "로", + "하", + "을", + "가", + "고", + "지", + "서", + "한", + "은", + "기", + "으", + "년", + "대", + "사", + "시", + "를", + "리", + "도", + "인", + "스", + "일", + ], + "Indonesian": [ + "a", + "n", + "e", + "i", + "r", + "t", + "u", + "s", + "d", + "k", + "m", + "l", + "g", + "p", + "b", + "o", + "h", + "y", + "j", + "c", + "w", + "f", + "v", + "z", + "x", + "q", + ], + "Turkish": [ + "a", + "e", + "i", + "n", + "r", + "l", + "ı", + "k", + "d", + "t", + "s", + "m", + "y", + "u", + "o", + "b", + "ü", + "ş", + "v", + "g", + "z", + "h", + "c", + "p", + "ç", + "ğ", + ], + 
"Romanian": [ + "e", + "i", + "a", + "r", + "n", + "t", + "u", + "l", + "o", + "c", + "s", + "d", + "p", + "m", + "ă", + "f", + "v", + "î", + "g", + "b", + "ș", + "ț", + "z", + "h", + "â", + "j", + ], + "Farsi": [ + "ا", + "ی", + "ر", + "د", + "ن", + "ه", + "و", + "م", + "ت", + "ب", + "س", + "ل", + "ک", + "ش", + "ز", + "ف", + "گ", + "ع", + "خ", + "ق", + "ج", + "آ", + "پ", + "ح", + "ط", + "ص", + ], + "Arabic": [ + "ا", + "ل", + "ي", + "م", + "و", + "ن", + "ر", + "ت", + "ب", + "ة", + "ع", + "د", + "س", + "ف", + "ه", + "ك", + "ق", + "أ", + "ح", + "ج", + "ش", + "ط", + "ص", + "ى", + "خ", + "إ", + ], + "Danish": [ + "e", + "r", + "n", + "t", + "a", + "i", + "s", + "d", + "l", + "o", + "g", + "m", + "k", + "f", + "v", + "u", + "b", + "h", + "p", + "å", + "y", + "ø", + "æ", + "c", + "j", + "w", + ], + "Serbian": [ + "а", + "и", + "о", + "е", + "н", + "р", + "с", + "у", + "т", + "к", + "ј", + "в", + "д", + "м", + "п", + "л", + "г", + "з", + "б", + "a", + "i", + "e", + "o", + "n", + "ц", + "ш", + ], + "Lithuanian": [ + "i", + "a", + "s", + "o", + "r", + "e", + "t", + "n", + "u", + "k", + "m", + "l", + "p", + "v", + "d", + "j", + "g", + "ė", + "b", + "y", + "ų", + "š", + "ž", + "c", + "ą", + "į", + ], + "Slovene": [ + "e", + "a", + "i", + "o", + "n", + "r", + "s", + "l", + "t", + "j", + "v", + "k", + "d", + "p", + "m", + "u", + "z", + "b", + "g", + "h", + "č", + "c", + "š", + "ž", + "f", + "y", + ], + "Slovak": [ + "o", + "a", + "e", + "n", + "i", + "r", + "v", + "t", + "s", + "l", + "k", + "d", + "m", + "p", + "u", + "c", + "h", + "j", + "b", + "z", + "á", + "y", + "ý", + "í", + "č", + "é", + ], + "Hebrew": [ + "י", + "ו", + "ה", + "ל", + "ר", + "ב", + "ת", + "מ", + "א", + "ש", + "נ", + "ע", + "ם", + "ד", + "ק", + "ח", + "פ", + "ס", + "כ", + "ג", + "ט", + "צ", + "ן", + "ז", + "ך", + ], + "Bulgarian": [ + "а", + "и", + "о", + "е", + "н", + "т", + "р", + "с", + "в", + "л", + "к", + "д", + "п", + "м", + "з", + "г", + "я", + "ъ", + "у", + "б", + "ч", + "ц", + "й", + "ж", + "щ", + "х", + ], + "Croatian": [ + "a", + "i", + "o", + "e", + "n", + "r", + "j", + "s", + "t", + "u", + "k", + "l", + "v", + "d", + "m", + "p", + "g", + "z", + "b", + "c", + "č", + "h", + "š", + "ž", + "ć", + "f", + ], + "Hindi": [ + "क", + "र", + "स", + "न", + "त", + "म", + "ह", + "प", + "य", + "ल", + "व", + "ज", + "द", + "ग", + "ब", + "श", + "ट", + "अ", + "ए", + "थ", + "भ", + "ड", + "च", + "ध", + "ष", + "इ", + ], + "Estonian": [ + "a", + "i", + "e", + "s", + "t", + "l", + "u", + "n", + "o", + "k", + "r", + "d", + "m", + "v", + "g", + "p", + "j", + "h", + "ä", + "b", + "õ", + "ü", + "f", + "c", + "ö", + "y", + ], + "Thai": [ + "า", + "น", + "ร", + "อ", + "ก", + "เ", + "ง", + "ม", + "ย", + "ล", + "ว", + "ด", + "ท", + "ส", + "ต", + "ะ", + "ป", + "บ", + "ค", + "ห", + "แ", + "จ", + "พ", + "ช", + "ข", + "ใ", + ], + "Greek": [ + "α", + "τ", + "ο", + "ι", + "ε", + "ν", + "ρ", + "σ", + "κ", + "η", + "π", + "ς", + "υ", + "μ", + "λ", + "ί", + "ό", + "ά", + "γ", + "έ", + "δ", + "ή", + "ω", + "χ", + "θ", + "ύ", + ], + "Tamil": [ + "க", + "த", + "ப", + "ட", + "ர", + "ம", + "ல", + "ன", + "வ", + "ற", + "ய", + "ள", + "ச", + "ந", + "இ", + "ண", + "அ", + "ஆ", + "ழ", + "ங", + "எ", + "உ", + "ஒ", + "ஸ", + ], + "Kazakh": [ + "а", + "ы", + "е", + "н", + "т", + "р", + "л", + "і", + "д", + "с", + "м", + "қ", + "к", + "о", + "б", + "и", + "у", + "ғ", + "ж", + "ң", + "з", + "ш", + "й", + "п", + "г", + "ө", + ], +} + +LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/legacy.py 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/legacy.py new file mode 100644 index 000000000..43aad21a9 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/legacy.py @@ -0,0 +1,54 @@ +from typing import Any, Dict, Optional, Union +from warnings import warn + +from .api import from_bytes +from .constant import CHARDET_CORRESPONDENCE + + +def detect( + byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any +) -> Dict[str, Optional[Union[str, float]]]: + """ + chardet legacy method + Detect the encoding of the given byte string. It should be mostly backward-compatible. + Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it) + This function is deprecated and should be used to migrate your project easily, consult the documentation for + further information. Not planned for removal. + + :param byte_str: The byte sequence to examine. + :param should_rename_legacy: Should we rename legacy encodings + to their more modern equivalents? + """ + if len(kwargs): + warn( + f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()" + ) + + if not isinstance(byte_str, (bytearray, bytes)): + raise TypeError( # pragma: nocover + "Expected object of type bytes or bytearray, got: " + "{0}".format(type(byte_str)) + ) + + if isinstance(byte_str, bytearray): + byte_str = bytes(byte_str) + + r = from_bytes(byte_str).best() + + encoding = r.encoding if r is not None else None + language = r.language if r is not None and r.language != "Unknown" else "" + confidence = 1.0 - r.chaos if r is not None else None + + # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process + # but chardet does return 'utf-8-sig' and it is a valid codec name. + if r is not None and encoding == "utf_8" and r.bom: + encoding += "_sig" + + if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE: + encoding = CHARDET_CORRESPONDENCE[encoding] + + return { + "encoding": encoding, + "language": language, + "confidence": confidence, + } diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/md.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/md.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..3824a428f Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/md.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/md.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/md.py new file mode 100644 index 000000000..77897aae4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/md.py @@ -0,0 +1,615 @@ +from functools import lru_cache +from logging import getLogger +from typing import List, Optional + +from .constant import ( + COMMON_SAFE_ASCII_CHARACTERS, + TRACE, + UNICODE_SECONDARY_RANGE_KEYWORD, +) +from .utils import ( + is_accentuated, + is_arabic, + is_arabic_isolated_form, + is_case_variable, + is_cjk, + is_emoticon, + is_hangul, + is_hiragana, + is_katakana, + is_latin, + is_punctuation, + is_separator, + is_symbol, + is_thai, + is_unprintable, + remove_accent, + unicode_range, +) + + +class MessDetectorPlugin: + """ + Base abstract class used for mess detection plugins. + All detectors MUST extend and implement given methods. 
+ """ + + def eligible(self, character: str) -> bool: + """ + Determine if given character should be fed in. + """ + raise NotImplementedError # pragma: nocover + + def feed(self, character: str) -> None: + """ + The main routine to be executed upon character. + Insert the logic in witch the text would be considered chaotic. + """ + raise NotImplementedError # pragma: nocover + + def reset(self) -> None: # pragma: no cover + """ + Permit to reset the plugin to the initial state. + """ + raise NotImplementedError + + @property + def ratio(self) -> float: + """ + Compute the chaos ratio based on what your feed() has seen. + Must NOT be lower than 0.; No restriction gt 0. + """ + raise NotImplementedError # pragma: nocover + + +class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._punctuation_count: int = 0 + self._symbol_count: int = 0 + self._character_count: int = 0 + + self._last_printable_char: Optional[str] = None + self._frenzy_symbol_in_word: bool = False + + def eligible(self, character: str) -> bool: + return character.isprintable() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if ( + character != self._last_printable_char + and character not in COMMON_SAFE_ASCII_CHARACTERS + ): + if is_punctuation(character): + self._punctuation_count += 1 + elif ( + character.isdigit() is False + and is_symbol(character) + and is_emoticon(character) is False + ): + self._symbol_count += 2 + + self._last_printable_char = character + + def reset(self) -> None: # pragma: no cover + self._punctuation_count = 0 + self._character_count = 0 + self._symbol_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + ratio_of_punctuation: float = ( + self._punctuation_count + self._symbol_count + ) / self._character_count + + return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0 + + +class TooManyAccentuatedPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._character_count: int = 0 + self._accentuated_count: int = 0 + + def eligible(self, character: str) -> bool: + return character.isalpha() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if is_accentuated(character): + self._accentuated_count += 1 + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._accentuated_count = 0 + + @property + def ratio(self) -> float: + if self._character_count < 8: + return 0.0 + + ratio_of_accentuation: float = self._accentuated_count / self._character_count + return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0 + + +class UnprintablePlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._unprintable_count: int = 0 + self._character_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if is_unprintable(character): + self._unprintable_count += 1 + self._character_count += 1 + + def reset(self) -> None: # pragma: no cover + self._unprintable_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return (self._unprintable_count * 8) / self._character_count + + +class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._successive_count: int = 0 + self._character_count: int = 0 + + self._last_latin_character: Optional[str] = None + + def eligible(self, character: str) -> bool: + return character.isalpha() and is_latin(character) + + def feed(self, 
character: str) -> None: + self._character_count += 1 + if ( + self._last_latin_character is not None + and is_accentuated(character) + and is_accentuated(self._last_latin_character) + ): + if character.isupper() and self._last_latin_character.isupper(): + self._successive_count += 1 + # Worse if its the same char duplicated with different accent. + if remove_accent(character) == remove_accent(self._last_latin_character): + self._successive_count += 1 + self._last_latin_character = character + + def reset(self) -> None: # pragma: no cover + self._successive_count = 0 + self._character_count = 0 + self._last_latin_character = None + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return (self._successive_count * 2) / self._character_count + + +class SuspiciousRange(MessDetectorPlugin): + def __init__(self) -> None: + self._suspicious_successive_range_count: int = 0 + self._character_count: int = 0 + self._last_printable_seen: Optional[str] = None + + def eligible(self, character: str) -> bool: + return character.isprintable() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if ( + character.isspace() + or is_punctuation(character) + or character in COMMON_SAFE_ASCII_CHARACTERS + ): + self._last_printable_seen = None + return + + if self._last_printable_seen is None: + self._last_printable_seen = character + return + + unicode_range_a: Optional[str] = unicode_range(self._last_printable_seen) + unicode_range_b: Optional[str] = unicode_range(character) + + if is_suspiciously_successive_range(unicode_range_a, unicode_range_b): + self._suspicious_successive_range_count += 1 + + self._last_printable_seen = character + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._suspicious_successive_range_count = 0 + self._last_printable_seen = None + + @property + def ratio(self) -> float: + if self._character_count <= 24: + return 0.0 + + ratio_of_suspicious_range_usage: float = ( + self._suspicious_successive_range_count * 2 + ) / self._character_count + + return ratio_of_suspicious_range_usage + + +class SuperWeirdWordPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._word_count: int = 0 + self._bad_word_count: int = 0 + self._foreign_long_count: int = 0 + + self._is_current_word_bad: bool = False + self._foreign_long_watch: bool = False + + self._character_count: int = 0 + self._bad_character_count: int = 0 + + self._buffer: str = "" + self._buffer_accent_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if character.isalpha(): + self._buffer += character + if is_accentuated(character): + self._buffer_accent_count += 1 + if ( + self._foreign_long_watch is False + and (is_latin(character) is False or is_accentuated(character)) + and is_cjk(character) is False + and is_hangul(character) is False + and is_katakana(character) is False + and is_hiragana(character) is False + and is_thai(character) is False + ): + self._foreign_long_watch = True + return + if not self._buffer: + return + if ( + character.isspace() or is_punctuation(character) or is_separator(character) + ) and self._buffer: + self._word_count += 1 + buffer_length: int = len(self._buffer) + + self._character_count += buffer_length + + if buffer_length >= 4: + if self._buffer_accent_count / buffer_length > 0.34: + self._is_current_word_bad = True + # Word/Buffer ending with an upper case accentuated letter are so rare, + # that we will consider them all as 
suspicious. Same weight as foreign_long suspicious. + if ( + is_accentuated(self._buffer[-1]) + and self._buffer[-1].isupper() + and all(_.isupper() for _ in self._buffer) is False + ): + self._foreign_long_count += 1 + self._is_current_word_bad = True + if buffer_length >= 24 and self._foreign_long_watch: + camel_case_dst = [ + i + for c, i in zip(self._buffer, range(0, buffer_length)) + if c.isupper() + ] + probable_camel_cased: bool = False + + if camel_case_dst and (len(camel_case_dst) / buffer_length <= 0.3): + probable_camel_cased = True + + if not probable_camel_cased: + self._foreign_long_count += 1 + self._is_current_word_bad = True + + if self._is_current_word_bad: + self._bad_word_count += 1 + self._bad_character_count += len(self._buffer) + self._is_current_word_bad = False + + self._foreign_long_watch = False + self._buffer = "" + self._buffer_accent_count = 0 + elif ( + character not in {"<", ">", "-", "=", "~", "|", "_"} + and character.isdigit() is False + and is_symbol(character) + ): + self._is_current_word_bad = True + self._buffer += character + + def reset(self) -> None: # pragma: no cover + self._buffer = "" + self._is_current_word_bad = False + self._foreign_long_watch = False + self._bad_word_count = 0 + self._word_count = 0 + self._character_count = 0 + self._bad_character_count = 0 + self._foreign_long_count = 0 + + @property + def ratio(self) -> float: + if self._word_count <= 10 and self._foreign_long_count == 0: + return 0.0 + + return self._bad_character_count / self._character_count + + +class CjkInvalidStopPlugin(MessDetectorPlugin): + """ + GB(Chinese) based encoding often render the stop incorrectly when the content does not fit and + can be easily detected. Searching for the overuse of '丅' and '丄'. + """ + + def __init__(self) -> None: + self._wrong_stop_count: int = 0 + self._cjk_character_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if character in {"丅", "丄"}: + self._wrong_stop_count += 1 + return + if is_cjk(character): + self._cjk_character_count += 1 + + def reset(self) -> None: # pragma: no cover + self._wrong_stop_count = 0 + self._cjk_character_count = 0 + + @property + def ratio(self) -> float: + if self._cjk_character_count < 16: + return 0.0 + return self._wrong_stop_count / self._cjk_character_count + + +class ArchaicUpperLowerPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._buf: bool = False + + self._character_count_since_last_sep: int = 0 + + self._successive_upper_lower_count: int = 0 + self._successive_upper_lower_count_final: int = 0 + + self._character_count: int = 0 + + self._last_alpha_seen: Optional[str] = None + self._current_ascii_only: bool = True + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + is_concerned = character.isalpha() and is_case_variable(character) + chunk_sep = is_concerned is False + + if chunk_sep and self._character_count_since_last_sep > 0: + if ( + self._character_count_since_last_sep <= 64 + and character.isdigit() is False + and self._current_ascii_only is False + ): + self._successive_upper_lower_count_final += ( + self._successive_upper_lower_count + ) + + self._successive_upper_lower_count = 0 + self._character_count_since_last_sep = 0 + self._last_alpha_seen = None + self._buf = False + self._character_count += 1 + self._current_ascii_only = True + + return + + if self._current_ascii_only is True and character.isascii() is False: + 
self._current_ascii_only = False + + if self._last_alpha_seen is not None: + if (character.isupper() and self._last_alpha_seen.islower()) or ( + character.islower() and self._last_alpha_seen.isupper() + ): + if self._buf is True: + self._successive_upper_lower_count += 2 + self._buf = False + else: + self._buf = True + else: + self._buf = False + + self._character_count += 1 + self._character_count_since_last_sep += 1 + self._last_alpha_seen = character + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._character_count_since_last_sep = 0 + self._successive_upper_lower_count = 0 + self._successive_upper_lower_count_final = 0 + self._last_alpha_seen = None + self._buf = False + self._current_ascii_only = True + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return self._successive_upper_lower_count_final / self._character_count + + +class ArabicIsolatedFormPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._character_count: int = 0 + self._isolated_form_count: int = 0 + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._isolated_form_count = 0 + + def eligible(self, character: str) -> bool: + return is_arabic(character) + + def feed(self, character: str) -> None: + self._character_count += 1 + + if is_arabic_isolated_form(character): + self._isolated_form_count += 1 + + @property + def ratio(self) -> float: + if self._character_count < 8: + return 0.0 + + isolated_form_usage: float = self._isolated_form_count / self._character_count + + return isolated_form_usage + + +@lru_cache(maxsize=1024) +def is_suspiciously_successive_range( + unicode_range_a: Optional[str], unicode_range_b: Optional[str] +) -> bool: + """ + Determine if two Unicode range seen next to each other can be considered as suspicious. + """ + if unicode_range_a is None or unicode_range_b is None: + return True + + if unicode_range_a == unicode_range_b: + return False + + if "Latin" in unicode_range_a and "Latin" in unicode_range_b: + return False + + if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b: + return False + + # Latin characters can be accompanied with a combining diacritical mark + # eg. Vietnamese. + if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and ( + "Combining" in unicode_range_a or "Combining" in unicode_range_b + ): + return False + + keywords_range_a, keywords_range_b = unicode_range_a.split( + " " + ), unicode_range_b.split(" ") + + for el in keywords_range_a: + if el in UNICODE_SECONDARY_RANGE_KEYWORD: + continue + if el in keywords_range_b: + return False + + # Japanese Exception + range_a_jp_chars, range_b_jp_chars = ( + unicode_range_a + in ( + "Hiragana", + "Katakana", + ), + unicode_range_b in ("Hiragana", "Katakana"), + ) + if (range_a_jp_chars or range_b_jp_chars) and ( + "CJK" in unicode_range_a or "CJK" in unicode_range_b + ): + return False + if range_a_jp_chars and range_b_jp_chars: + return False + + if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b: + if "CJK" in unicode_range_a or "CJK" in unicode_range_b: + return False + if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": + return False + + # Chinese/Japanese use dedicated range for punctuation and/or separators. 
+ if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or ( + unicode_range_a in ["Katakana", "Hiragana"] + and unicode_range_b in ["Katakana", "Hiragana"] + ): + if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b: + return False + if "Forms" in unicode_range_a or "Forms" in unicode_range_b: + return False + if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": + return False + + return True + + +@lru_cache(maxsize=2048) +def mess_ratio( + decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False +) -> float: + """ + Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier. + """ + + detectors: List[MessDetectorPlugin] = [ + md_class() for md_class in MessDetectorPlugin.__subclasses__() + ] + + length: int = len(decoded_sequence) + 1 + + mean_mess_ratio: float = 0.0 + + if length < 512: + intermediary_mean_mess_ratio_calc: int = 32 + elif length <= 1024: + intermediary_mean_mess_ratio_calc = 64 + else: + intermediary_mean_mess_ratio_calc = 128 + + for character, index in zip(decoded_sequence + "\n", range(length)): + for detector in detectors: + if detector.eligible(character): + detector.feed(character) + + if ( + index > 0 and index % intermediary_mean_mess_ratio_calc == 0 + ) or index == length - 1: + mean_mess_ratio = sum(dt.ratio for dt in detectors) + + if mean_mess_ratio >= maximum_threshold: + break + + if debug: + logger = getLogger("charset_normalizer") + + logger.log( + TRACE, + "Mess-detector extended-analysis start. " + f"intermediary_mean_mess_ratio_calc={intermediary_mean_mess_ratio_calc} mean_mess_ratio={mean_mess_ratio} " + f"maximum_threshold={maximum_threshold}", + ) + + if len(decoded_sequence) > 16: + logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}") + logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}") + + for dt in detectors: # pragma: nocover + logger.log(TRACE, f"{dt.__class__}: {dt.ratio}") + + return round(mean_mess_ratio, 3) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/md__mypyc.cpython-38-x86_64-linux-gnu.so b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/md__mypyc.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 000000000..710a1ab79 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/md__mypyc.cpython-38-x86_64-linux-gnu.so differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/models.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/models.py new file mode 100644 index 000000000..a760b9c55 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/models.py @@ -0,0 +1,340 @@ +from encodings.aliases import aliases +from hashlib import sha256 +from json import dumps +from typing import Any, Dict, Iterator, List, Optional, Tuple, Union + +from .constant import TOO_BIG_SEQUENCE +from .utils import iana_name, is_multi_byte_encoding, unicode_range + + +class CharsetMatch: + def __init__( + self, + payload: bytes, + guessed_encoding: str, + mean_mess_ratio: float, + has_sig_or_bom: bool, + languages: "CoherenceMatches", + decoded_payload: Optional[str] = None, + ): + self._payload: bytes = payload + + self._encoding: str = guessed_encoding + self._mean_mess_ratio: float = mean_mess_ratio + self._languages: CoherenceMatches = languages + self._has_sig_or_bom: bool = has_sig_or_bom + self._unicode_ranges: Optional[List[str]] = None + + 
self._leaves: List[CharsetMatch] = [] + self._mean_coherence_ratio: float = 0.0 + + self._output_payload: Optional[bytes] = None + self._output_encoding: Optional[str] = None + + self._string: Optional[str] = decoded_payload + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CharsetMatch): + raise TypeError( + "__eq__ cannot be invoked on {} and {}.".format( + str(other.__class__), str(self.__class__) + ) + ) + return self.encoding == other.encoding and self.fingerprint == other.fingerprint + + def __lt__(self, other: object) -> bool: + """ + Implemented to make sorted available upon CharsetMatches items. + """ + if not isinstance(other, CharsetMatch): + raise ValueError + + chaos_difference: float = abs(self.chaos - other.chaos) + coherence_difference: float = abs(self.coherence - other.coherence) + + # Below 1% difference --> Use Coherence + if chaos_difference < 0.01 and coherence_difference > 0.02: + return self.coherence > other.coherence + elif chaos_difference < 0.01 and coherence_difference <= 0.02: + # When having a difficult decision, use the result that decoded as many multi-byte as possible. + # preserve RAM usage! + if len(self._payload) >= TOO_BIG_SEQUENCE: + return self.chaos < other.chaos + return self.multi_byte_usage > other.multi_byte_usage + + return self.chaos < other.chaos + + @property + def multi_byte_usage(self) -> float: + return 1.0 - (len(str(self)) / len(self.raw)) + + def __str__(self) -> str: + # Lazy Str Loading + if self._string is None: + self._string = str(self._payload, self._encoding, "strict") + return self._string + + def __repr__(self) -> str: + return "".format(self.encoding, self.fingerprint) + + def add_submatch(self, other: "CharsetMatch") -> None: + if not isinstance(other, CharsetMatch) or other == self: + raise ValueError( + "Unable to add instance <{}> as a submatch of a CharsetMatch".format( + other.__class__ + ) + ) + + other._string = None # Unload RAM usage; dirty trick. + self._leaves.append(other) + + @property + def encoding(self) -> str: + return self._encoding + + @property + def encoding_aliases(self) -> List[str]: + """ + Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855. + """ + also_known_as: List[str] = [] + for u, p in aliases.items(): + if self.encoding == u: + also_known_as.append(p) + elif self.encoding == p: + also_known_as.append(u) + return also_known_as + + @property + def bom(self) -> bool: + return self._has_sig_or_bom + + @property + def byte_order_mark(self) -> bool: + return self._has_sig_or_bom + + @property + def languages(self) -> List[str]: + """ + Return the complete list of possible languages found in decoded sequence. + Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'. + """ + return [e[0] for e in self._languages] + + @property + def language(self) -> str: + """ + Most probable language found in decoded sequence. If none were detected or inferred, the property will return + "Unknown". + """ + if not self._languages: + # Trying to infer the language based on the given encoding + # Its either English or we should not pronounce ourselves in certain cases. 
+ if "ascii" in self.could_be_from_charset: + return "English" + + # doing it there to avoid circular import + from charset_normalizer.cd import encoding_languages, mb_encoding_languages + + languages = ( + mb_encoding_languages(self.encoding) + if is_multi_byte_encoding(self.encoding) + else encoding_languages(self.encoding) + ) + + if len(languages) == 0 or "Latin Based" in languages: + return "Unknown" + + return languages[0] + + return self._languages[0][0] + + @property + def chaos(self) -> float: + return self._mean_mess_ratio + + @property + def coherence(self) -> float: + if not self._languages: + return 0.0 + return self._languages[0][1] + + @property + def percent_chaos(self) -> float: + return round(self.chaos * 100, ndigits=3) + + @property + def percent_coherence(self) -> float: + return round(self.coherence * 100, ndigits=3) + + @property + def raw(self) -> bytes: + """ + Original untouched bytes. + """ + return self._payload + + @property + def submatch(self) -> List["CharsetMatch"]: + return self._leaves + + @property + def has_submatch(self) -> bool: + return len(self._leaves) > 0 + + @property + def alphabets(self) -> List[str]: + if self._unicode_ranges is not None: + return self._unicode_ranges + # list detected ranges + detected_ranges: List[Optional[str]] = [ + unicode_range(char) for char in str(self) + ] + # filter and sort + self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) + return self._unicode_ranges + + @property + def could_be_from_charset(self) -> List[str]: + """ + The complete list of encoding that output the exact SAME str result and therefore could be the originating + encoding. + This list does include the encoding available in property 'encoding'. + """ + return [self._encoding] + [m.encoding for m in self._leaves] + + def output(self, encoding: str = "utf_8") -> bytes: + """ + Method to get re-encoded bytes payload using given target encoding. Default to UTF-8. + Any errors will be simply ignored by the encoder NOT replaced. + """ + if self._output_encoding is None or self._output_encoding != encoding: + self._output_encoding = encoding + self._output_payload = str(self).encode(encoding, "replace") + + return self._output_payload # type: ignore + + @property + def fingerprint(self) -> str: + """ + Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one. + """ + return sha256(self.output()).hexdigest() + + +class CharsetMatches: + """ + Container with every CharsetMatch items ordered by default from most probable to the less one. + Act like a list(iterable) but does not implements all related methods. + """ + + def __init__(self, results: Optional[List[CharsetMatch]] = None): + self._results: List[CharsetMatch] = sorted(results) if results else [] + + def __iter__(self) -> Iterator[CharsetMatch]: + yield from self._results + + def __getitem__(self, item: Union[int, str]) -> CharsetMatch: + """ + Retrieve a single item either by its position or encoding name (alias may be used here). + Raise KeyError upon invalid index or encoding not present in results. + """ + if isinstance(item, int): + return self._results[item] + if isinstance(item, str): + item = iana_name(item, False) + for result in self._results: + if item in result.could_be_from_charset: + return result + raise KeyError + + def __len__(self) -> int: + return len(self._results) + + def __bool__(self) -> bool: + return len(self._results) > 0 + + def append(self, item: CharsetMatch) -> None: + """ + Insert a single match. 
Will be inserted accordingly to preserve sort. + Can be inserted as a submatch. + """ + if not isinstance(item, CharsetMatch): + raise ValueError( + "Cannot append instance '{}' to CharsetMatches".format( + str(item.__class__) + ) + ) + # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage) + if len(item.raw) <= TOO_BIG_SEQUENCE: + for match in self._results: + if match.fingerprint == item.fingerprint and match.chaos == item.chaos: + match.add_submatch(item) + return + self._results.append(item) + self._results = sorted(self._results) + + def best(self) -> Optional["CharsetMatch"]: + """ + Simply return the first match. Strict equivalent to matches[0]. + """ + if not self._results: + return None + return self._results[0] + + def first(self) -> Optional["CharsetMatch"]: + """ + Redundant method, call the method best(). Kept for BC reasons. + """ + return self.best() + + +CoherenceMatch = Tuple[str, float] +CoherenceMatches = List[CoherenceMatch] + + +class CliDetectionResult: + def __init__( + self, + path: str, + encoding: Optional[str], + encoding_aliases: List[str], + alternative_encodings: List[str], + language: str, + alphabets: List[str], + has_sig_or_bom: bool, + chaos: float, + coherence: float, + unicode_path: Optional[str], + is_preferred: bool, + ): + self.path: str = path + self.unicode_path: Optional[str] = unicode_path + self.encoding: Optional[str] = encoding + self.encoding_aliases: List[str] = encoding_aliases + self.alternative_encodings: List[str] = alternative_encodings + self.language: str = language + self.alphabets: List[str] = alphabets + self.has_sig_or_bom: bool = has_sig_or_bom + self.chaos: float = chaos + self.coherence: float = coherence + self.is_preferred: bool = is_preferred + + @property + def __dict__(self) -> Dict[str, Any]: # type: ignore + return { + "path": self.path, + "encoding": self.encoding, + "encoding_aliases": self.encoding_aliases, + "alternative_encodings": self.alternative_encodings, + "language": self.language, + "alphabets": self.alphabets, + "has_sig_or_bom": self.has_sig_or_bom, + "chaos": self.chaos, + "coherence": self.coherence, + "unicode_path": self.unicode_path, + "is_preferred": self.is_preferred, + } + + def to_json(self) -> str: + return dumps(self.__dict__, ensure_ascii=True, indent=4) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/py.typed b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/utils.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/utils.py new file mode 100644 index 000000000..e5cbbf4c0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/utils.py @@ -0,0 +1,421 @@ +import importlib +import logging +import unicodedata +from codecs import IncrementalDecoder +from encodings.aliases import aliases +from functools import lru_cache +from re import findall +from typing import Generator, List, Optional, Set, Tuple, Union + +from _multibytecodec import MultibyteIncrementalDecoder + +from .constant import ( + ENCODING_MARKS, + IANA_SUPPORTED_SIMILAR, + RE_POSSIBLE_ENCODING_INDICATION, + UNICODE_RANGES_COMBINED, + UNICODE_SECONDARY_RANGE_KEYWORD, + UTF8_MAXIMAL_ALLOCATION, +) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_accentuated(character: str) -> bool: + try: + description: str = unicodedata.name(character) + except 
ValueError: + return False + return ( + "WITH GRAVE" in description + or "WITH ACUTE" in description + or "WITH CEDILLA" in description + or "WITH DIAERESIS" in description + or "WITH CIRCUMFLEX" in description + or "WITH TILDE" in description + or "WITH MACRON" in description + or "WITH RING ABOVE" in description + ) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def remove_accent(character: str) -> str: + decomposed: str = unicodedata.decomposition(character) + if not decomposed: + return character + + codes: List[str] = decomposed.split(" ") + + return chr(int(codes[0], 16)) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def unicode_range(character: str) -> Optional[str]: + """ + Retrieve the Unicode range official name from a single character. + """ + character_ord: int = ord(character) + + for range_name, ord_range in UNICODE_RANGES_COMBINED.items(): + if character_ord in ord_range: + return range_name + + return None + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_latin(character: str) -> bool: + try: + description: str = unicodedata.name(character) + except ValueError: + return False + return "LATIN" in description + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_punctuation(character: str) -> bool: + character_category: str = unicodedata.category(character) + + if "P" in character_category: + return True + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Punctuation" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_symbol(character: str) -> bool: + character_category: str = unicodedata.category(character) + + if "S" in character_category or "N" in character_category: + return True + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Forms" in character_range and character_category != "Lo" + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_emoticon(character: str) -> bool: + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Emoticons" in character_range or "Pictographs" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_separator(character: str) -> bool: + if character.isspace() or character in {"|", "+", "<", ">"}: + return True + + character_category: str = unicodedata.category(character) + + return "Z" in character_category or character_category in {"Po", "Pd", "Pc"} + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_case_variable(character: str) -> bool: + return character.islower() != character.isupper() + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_cjk(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "CJK" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hiragana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "HIRAGANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_katakana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "KATAKANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hangul(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "HANGUL" in character_name + + 
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_thai(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "THAI" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_arabic(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "ARABIC" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_arabic_isolated_form(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "ARABIC" in character_name and "ISOLATED FORM" in character_name + + +@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED)) +def is_unicode_range_secondary(range_name: str) -> bool: + return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_unprintable(character: str) -> bool: + return ( + character.isspace() is False # includes \n \t \r \v + and character.isprintable() is False + and character != "\x1A" # Why? Its the ASCII substitute character. + and character != "\ufeff" # bug discovered in Python, + # Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space. + ) + + +def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> Optional[str]: + """ + Extract using ASCII-only decoder any specified encoding in the first n-bytes. + """ + if not isinstance(sequence, bytes): + raise TypeError + + seq_len: int = len(sequence) + + results: List[str] = findall( + RE_POSSIBLE_ENCODING_INDICATION, + sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"), + ) + + if len(results) == 0: + return None + + for specified_encoding in results: + specified_encoding = specified_encoding.lower().replace("-", "_") + + encoding_alias: str + encoding_iana: str + + for encoding_alias, encoding_iana in aliases.items(): + if encoding_alias == specified_encoding: + return encoding_iana + if encoding_iana == specified_encoding: + return encoding_iana + + return None + + +@lru_cache(maxsize=128) +def is_multi_byte_encoding(name: str) -> bool: + """ + Verify is a specific encoding is a multi byte one based on it IANA name + """ + return name in { + "utf_8", + "utf_8_sig", + "utf_16", + "utf_16_be", + "utf_16_le", + "utf_32", + "utf_32_le", + "utf_32_be", + "utf_7", + } or issubclass( + importlib.import_module("encodings.{}".format(name)).IncrementalDecoder, + MultibyteIncrementalDecoder, + ) + + +def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]: + """ + Identify and extract SIG/BOM in given sequence. 
+ """ + + for iana_encoding in ENCODING_MARKS: + marks: Union[bytes, List[bytes]] = ENCODING_MARKS[iana_encoding] + + if isinstance(marks, bytes): + marks = [marks] + + for mark in marks: + if sequence.startswith(mark): + return iana_encoding, mark + + return None, b"" + + +def should_strip_sig_or_bom(iana_encoding: str) -> bool: + return iana_encoding not in {"utf_16", "utf_32"} + + +def iana_name(cp_name: str, strict: bool = True) -> str: + cp_name = cp_name.lower().replace("-", "_") + + encoding_alias: str + encoding_iana: str + + for encoding_alias, encoding_iana in aliases.items(): + if cp_name in [encoding_alias, encoding_iana]: + return encoding_iana + + if strict: + raise ValueError("Unable to retrieve IANA for '{}'".format(cp_name)) + + return cp_name + + +def range_scan(decoded_sequence: str) -> List[str]: + ranges: Set[str] = set() + + for character in decoded_sequence: + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + continue + + ranges.add(character_range) + + return list(ranges) + + +def cp_similarity(iana_name_a: str, iana_name_b: str) -> float: + if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b): + return 0.0 + + decoder_a = importlib.import_module( + "encodings.{}".format(iana_name_a) + ).IncrementalDecoder + decoder_b = importlib.import_module( + "encodings.{}".format(iana_name_b) + ).IncrementalDecoder + + id_a: IncrementalDecoder = decoder_a(errors="ignore") + id_b: IncrementalDecoder = decoder_b(errors="ignore") + + character_match_count: int = 0 + + for i in range(255): + to_be_decoded: bytes = bytes([i]) + if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded): + character_match_count += 1 + + return character_match_count / 254 + + +def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool: + """ + Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using + the function cp_similarity. + """ + return ( + iana_name_a in IANA_SUPPORTED_SIMILAR + and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a] + ) + + +def set_logging_handler( + name: str = "charset_normalizer", + level: int = logging.INFO, + format_string: str = "%(asctime)s | %(levelname)s | %(message)s", +) -> None: + logger = logging.getLogger(name) + logger.setLevel(level) + + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter(format_string)) + logger.addHandler(handler) + + +def cut_sequence_chunks( + sequences: bytes, + encoding_iana: str, + offsets: range, + chunk_size: int, + bom_or_sig_available: bool, + strip_sig_or_bom: bool, + sig_payload: bytes, + is_multi_byte_decoder: bool, + decoded_payload: Optional[str] = None, +) -> Generator[str, None, None]: + if decoded_payload and is_multi_byte_decoder is False: + for i in offsets: + chunk = decoded_payload[i : i + chunk_size] + if not chunk: + break + yield chunk + else: + for i in offsets: + chunk_end = i + chunk_size + if chunk_end > len(sequences) + 8: + continue + + cut_sequence = sequences[i : i + chunk_size] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + chunk = cut_sequence.decode( + encoding_iana, + errors="ignore" if is_multi_byte_decoder else "strict", + ) + + # multi-byte bad cutting detector and adjustment + # not the cleanest way to perform that fix but clever enough for now. 
+ if is_multi_byte_decoder and i > 0: + chunk_partial_size_chk: int = min(chunk_size, 16) + + if ( + decoded_payload + and chunk[:chunk_partial_size_chk] not in decoded_payload + ): + for j in range(i, i - 4, -1): + cut_sequence = sequences[j:chunk_end] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + chunk = cut_sequence.decode(encoding_iana, errors="ignore") + + if chunk[:chunk_partial_size_chk] in decoded_payload: + break + + yield chunk diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/version.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/version.py new file mode 100644 index 000000000..5a4da4ff4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/charset_normalizer/version.py @@ -0,0 +1,6 @@ +""" +Expose version +""" + +__version__ = "3.3.2" +VERSION = __version__.split(".") diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/INSTALLER b/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/INSTALLER new file mode 100644 index 000000000..ce3133dc1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/INSTALLER @@ -0,0 +1 @@ +Poetry 2.2.1 \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/LICENSE.rst b/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/LICENSE.rst new file mode 100644 index 000000000..d12a84918 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2014 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
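Editor's aside (not part of the vendored diff): the charset_normalizer/utils.py helpers added above are small, pure functions, so a brief usage sketch may help when reading the detection code elsewhere in this diff. This assumes the charset_normalizer 3.3.x build vendored here; the commented outputs are indicative and not verified against this exact install.

    from charset_normalizer.utils import (
        iana_name,
        identify_sig_or_bom,
        is_accentuated,
        unicode_range,
    )

    # Character-level probes used by the mess/coherence heuristics.
    print(unicode_range("é"))      # e.g. "Latin-1 Supplement"
    print(is_accentuated("é"))     # True ("WITH ACUTE" appears in the Unicode name)

    # Normalize a codec alias to the IANA-style name used throughout the package.
    print(iana_name("UTF8"))       # "utf_8"

    # Detect a BOM/signature at the start of a byte payload.
    payload = "hello".encode("utf_8_sig")
    encoding, mark = identify_sig_or_bom(payload)
    print(encoding, mark)          # "utf_8", b"\xef\xbb\xbf"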
diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/METADATA b/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/METADATA new file mode 100644 index 000000000..7a6bbb24b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/METADATA @@ -0,0 +1,103 @@ +Metadata-Version: 2.1 +Name: click +Version: 8.1.7 +Summary: Composable command line interface toolkit +Home-page: https://palletsprojects.com/p/click/ +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://click.palletsprojects.com/ +Project-URL: Changes, https://click.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/click/ +Project-URL: Issue Tracker, https://github.com/pallets/click/issues/ +Project-URL: Chat, https://discord.gg/pallets +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: colorama ; platform_system == "Windows" +Requires-Dist: importlib-metadata ; python_version < "3.8" + +\$ click\_ +========== + +Click is a Python package for creating beautiful command line interfaces +in a composable way with as little code as necessary. It's the "Command +Line Interface Creation Kit". It's highly configurable but comes with +sensible defaults out of the box. + +It aims to make the process of writing command line tools quick and fun +while also preventing any frustration caused by the inability to +implement an intended CLI API. + +Click in three points: + +- Arbitrary nesting of commands +- Automatic help page generation +- Supports lazy loading of subcommands at runtime + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U click + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +A Simple Example +---------------- + +.. code-block:: python + + import click + + @click.command() + @click.option("--count", default=1, help="Number of greetings.") + @click.option("--name", prompt="Your name", help="The person to greet.") + def hello(count, name): + """Simple program that greets NAME for a total of COUNT times.""" + for _ in range(count): + click.echo(f"Hello, {name}!") + + if __name__ == '__main__': + hello() + +.. code-block:: text + + $ python hello.py --count=3 + Your name: Click + Hello, Click! + Hello, Click! + Hello, Click! + + +Donate +------ + +The Pallets organization develops and supports Click and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. 
_please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://click.palletsprojects.com/ +- Changes: https://click.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/click/ +- Source Code: https://github.com/pallets/click +- Issue Tracker: https://github.com/pallets/click/issues +- Chat: https://discord.gg/pallets diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/RECORD b/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/RECORD new file mode 100644 index 000000000..fff748e8a --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/RECORD @@ -0,0 +1,23 @@ +click/__init__.py,sha256=YDDbjm406dTOA0V8bTtdGnhN7zj5j-_dFRewZF_pLvw,3138 +click/_compat.py,sha256=5318agQpbt4kroKsbqDOYpTSWzL_YCZVUQiTT04yXmc,18744 +click/_termui_impl.py,sha256=3dFYv4445Nw-rFvZOTBMBPYwB1bxnmNk9Du6Dm_oBSU,24069 +click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353 +click/_winconsole.py,sha256=5ju3jQkcZD0W27WEMGqmEP4y_crUVzPCqsX_FYb7BO0,7860 +click/core.py,sha256=j6oEWtGgGna8JarD6WxhXmNnxLnfRjwXglbBc-8jr7U,114086 +click/decorators.py,sha256=-ZlbGYgV-oI8jr_oH4RpuL1PFS-5QmeuEAsLDAYgxtw,18719 +click/exceptions.py,sha256=fyROO-47HWFDjt2qupo7A3J32VlpM-ovJnfowu92K3s,9273 +click/formatting.py,sha256=Frf0-5W33-loyY_i9qrwXR8-STnW3m5gvyxLVUdyxyk,9706 +click/globals.py,sha256=TP-qM88STzc7f127h35TD_v920FgfOD2EwzqA0oE8XU,1961 +click/parser.py,sha256=LKyYQE9ZLj5KgIDXkrcTHQRXIggfoivX14_UVIn56YA,19067 +click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +click/shell_completion.py,sha256=Ty3VM_ts0sQhj6u7eFTiLwHPoTgcXTGEAUg2OpLqYKw,18460 +click/termui.py,sha256=H7Q8FpmPelhJ2ovOhfCRhjMtCpNyjFXryAMLZODqsdc,28324 +click/testing.py,sha256=1Qd4kS5bucn1hsNIRryd0WtTMuCpkA93grkWxT8POsU,16084 +click/types.py,sha256=TZvz3hKvBztf-Hpa2enOmP4eznSPLzijjig5b_0XMxE,36391 +click/utils.py,sha256=1476UduUNY6UePGU4m18uzVHLt1sKM2PP3yWsQhbItM,20298 +click-8.1.7.dist-info/LICENSE.rst,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 +click-8.1.7.dist-info/METADATA,sha256=qIMevCxGA9yEmJOM_4WHuUJCwWpsIEVbCPOhs45YPN4,3014 +click-8.1.7.dist-info/WHEEL,sha256=5sUXSg9e4bi7lTLOHcm6QEYwO5TIF1TNbTSVFVjcJcc,92 +click-8.1.7.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 +click-8.1.7.dist-info/INSTALLER,sha256=sz0Tnp99msIbbLB51q7U9nA6AvRKcsOeUKhrHH0Ulg4,12 +click-8.1.7.dist-info/RECORD,, diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/WHEEL b/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/WHEEL new file mode 100644 index 000000000..2c08da084 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/top_level.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/top_level.txt new file mode 100644 index 000000000..dca9a9096 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click-8.1.7.dist-info/top_level.txt @@ -0,0 +1 @@ +click diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/__init__.py new file mode 100644 index 000000000..9a1dab048 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/__init__.py @@ 
-0,0 +1,73 @@ +""" +Click is a simple Python module inspired by the stdlib optparse to make +writing command line scripts fun. Unlike other modules, it's based +around a simple API that does not come with too much magic and is +composable. +""" +from .core import Argument as Argument +from .core import BaseCommand as BaseCommand +from .core import Command as Command +from .core import CommandCollection as CommandCollection +from .core import Context as Context +from .core import Group as Group +from .core import MultiCommand as MultiCommand +from .core import Option as Option +from .core import Parameter as Parameter +from .decorators import argument as argument +from .decorators import command as command +from .decorators import confirmation_option as confirmation_option +from .decorators import group as group +from .decorators import help_option as help_option +from .decorators import make_pass_decorator as make_pass_decorator +from .decorators import option as option +from .decorators import pass_context as pass_context +from .decorators import pass_obj as pass_obj +from .decorators import password_option as password_option +from .decorators import version_option as version_option +from .exceptions import Abort as Abort +from .exceptions import BadArgumentUsage as BadArgumentUsage +from .exceptions import BadOptionUsage as BadOptionUsage +from .exceptions import BadParameter as BadParameter +from .exceptions import ClickException as ClickException +from .exceptions import FileError as FileError +from .exceptions import MissingParameter as MissingParameter +from .exceptions import NoSuchOption as NoSuchOption +from .exceptions import UsageError as UsageError +from .formatting import HelpFormatter as HelpFormatter +from .formatting import wrap_text as wrap_text +from .globals import get_current_context as get_current_context +from .parser import OptionParser as OptionParser +from .termui import clear as clear +from .termui import confirm as confirm +from .termui import echo_via_pager as echo_via_pager +from .termui import edit as edit +from .termui import getchar as getchar +from .termui import launch as launch +from .termui import pause as pause +from .termui import progressbar as progressbar +from .termui import prompt as prompt +from .termui import secho as secho +from .termui import style as style +from .termui import unstyle as unstyle +from .types import BOOL as BOOL +from .types import Choice as Choice +from .types import DateTime as DateTime +from .types import File as File +from .types import FLOAT as FLOAT +from .types import FloatRange as FloatRange +from .types import INT as INT +from .types import IntRange as IntRange +from .types import ParamType as ParamType +from .types import Path as Path +from .types import STRING as STRING +from .types import Tuple as Tuple +from .types import UNPROCESSED as UNPROCESSED +from .types import UUID as UUID +from .utils import echo as echo +from .utils import format_filename as format_filename +from .utils import get_app_dir as get_app_dir +from .utils import get_binary_stream as get_binary_stream +from .utils import get_text_stream as get_text_stream +from .utils import open_file as open_file + +__version__ = "8.1.7" diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/_compat.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/_compat.py new file mode 100644 index 000000000..23f886659 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/_compat.py @@ -0,0 +1,623 @@ +import codecs +import io 
+import os +import re +import sys +import typing as t +from weakref import WeakKeyDictionary + +CYGWIN = sys.platform.startswith("cygwin") +WIN = sys.platform.startswith("win") +auto_wrap_for_ansi: t.Optional[t.Callable[[t.TextIO], t.TextIO]] = None +_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") + + +def _make_text_stream( + stream: t.BinaryIO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if encoding is None: + encoding = get_best_encoding(stream) + if errors is None: + errors = "replace" + return _NonClosingTextIOWrapper( + stream, + encoding, + errors, + line_buffering=True, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def is_ascii_encoding(encoding: str) -> bool: + """Checks if a given encoding is ascii.""" + try: + return codecs.lookup(encoding).name == "ascii" + except LookupError: + return False + + +def get_best_encoding(stream: t.IO[t.Any]) -> str: + """Returns the default stream encoding if not found.""" + rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() + if is_ascii_encoding(rv): + return "utf-8" + return rv + + +class _NonClosingTextIOWrapper(io.TextIOWrapper): + def __init__( + self, + stream: t.BinaryIO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, + force_writable: bool = False, + **extra: t.Any, + ) -> None: + self._stream = stream = t.cast( + t.BinaryIO, _FixupStream(stream, force_readable, force_writable) + ) + super().__init__(stream, encoding, errors, **extra) + + def __del__(self) -> None: + try: + self.detach() + except Exception: + pass + + def isatty(self) -> bool: + # https://bitbucket.org/pypy/pypy/issue/1803 + return self._stream.isatty() + + +class _FixupStream: + """The new io interface needs more from streams than streams + traditionally implement. As such, this fix-up code is necessary in + some circumstances. + + The forcing of readable and writable flags are there because some tools + put badly patched objects on sys (one such offender are certain version + of jupyter notebook). 
+ """ + + def __init__( + self, + stream: t.BinaryIO, + force_readable: bool = False, + force_writable: bool = False, + ): + self._stream = stream + self._force_readable = force_readable + self._force_writable = force_writable + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._stream, name) + + def read1(self, size: int) -> bytes: + f = getattr(self._stream, "read1", None) + + if f is not None: + return t.cast(bytes, f(size)) + + return self._stream.read(size) + + def readable(self) -> bool: + if self._force_readable: + return True + x = getattr(self._stream, "readable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.read(0) + except Exception: + return False + return True + + def writable(self) -> bool: + if self._force_writable: + return True + x = getattr(self._stream, "writable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.write("") # type: ignore + except Exception: + try: + self._stream.write(b"") + except Exception: + return False + return True + + def seekable(self) -> bool: + x = getattr(self._stream, "seekable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.seek(self._stream.tell()) + except Exception: + return False + return True + + +def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool: + try: + return isinstance(stream.read(0), bytes) + except Exception: + return default + # This happens in some cases where the stream was already + # closed. In this case, we assume the default. + + +def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool: + try: + stream.write(b"") + except Exception: + try: + stream.write("") + return False + except Exception: + pass + return default + return True + + +def _find_binary_reader(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_reader(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_reader(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _find_binary_writer(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_writer(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_writer(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _stream_is_misconfigured(stream: t.TextIO) -> bool: + """A stream is misconfigured if its encoding is ASCII.""" + # If the stream does not have an encoding set, we assume it's set + # to ASCII. This appears to happen in certain unittest + # environments. It's not quite clear what the correct behavior is + # but this at least will force Click to recover somehow. 
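+    # (Editor's note, illustrative: a common real-world case is a POSIX "C"
+    # locale, where sys.stdout.encoding is reported as "ANSI_X3.4-1968",
+    # which the codecs registry resolves to "ascii".)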
+ return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") + + +def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: t.Optional[str]) -> bool: + """A stream attribute is compatible if it is equal to the + desired value or the desired value is unset and the attribute + has a value. + """ + stream_value = getattr(stream, attr, None) + return stream_value == value or (value is None and stream_value is not None) + + +def _is_compatible_text_stream( + stream: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] +) -> bool: + """Check if a stream's encoding and errors attributes are + compatible with the desired values. + """ + return _is_compat_stream_attr( + stream, "encoding", encoding + ) and _is_compat_stream_attr(stream, "errors", errors) + + +def _force_correct_text_stream( + text_stream: t.IO[t.Any], + encoding: t.Optional[str], + errors: t.Optional[str], + is_binary: t.Callable[[t.IO[t.Any], bool], bool], + find_binary: t.Callable[[t.IO[t.Any]], t.Optional[t.BinaryIO]], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if is_binary(text_stream, False): + binary_reader = t.cast(t.BinaryIO, text_stream) + else: + text_stream = t.cast(t.TextIO, text_stream) + # If the stream looks compatible, and won't default to a + # misconfigured ascii encoding, return it as-is. + if _is_compatible_text_stream(text_stream, encoding, errors) and not ( + encoding is None and _stream_is_misconfigured(text_stream) + ): + return text_stream + + # Otherwise, get the underlying binary reader. + possible_binary_reader = find_binary(text_stream) + + # If that's not possible, silently use the original reader + # and get mojibake instead of exceptions. + if possible_binary_reader is None: + return text_stream + + binary_reader = possible_binary_reader + + # Default errors to replace instead of strict in order to get + # something that works. + if errors is None: + errors = "replace" + + # Wrap the binary stream in a text stream with the correct + # encoding parameters. 
+ return _make_text_stream( + binary_reader, + encoding, + errors, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def _force_correct_text_reader( + text_reader: t.IO[t.Any], + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_reader, + encoding, + errors, + _is_binary_reader, + _find_binary_reader, + force_readable=force_readable, + ) + + +def _force_correct_text_writer( + text_writer: t.IO[t.Any], + encoding: t.Optional[str], + errors: t.Optional[str], + force_writable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_writer, + encoding, + errors, + _is_binary_writer, + _find_binary_writer, + force_writable=force_writable, + ) + + +def get_binary_stdin() -> t.BinaryIO: + reader = _find_binary_reader(sys.stdin) + if reader is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdin.") + return reader + + +def get_binary_stdout() -> t.BinaryIO: + writer = _find_binary_writer(sys.stdout) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdout.") + return writer + + +def get_binary_stderr() -> t.BinaryIO: + writer = _find_binary_writer(sys.stderr) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stderr.") + return writer + + +def get_text_stdin( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdin, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) + + +def get_text_stdout( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdout, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) + + +def get_text_stderr( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stderr, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) + + +def _wrap_io_open( + file: t.Union[str, "os.PathLike[str]", int], + mode: str, + encoding: t.Optional[str], + errors: t.Optional[str], +) -> t.IO[t.Any]: + """Handles not passing ``encoding`` and ``errors`` in binary mode.""" + if "b" in mode: + return open(file, mode) + + return open(file, mode, encoding=encoding, errors=errors) + + +def open_stream( + filename: "t.Union[str, os.PathLike[str]]", + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + atomic: bool = False, +) -> t.Tuple[t.IO[t.Any], bool]: + binary = "b" in mode + filename = os.fspath(filename) + + # Standard streams first. These are simple because they ignore the + # atomic flag. Use fsdecode to handle Path("-"). + if os.fsdecode(filename) == "-": + if any(m in mode for m in ["w", "a", "x"]): + if binary: + return get_binary_stdout(), False + return get_text_stdout(encoding=encoding, errors=errors), False + if binary: + return get_binary_stdin(), False + return get_text_stdin(encoding=encoding, errors=errors), False + + # Non-atomic writes directly go out through the regular open functions. 
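+    # (Editor's note, illustrative: atomic=True is what the public helpers
+    # click.open_file(..., atomic=True) and click.File(mode="w", atomic=True)
+    # forward down to this function, so a partially written temporary file
+    # never replaces the target.)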
+ if not atomic: + return _wrap_io_open(filename, mode, encoding, errors), True + + # Some usability stuff for atomic writes + if "a" in mode: + raise ValueError( + "Appending to an existing file is not supported, because that" + " would involve an expensive `copy`-operation to a temporary" + " file. Open the file in normal `w`-mode and copy explicitly" + " if that's what you're after." + ) + if "x" in mode: + raise ValueError("Use the `overwrite`-parameter instead.") + if "w" not in mode: + raise ValueError("Atomic writes only make sense with `w`-mode.") + + # Atomic writes are more complicated. They work by opening a file + # as a proxy in the same folder and then using the fdopen + # functionality to wrap it in a Python file. Then we wrap it in an + # atomic file that moves the file over on close. + import errno + import random + + try: + perm: t.Optional[int] = os.stat(filename).st_mode + except OSError: + perm = None + + flags = os.O_RDWR | os.O_CREAT | os.O_EXCL + + if binary: + flags |= getattr(os, "O_BINARY", 0) + + while True: + tmp_filename = os.path.join( + os.path.dirname(filename), + f".__atomic-write{random.randrange(1 << 32):08x}", + ) + try: + fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) + break + except OSError as e: + if e.errno == errno.EEXIST or ( + os.name == "nt" + and e.errno == errno.EACCES + and os.path.isdir(e.filename) + and os.access(e.filename, os.W_OK) + ): + continue + raise + + if perm is not None: + os.chmod(tmp_filename, perm) # in case perm includes bits in umask + + f = _wrap_io_open(fd, mode, encoding, errors) + af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) + return t.cast(t.IO[t.Any], af), True + + +class _AtomicFile: + def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None: + self._f = f + self._tmp_filename = tmp_filename + self._real_filename = real_filename + self.closed = False + + @property + def name(self) -> str: + return self._real_filename + + def close(self, delete: bool = False) -> None: + if self.closed: + return + self._f.close() + os.replace(self._tmp_filename, self._real_filename) + self.closed = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._f, name) + + def __enter__(self) -> "_AtomicFile": + return self + + def __exit__(self, exc_type: t.Optional[t.Type[BaseException]], *_: t.Any) -> None: + self.close(delete=exc_type is not None) + + def __repr__(self) -> str: + return repr(self._f) + + +def strip_ansi(value: str) -> str: + return _ansi_re.sub("", value) + + +def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool: + while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): + stream = stream._stream + + return stream.__class__.__module__.startswith("ipykernel.") + + +def should_strip_ansi( + stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None +) -> bool: + if color is None: + if stream is None: + stream = sys.stdin + return not isatty(stream) and not _is_jupyter_kernel_output(stream) + return not color + + +# On Windows, wrap the output streams with colorama to support ANSI +# color codes. 
+# NOTE: double check is needed so mypy does not analyze this on Linux +if sys.platform.startswith("win") and WIN: + from ._winconsole import _get_windows_console_stream + + def _get_argv_encoding() -> str: + import locale + + return locale.getpreferredencoding() + + _ansi_stream_wrappers: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def auto_wrap_for_ansi( # noqa: F811 + stream: t.TextIO, color: t.Optional[bool] = None + ) -> t.TextIO: + """Support ANSI color and style codes on Windows by wrapping a + stream with colorama. + """ + try: + cached = _ansi_stream_wrappers.get(stream) + except Exception: + cached = None + + if cached is not None: + return cached + + import colorama + + strip = should_strip_ansi(stream, color) + ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) + rv = t.cast(t.TextIO, ansi_wrapper.stream) + _write = rv.write + + def _safe_write(s): + try: + return _write(s) + except BaseException: + ansi_wrapper.reset_all() + raise + + rv.write = _safe_write + + try: + _ansi_stream_wrappers[stream] = rv + except Exception: + pass + + return rv + +else: + + def _get_argv_encoding() -> str: + return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding() + + def _get_windows_console_stream( + f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] + ) -> t.Optional[t.TextIO]: + return None + + +def term_len(x: str) -> int: + return len(strip_ansi(x)) + + +def isatty(stream: t.IO[t.Any]) -> bool: + try: + return stream.isatty() + except Exception: + return False + + +def _make_cached_stream_func( + src_func: t.Callable[[], t.Optional[t.TextIO]], + wrapper_func: t.Callable[[], t.TextIO], +) -> t.Callable[[], t.Optional[t.TextIO]]: + cache: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def func() -> t.Optional[t.TextIO]: + stream = src_func() + + if stream is None: + return None + + try: + rv = cache.get(stream) + except Exception: + rv = None + if rv is not None: + return rv + rv = wrapper_func() + try: + cache[stream] = rv + except Exception: + pass + return rv + + return func + + +_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) +_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) +_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) + + +binary_streams: t.Mapping[str, t.Callable[[], t.BinaryIO]] = { + "stdin": get_binary_stdin, + "stdout": get_binary_stdout, + "stderr": get_binary_stderr, +} + +text_streams: t.Mapping[ + str, t.Callable[[t.Optional[str], t.Optional[str]], t.TextIO] +] = { + "stdin": get_text_stdin, + "stdout": get_text_stdout, + "stderr": get_text_stderr, +} diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/_termui_impl.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/_termui_impl.py new file mode 100644 index 000000000..f74465775 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/_termui_impl.py @@ -0,0 +1,739 @@ +""" +This module contains implementations for the termui module. To keep the +import time of Click down, some infrequently used functionality is +placed in this module and only imported as needed. 
+""" +import contextlib +import math +import os +import sys +import time +import typing as t +from gettext import gettext as _ +from io import StringIO +from types import TracebackType + +from ._compat import _default_text_stdout +from ._compat import CYGWIN +from ._compat import get_best_encoding +from ._compat import isatty +from ._compat import open_stream +from ._compat import strip_ansi +from ._compat import term_len +from ._compat import WIN +from .exceptions import ClickException +from .utils import echo + +V = t.TypeVar("V") + +if os.name == "nt": + BEFORE_BAR = "\r" + AFTER_BAR = "\n" +else: + BEFORE_BAR = "\r\033[?25l" + AFTER_BAR = "\033[?25h\n" + + +class ProgressBar(t.Generic[V]): + def __init__( + self, + iterable: t.Optional[t.Iterable[V]], + length: t.Optional[int] = None, + fill_char: str = "#", + empty_char: str = " ", + bar_template: str = "%(bar)s", + info_sep: str = " ", + show_eta: bool = True, + show_percent: t.Optional[bool] = None, + show_pos: bool = False, + item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, + label: t.Optional[str] = None, + file: t.Optional[t.TextIO] = None, + color: t.Optional[bool] = None, + update_min_steps: int = 1, + width: int = 30, + ) -> None: + self.fill_char = fill_char + self.empty_char = empty_char + self.bar_template = bar_template + self.info_sep = info_sep + self.show_eta = show_eta + self.show_percent = show_percent + self.show_pos = show_pos + self.item_show_func = item_show_func + self.label: str = label or "" + + if file is None: + file = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. + if file is None: + file = StringIO() + + self.file = file + self.color = color + self.update_min_steps = update_min_steps + self._completed_intervals = 0 + self.width: int = width + self.autowidth: bool = width == 0 + + if length is None: + from operator import length_hint + + length = length_hint(iterable, -1) + + if length == -1: + length = None + if iterable is None: + if length is None: + raise TypeError("iterable or length is required") + iterable = t.cast(t.Iterable[V], range(length)) + self.iter: t.Iterable[V] = iter(iterable) + self.length = length + self.pos = 0 + self.avg: t.List[float] = [] + self.last_eta: float + self.start: float + self.start = self.last_eta = time.time() + self.eta_known: bool = False + self.finished: bool = False + self.max_width: t.Optional[int] = None + self.entered: bool = False + self.current_item: t.Optional[V] = None + self.is_hidden: bool = not isatty(self.file) + self._last_line: t.Optional[str] = None + + def __enter__(self) -> "ProgressBar[V]": + self.entered = True + self.render_progress() + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self.render_finish() + + def __iter__(self) -> t.Iterator[V]: + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + self.render_progress() + return self.generator() + + def __next__(self) -> V: + # Iteration is defined in terms of a generator function, + # returned by iter(self); use that to define next(). This works + # because `self.iter` is an iterable consumed by that generator, + # so it is re-entry safe. Calling `next(self.generator())` + # twice works and does "what you want". 
+ return next(iter(self)) + + def render_finish(self) -> None: + if self.is_hidden: + return + self.file.write(AFTER_BAR) + self.file.flush() + + @property + def pct(self) -> float: + if self.finished: + return 1.0 + return min(self.pos / (float(self.length or 1) or 1), 1.0) + + @property + def time_per_iteration(self) -> float: + if not self.avg: + return 0.0 + return sum(self.avg) / float(len(self.avg)) + + @property + def eta(self) -> float: + if self.length is not None and not self.finished: + return self.time_per_iteration * (self.length - self.pos) + return 0.0 + + def format_eta(self) -> str: + if self.eta_known: + t = int(self.eta) + seconds = t % 60 + t //= 60 + minutes = t % 60 + t //= 60 + hours = t % 24 + t //= 24 + if t > 0: + return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" + else: + return f"{hours:02}:{minutes:02}:{seconds:02}" + return "" + + def format_pos(self) -> str: + pos = str(self.pos) + if self.length is not None: + pos += f"/{self.length}" + return pos + + def format_pct(self) -> str: + return f"{int(self.pct * 100): 4}%"[1:] + + def format_bar(self) -> str: + if self.length is not None: + bar_length = int(self.pct * self.width) + bar = self.fill_char * bar_length + bar += self.empty_char * (self.width - bar_length) + elif self.finished: + bar = self.fill_char * self.width + else: + chars = list(self.empty_char * (self.width or 1)) + if self.time_per_iteration != 0: + chars[ + int( + (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) + * self.width + ) + ] = self.fill_char + bar = "".join(chars) + return bar + + def format_progress_line(self) -> str: + show_percent = self.show_percent + + info_bits = [] + if self.length is not None and show_percent is None: + show_percent = not self.show_pos + + if self.show_pos: + info_bits.append(self.format_pos()) + if show_percent: + info_bits.append(self.format_pct()) + if self.show_eta and self.eta_known and not self.finished: + info_bits.append(self.format_eta()) + if self.item_show_func is not None: + item_info = self.item_show_func(self.current_item) + if item_info is not None: + info_bits.append(item_info) + + return ( + self.bar_template + % { + "label": self.label, + "bar": self.format_bar(), + "info": self.info_sep.join(info_bits), + } + ).rstrip() + + def render_progress(self) -> None: + import shutil + + if self.is_hidden: + # Only output the label as it changes if the output is not a + # TTY. Use file=stderr if you expect to be piping stdout. + if self._last_line != self.label: + self._last_line = self.label + echo(self.label, file=self.file, color=self.color) + + return + + buf = [] + # Update width in case the terminal has been resized + if self.autowidth: + old_width = self.width + self.width = 0 + clutter_length = term_len(self.format_progress_line()) + new_width = max(0, shutil.get_terminal_size().columns - clutter_length) + if new_width < old_width: + buf.append(BEFORE_BAR) + buf.append(" " * self.max_width) # type: ignore + self.max_width = new_width + self.width = new_width + + clear_width = self.width + if self.max_width is not None: + clear_width = self.max_width + + buf.append(BEFORE_BAR) + line = self.format_progress_line() + line_len = term_len(line) + if self.max_width is None or self.max_width < line_len: + self.max_width = line_len + + buf.append(line) + buf.append(" " * (clear_width - line_len)) + line = "".join(buf) + # Render the line only if it changed. 
+ + if line != self._last_line: + self._last_line = line + echo(line, file=self.file, color=self.color, nl=False) + self.file.flush() + + def make_step(self, n_steps: int) -> None: + self.pos += n_steps + if self.length is not None and self.pos >= self.length: + self.finished = True + + if (time.time() - self.last_eta) < 1.0: + return + + self.last_eta = time.time() + + # self.avg is a rolling list of length <= 7 of steps where steps are + # defined as time elapsed divided by the total progress through + # self.length. + if self.pos: + step = (time.time() - self.start) / self.pos + else: + step = time.time() - self.start + + self.avg = self.avg[-6:] + [step] + + self.eta_known = self.length is not None + + def update(self, n_steps: int, current_item: t.Optional[V] = None) -> None: + """Update the progress bar by advancing a specified number of + steps, and optionally set the ``current_item`` for this new + position. + + :param n_steps: Number of steps to advance. + :param current_item: Optional item to set as ``current_item`` + for the updated position. + + .. versionchanged:: 8.0 + Added the ``current_item`` optional parameter. + + .. versionchanged:: 8.0 + Only render when the number of steps meets the + ``update_min_steps`` threshold. + """ + if current_item is not None: + self.current_item = current_item + + self._completed_intervals += n_steps + + if self._completed_intervals >= self.update_min_steps: + self.make_step(self._completed_intervals) + self.render_progress() + self._completed_intervals = 0 + + def finish(self) -> None: + self.eta_known = False + self.current_item = None + self.finished = True + + def generator(self) -> t.Iterator[V]: + """Return a generator which yields the items added to the bar + during construction, and updates the progress bar *after* the + yielded block returns. + """ + # WARNING: the iterator interface for `ProgressBar` relies on + # this and only works because this is a simple generator which + # doesn't create or manage additional state. If this function + # changes, the impact should be evaluated both against + # `iter(bar)` and `next(bar)`. `next()` in particular may call + # `self.generator()` repeatedly, and this must remain safe in + # order for that interface to work. + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + + if self.is_hidden: + yield from self.iter + else: + for rv in self.iter: + self.current_item = rv + + # This allows show_item_func to be updated before the + # item is processed. Only trigger at the beginning of + # the update interval. + if self._completed_intervals == 0: + self.render_progress() + + yield rv + self.update(1) + + self.finish() + self.render_progress() + + +def pager(generator: t.Iterable[str], color: t.Optional[bool] = None) -> None: + """Decide what method to use for paging through text.""" + stdout = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. 
+ if stdout is None: + stdout = StringIO() + + if not isatty(sys.stdin) or not isatty(stdout): + return _nullpager(stdout, generator, color) + pager_cmd = (os.environ.get("PAGER", None) or "").strip() + if pager_cmd: + if WIN: + return _tempfilepager(generator, pager_cmd, color) + return _pipepager(generator, pager_cmd, color) + if os.environ.get("TERM") in ("dumb", "emacs"): + return _nullpager(stdout, generator, color) + if WIN or sys.platform.startswith("os2"): + return _tempfilepager(generator, "more <", color) + if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0: + return _pipepager(generator, "less", color) + + import tempfile + + fd, filename = tempfile.mkstemp() + os.close(fd) + try: + if hasattr(os, "system") and os.system(f'more "{filename}"') == 0: + return _pipepager(generator, "more", color) + return _nullpager(stdout, generator, color) + finally: + os.unlink(filename) + + +def _pipepager(generator: t.Iterable[str], cmd: str, color: t.Optional[bool]) -> None: + """Page through text by feeding it to another program. Invoking a + pager through this might support colors. + """ + import subprocess + + env = dict(os.environ) + + # If we're piping to less we might support colors under the + # condition that + cmd_detail = cmd.rsplit("/", 1)[-1].split() + if color is None and cmd_detail[0] == "less": + less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_detail[1:])}" + if not less_flags: + env["LESS"] = "-R" + color = True + elif "r" in less_flags or "R" in less_flags: + color = True + + c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env) + stdin = t.cast(t.BinaryIO, c.stdin) + encoding = get_best_encoding(stdin) + try: + for text in generator: + if not color: + text = strip_ansi(text) + + stdin.write(text.encode(encoding, "replace")) + except (OSError, KeyboardInterrupt): + pass + else: + stdin.close() + + # Less doesn't respect ^C, but catches it for its own UI purposes (aborting + # search or other commands inside less). + # + # That means when the user hits ^C, the parent process (click) terminates, + # but less is still alive, paging the output and messing up the terminal. + # + # If the user wants to make the pager exit on ^C, they should set + # `LESS='-K'`. It's not our decision to make. + while True: + try: + c.wait() + except KeyboardInterrupt: + pass + else: + break + + +def _tempfilepager( + generator: t.Iterable[str], cmd: str, color: t.Optional[bool] +) -> None: + """Page through text by invoking a program on a temporary file.""" + import tempfile + + fd, filename = tempfile.mkstemp() + # TODO: This never terminates if the passed generator never terminates. + text = "".join(generator) + if not color: + text = strip_ansi(text) + encoding = get_best_encoding(sys.stdout) + with open_stream(filename, "wb")[0] as f: + f.write(text.encode(encoding)) + try: + os.system(f'{cmd} "{filename}"') + finally: + os.close(fd) + os.unlink(filename) + + +def _nullpager( + stream: t.TextIO, generator: t.Iterable[str], color: t.Optional[bool] +) -> None: + """Simply print unformatted text. 
This is the ultimate fallback.""" + for text in generator: + if not color: + text = strip_ansi(text) + stream.write(text) + + +class Editor: + def __init__( + self, + editor: t.Optional[str] = None, + env: t.Optional[t.Mapping[str, str]] = None, + require_save: bool = True, + extension: str = ".txt", + ) -> None: + self.editor = editor + self.env = env + self.require_save = require_save + self.extension = extension + + def get_editor(self) -> str: + if self.editor is not None: + return self.editor + for key in "VISUAL", "EDITOR": + rv = os.environ.get(key) + if rv: + return rv + if WIN: + return "notepad" + for editor in "sensible-editor", "vim", "nano": + if os.system(f"which {editor} >/dev/null 2>&1") == 0: + return editor + return "vi" + + def edit_file(self, filename: str) -> None: + import subprocess + + editor = self.get_editor() + environ: t.Optional[t.Dict[str, str]] = None + + if self.env: + environ = os.environ.copy() + environ.update(self.env) + + try: + c = subprocess.Popen(f'{editor} "{filename}"', env=environ, shell=True) + exit_code = c.wait() + if exit_code != 0: + raise ClickException( + _("{editor}: Editing failed").format(editor=editor) + ) + except OSError as e: + raise ClickException( + _("{editor}: Editing failed: {e}").format(editor=editor, e=e) + ) from e + + def edit(self, text: t.Optional[t.AnyStr]) -> t.Optional[t.AnyStr]: + import tempfile + + if not text: + data = b"" + elif isinstance(text, (bytes, bytearray)): + data = text + else: + if text and not text.endswith("\n"): + text += "\n" + + if WIN: + data = text.replace("\n", "\r\n").encode("utf-8-sig") + else: + data = text.encode("utf-8") + + fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) + f: t.BinaryIO + + try: + with os.fdopen(fd, "wb") as f: + f.write(data) + + # If the filesystem resolution is 1 second, like Mac OS + # 10.12 Extended, or 2 seconds, like FAT32, and the editor + # closes very fast, require_save can fail. Set the modified + # time to be 2 seconds in the past to work around this. + os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) + # Depending on the resolution, the exact value might not be + # recorded, so get the new recorded value. 
+ timestamp = os.path.getmtime(name) + + self.edit_file(name) + + if self.require_save and os.path.getmtime(name) == timestamp: + return None + + with open(name, "rb") as f: + rv = f.read() + + if isinstance(text, (bytes, bytearray)): + return rv + + return rv.decode("utf-8-sig").replace("\r\n", "\n") # type: ignore + finally: + os.unlink(name) + + +def open_url(url: str, wait: bool = False, locate: bool = False) -> int: + import subprocess + + def _unquote_file(url: str) -> str: + from urllib.parse import unquote + + if url.startswith("file://"): + url = unquote(url[7:]) + + return url + + if sys.platform == "darwin": + args = ["open"] + if wait: + args.append("-W") + if locate: + args.append("-R") + args.append(_unquote_file(url)) + null = open("/dev/null", "w") + try: + return subprocess.Popen(args, stderr=null).wait() + finally: + null.close() + elif WIN: + if locate: + url = _unquote_file(url.replace('"', "")) + args = f'explorer /select,"{url}"' + else: + url = url.replace('"', "") + wait_str = "/WAIT" if wait else "" + args = f'start {wait_str} "" "{url}"' + return os.system(args) + elif CYGWIN: + if locate: + url = os.path.dirname(_unquote_file(url).replace('"', "")) + args = f'cygstart "{url}"' + else: + url = url.replace('"', "") + wait_str = "-w" if wait else "" + args = f'cygstart {wait_str} "{url}"' + return os.system(args) + + try: + if locate: + url = os.path.dirname(_unquote_file(url)) or "." + else: + url = _unquote_file(url) + c = subprocess.Popen(["xdg-open", url]) + if wait: + return c.wait() + return 0 + except OSError: + if url.startswith(("http://", "https://")) and not locate and not wait: + import webbrowser + + webbrowser.open(url) + return 0 + return 1 + + +def _translate_ch_to_exc(ch: str) -> t.Optional[BaseException]: + if ch == "\x03": + raise KeyboardInterrupt() + + if ch == "\x04" and not WIN: # Unix-like, Ctrl+D + raise EOFError() + + if ch == "\x1a" and WIN: # Windows, Ctrl+Z + raise EOFError() + + return None + + +if WIN: + import msvcrt + + @contextlib.contextmanager + def raw_terminal() -> t.Iterator[int]: + yield -1 + + def getchar(echo: bool) -> str: + # The function `getch` will return a bytes object corresponding to + # the pressed character. Since Windows 10 build 1803, it will also + # return \x00 when called a second time after pressing a regular key. + # + # `getwch` does not share this probably-bugged behavior. Moreover, it + # returns a Unicode object by default, which is what we want. + # + # Either of these functions will return \x00 or \xe0 to indicate + # a special key, and you need to call the same function again to get + # the "rest" of the code. The fun part is that \u00e0 is + # "latin small letter a with grave", so if you type that on a French + # keyboard, you _also_ get a \xe0. + # E.g., consider the Up arrow. This returns \xe0 and then \x48. The + # resulting Unicode string reads as "a with grave" + "capital H". + # This is indistinguishable from when the user actually types + # "a with grave" and then "capital H". + # + # When \xe0 is returned, we assume it's part of a special-key sequence + # and call `getwch` again, but that means that when the user types + # the \u00e0 character, `getchar` doesn't return until a second + # character is typed. + # The alternative is returning immediately, but that would mess up + # cross-platform handling of arrow keys and others that start with + # \xe0. 
Another option is using `getch`, but then we can't reliably + # read non-ASCII characters, because return values of `getch` are + # limited to the current 8-bit codepage. + # + # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` + # is doing the right thing in more situations than with `getch`. + func: t.Callable[[], str] + + if echo: + func = msvcrt.getwche # type: ignore + else: + func = msvcrt.getwch # type: ignore + + rv = func() + + if rv in ("\x00", "\xe0"): + # \x00 and \xe0 are control characters that indicate special key, + # see above. + rv += func() + + _translate_ch_to_exc(rv) + return rv + +else: + import tty + import termios + + @contextlib.contextmanager + def raw_terminal() -> t.Iterator[int]: + f: t.Optional[t.TextIO] + fd: int + + if not isatty(sys.stdin): + f = open("/dev/tty") + fd = f.fileno() + else: + fd = sys.stdin.fileno() + f = None + + try: + old_settings = termios.tcgetattr(fd) + + try: + tty.setraw(fd) + yield fd + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + sys.stdout.flush() + + if f is not None: + f.close() + except termios.error: + pass + + def getchar(echo: bool) -> str: + with raw_terminal() as fd: + ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") + + if echo and isatty(sys.stdout): + sys.stdout.write(ch) + + _translate_ch_to_exc(ch) + return ch diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/_textwrap.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/_textwrap.py new file mode 100644 index 000000000..b47dcbd42 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/_textwrap.py @@ -0,0 +1,49 @@ +import textwrap +import typing as t +from contextlib import contextmanager + + +class TextWrapper(textwrap.TextWrapper): + def _handle_long_word( + self, + reversed_chunks: t.List[str], + cur_line: t.List[str], + cur_len: int, + width: int, + ) -> None: + space_left = max(width - cur_len, 1) + + if self.break_long_words: + last = reversed_chunks[-1] + cut = last[:space_left] + res = last[space_left:] + cur_line.append(cut) + reversed_chunks[-1] = res + elif not cur_line: + cur_line.append(reversed_chunks.pop()) + + @contextmanager + def extra_indent(self, indent: str) -> t.Iterator[None]: + old_initial_indent = self.initial_indent + old_subsequent_indent = self.subsequent_indent + self.initial_indent += indent + self.subsequent_indent += indent + + try: + yield + finally: + self.initial_indent = old_initial_indent + self.subsequent_indent = old_subsequent_indent + + def indent_only(self, text: str) -> str: + rv = [] + + for idx, line in enumerate(text.splitlines()): + indent = self.initial_indent + + if idx > 0: + indent = self.subsequent_indent + + rv.append(f"{indent}{line}") + + return "\n".join(rv) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/_winconsole.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/_winconsole.py new file mode 100644 index 000000000..6b20df315 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/_winconsole.py @@ -0,0 +1,279 @@ +# This module is based on the excellent work by Adam Bartoš who +# provided a lot of what went into the implementation here in +# the discussion to issue1602 in the Python bug tracker. +# +# There are some general differences in regards to how this works +# compared to the original patches as we do not need to patch +# the entire interpreter but just work in our little world of +# echo and prompt. 
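A minimal sketch of how the `getchar()` implementations above (the `msvcrt` variant on Windows and the `raw_terminal()` variant on POSIX) are consumed through click's public API (illustrative only; the quit-on-`q` logic is made up):

    import click

    click.echo("Press q to quit, any other key to continue...")
    key = click.getchar()  # one keypress, no Enter required
    # Ctrl+C / Ctrl+D (Ctrl+Z on Windows) surface as KeyboardInterrupt /
    # EOFError via _translate_ch_to_exc() above.
    if key == "q":
        raise SystemExit(0)
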
+import io +import sys +import time +import typing as t +from ctypes import byref +from ctypes import c_char +from ctypes import c_char_p +from ctypes import c_int +from ctypes import c_ssize_t +from ctypes import c_ulong +from ctypes import c_void_p +from ctypes import POINTER +from ctypes import py_object +from ctypes import Structure +from ctypes.wintypes import DWORD +from ctypes.wintypes import HANDLE +from ctypes.wintypes import LPCWSTR +from ctypes.wintypes import LPWSTR + +from ._compat import _NonClosingTextIOWrapper + +assert sys.platform == "win32" +import msvcrt # noqa: E402 +from ctypes import windll # noqa: E402 +from ctypes import WINFUNCTYPE # noqa: E402 + +c_ssize_p = POINTER(c_ssize_t) + +kernel32 = windll.kernel32 +GetStdHandle = kernel32.GetStdHandle +ReadConsoleW = kernel32.ReadConsoleW +WriteConsoleW = kernel32.WriteConsoleW +GetConsoleMode = kernel32.GetConsoleMode +GetLastError = kernel32.GetLastError +GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) +CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( + ("CommandLineToArgvW", windll.shell32) +) +LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) + +STDIN_HANDLE = GetStdHandle(-10) +STDOUT_HANDLE = GetStdHandle(-11) +STDERR_HANDLE = GetStdHandle(-12) + +PyBUF_SIMPLE = 0 +PyBUF_WRITABLE = 1 + +ERROR_SUCCESS = 0 +ERROR_NOT_ENOUGH_MEMORY = 8 +ERROR_OPERATION_ABORTED = 995 + +STDIN_FILENO = 0 +STDOUT_FILENO = 1 +STDERR_FILENO = 2 + +EOF = b"\x1a" +MAX_BYTES_WRITTEN = 32767 + +try: + from ctypes import pythonapi +except ImportError: + # On PyPy we cannot get buffers so our ability to operate here is + # severely limited. + get_buffer = None +else: + + class Py_buffer(Structure): + _fields_ = [ + ("buf", c_void_p), + ("obj", py_object), + ("len", c_ssize_t), + ("itemsize", c_ssize_t), + ("readonly", c_int), + ("ndim", c_int), + ("format", c_char_p), + ("shape", c_ssize_p), + ("strides", c_ssize_p), + ("suboffsets", c_ssize_p), + ("internal", c_void_p), + ] + + PyObject_GetBuffer = pythonapi.PyObject_GetBuffer + PyBuffer_Release = pythonapi.PyBuffer_Release + + def get_buffer(obj, writable=False): + buf = Py_buffer() + flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE + PyObject_GetBuffer(py_object(obj), byref(buf), flags) + + try: + buffer_type = c_char * buf.len + return buffer_type.from_address(buf.buf) + finally: + PyBuffer_Release(byref(buf)) + + +class _WindowsConsoleRawIOBase(io.RawIOBase): + def __init__(self, handle): + self.handle = handle + + def isatty(self): + super().isatty() + return True + + +class _WindowsConsoleReader(_WindowsConsoleRawIOBase): + def readable(self): + return True + + def readinto(self, b): + bytes_to_be_read = len(b) + if not bytes_to_be_read: + return 0 + elif bytes_to_be_read % 2: + raise ValueError( + "cannot read odd number of bytes from UTF-16-LE encoded console" + ) + + buffer = get_buffer(b, writable=True) + code_units_to_be_read = bytes_to_be_read // 2 + code_units_read = c_ulong() + + rv = ReadConsoleW( + HANDLE(self.handle), + buffer, + code_units_to_be_read, + byref(code_units_read), + None, + ) + if GetLastError() == ERROR_OPERATION_ABORTED: + # wait for KeyboardInterrupt + time.sleep(0.1) + if not rv: + raise OSError(f"Windows error: {GetLastError()}") + + if buffer[0] == EOF: + return 0 + return 2 * code_units_read.value + + +class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): + def writable(self): + return True + + @staticmethod + def _get_error_message(errno): + if errno == ERROR_SUCCESS: + 
return "ERROR_SUCCESS" + elif errno == ERROR_NOT_ENOUGH_MEMORY: + return "ERROR_NOT_ENOUGH_MEMORY" + return f"Windows error {errno}" + + def write(self, b): + bytes_to_be_written = len(b) + buf = get_buffer(b) + code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 + code_units_written = c_ulong() + + WriteConsoleW( + HANDLE(self.handle), + buf, + code_units_to_be_written, + byref(code_units_written), + None, + ) + bytes_written = 2 * code_units_written.value + + if bytes_written == 0 and bytes_to_be_written > 0: + raise OSError(self._get_error_message(GetLastError())) + return bytes_written + + +class ConsoleStream: + def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: + self._text_stream = text_stream + self.buffer = byte_stream + + @property + def name(self) -> str: + return self.buffer.name + + def write(self, x: t.AnyStr) -> int: + if isinstance(x, str): + return self._text_stream.write(x) + try: + self.flush() + except Exception: + pass + return self.buffer.write(x) + + def writelines(self, lines: t.Iterable[t.AnyStr]) -> None: + for line in lines: + self.write(line) + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._text_stream, name) + + def isatty(self) -> bool: + return self.buffer.isatty() + + def __repr__(self): + return f"" + + +def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +_stream_factories: t.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { + 0: _get_text_stdin, + 1: _get_text_stdout, + 2: _get_text_stderr, +} + + +def _is_console(f: t.TextIO) -> bool: + if not hasattr(f, "fileno"): + return False + + try: + fileno = f.fileno() + except (OSError, io.UnsupportedOperation): + return False + + handle = msvcrt.get_osfhandle(fileno) + return bool(GetConsoleMode(handle, byref(DWORD()))) + + +def _get_windows_console_stream( + f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] +) -> t.Optional[t.TextIO]: + if ( + get_buffer is not None + and encoding in {"utf-16-le", None} + and errors in {"strict", None} + and _is_console(f) + ): + func = _stream_factories.get(f.fileno()) + if func is not None: + b = getattr(f, "buffer", None) + + if b is None: + return None + + return func(b) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/core.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/core.py new file mode 100644 index 000000000..cc65e896b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/core.py @@ -0,0 +1,3042 @@ +import enum +import errno +import inspect +import os +import sys +import typing as t +from collections import abc +from contextlib import contextmanager +from contextlib import ExitStack +from functools import update_wrapper +from 
gettext import gettext as _ +from gettext import ngettext +from itertools import repeat +from types import TracebackType + +from . import types +from .exceptions import Abort +from .exceptions import BadParameter +from .exceptions import ClickException +from .exceptions import Exit +from .exceptions import MissingParameter +from .exceptions import UsageError +from .formatting import HelpFormatter +from .formatting import join_options +from .globals import pop_context +from .globals import push_context +from .parser import _flag_needs_value +from .parser import OptionParser +from .parser import split_opt +from .termui import confirm +from .termui import prompt +from .termui import style +from .utils import _detect_program_name +from .utils import _expand_args +from .utils import echo +from .utils import make_default_short_help +from .utils import make_str +from .utils import PacifyFlushWrapper + +if t.TYPE_CHECKING: + import typing_extensions as te + from .shell_completion import CompletionItem + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) +V = t.TypeVar("V") + + +def _complete_visible_commands( + ctx: "Context", incomplete: str +) -> t.Iterator[t.Tuple[str, "Command"]]: + """List all the subcommands of a group that start with the + incomplete value and aren't hidden. + + :param ctx: Invocation context for the group. + :param incomplete: Value being completed. May be empty. + """ + multi = t.cast(MultiCommand, ctx.command) + + for name in multi.list_commands(ctx): + if name.startswith(incomplete): + command = multi.get_command(ctx, name) + + if command is not None and not command.hidden: + yield name, command + + +def _check_multicommand( + base_command: "MultiCommand", cmd_name: str, cmd: "Command", register: bool = False +) -> None: + if not base_command.chain or not isinstance(cmd, MultiCommand): + return + if register: + hint = ( + "It is not possible to add multi commands as children to" + " another multi command that is in chain mode." + ) + else: + hint = ( + "Found a multi command as subcommand to a multi command" + " that is in chain mode. This is not supported." + ) + raise RuntimeError( + f"{hint}. Command {base_command.name!r} is set to chain and" + f" {cmd_name!r} was added as a subcommand but it in itself is a" + f" multi command. ({cmd_name!r} is a {type(cmd).__name__}" + f" within a chained {type(base_command).__name__} named" + f" {base_command.name!r})." + ) + + +def batch(iterable: t.Iterable[V], batch_size: int) -> t.List[t.Tuple[V, ...]]: + return list(zip(*repeat(iter(iterable), batch_size))) + + +@contextmanager +def augment_usage_errors( + ctx: "Context", param: t.Optional["Parameter"] = None +) -> t.Iterator[None]: + """Context manager that attaches extra information to exceptions.""" + try: + yield + except BadParameter as e: + if e.ctx is None: + e.ctx = ctx + if param is not None and e.param is None: + e.param = param + raise + except UsageError as e: + if e.ctx is None: + e.ctx = ctx + raise + + +def iter_params_for_processing( + invocation_order: t.Sequence["Parameter"], + declaration_order: t.Sequence["Parameter"], +) -> t.List["Parameter"]: + """Given a sequence of parameters in the order as should be considered + for processing and an iterable of parameters that exist, this returns + a list in the correct order as they should be processed. 
+ """ + + def sort_key(item: "Parameter") -> t.Tuple[bool, float]: + try: + idx: float = invocation_order.index(item) + except ValueError: + idx = float("inf") + + return not item.is_eager, idx + + return sorted(declaration_order, key=sort_key) + + +class ParameterSource(enum.Enum): + """This is an :class:`~enum.Enum` that indicates the source of a + parameter's value. + + Use :meth:`click.Context.get_parameter_source` to get the + source for a parameter by name. + + .. versionchanged:: 8.0 + Use :class:`~enum.Enum` and drop the ``validate`` method. + + .. versionchanged:: 8.0 + Added the ``PROMPT`` value. + """ + + COMMANDLINE = enum.auto() + """The value was provided by the command line args.""" + ENVIRONMENT = enum.auto() + """The value was provided with an environment variable.""" + DEFAULT = enum.auto() + """Used the default specified by the parameter.""" + DEFAULT_MAP = enum.auto() + """Used a default provided by :attr:`Context.default_map`.""" + PROMPT = enum.auto() + """Used a prompt to confirm a default or provide a value.""" + + +class Context: + """The context is a special internal object that holds state relevant + for the script execution at every single level. It's normally invisible + to commands unless they opt-in to getting access to it. + + The context is useful as it can pass internal objects around and can + control special execution features such as reading data from + environment variables. + + A context can be used as context manager in which case it will call + :meth:`close` on teardown. + + :param command: the command class for this context. + :param parent: the parent context. + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it is usually + the name of the script, for commands below it it's + the name of the script. + :param obj: an arbitrary object of user data. + :param auto_envvar_prefix: the prefix to use for automatic environment + variables. If this is `None` then reading + from environment variables is disabled. This + does not affect manually set environment + variables which are always read. + :param default_map: a dictionary (like object) with default values + for parameters. + :param terminal_width: the width of the terminal. The default is + inherit from parent context. If no context + defines the terminal width then auto + detection will be applied. + :param max_content_width: the maximum width for content rendered by + Click (this currently only affects help + pages). This defaults to 80 characters if + not overridden. In other words: even if the + terminal is larger than that, Click will not + format things wider than 80 characters by + default. In addition to that, formatters might + add some safety mapping on the right. + :param resilient_parsing: if this flag is enabled then Click will + parse without any interactivity or callback + invocation. Default values will also be + ignored. This is useful for implementing + things such as completion support. + :param allow_extra_args: if this is set to `True` then extra arguments + at the end will not raise an error and will be + kept on the context. The default is to inherit + from the command. + :param allow_interspersed_args: if this is set to `False` then options + and arguments cannot be mixed. The + default is to inherit from the command. + :param ignore_unknown_options: instructs click to ignore options it does + not know and keeps them for later + processing. 
+ :param help_option_names: optionally a list of strings that define how + the default help parameter is named. The + default is ``['--help']``. + :param token_normalize_func: an optional function that is used to + normalize tokens (options, choices, + etc.). This for instance can be used to + implement case insensitive behavior. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are used in texts that Click prints which is by + default not the case. This for instance would affect + help output. + :param show_default: Show the default value for commands. If this + value is not set, it defaults to the value from the parent + context. ``Command.show_default`` overrides this default for the + specific command. + + .. versionchanged:: 8.1 + The ``show_default`` parameter is overridden by + ``Command.show_default``, instead of the other way around. + + .. versionchanged:: 8.0 + The ``show_default`` parameter defaults to the value from the + parent context. + + .. versionchanged:: 7.1 + Added the ``show_default`` parameter. + + .. versionchanged:: 4.0 + Added the ``color``, ``ignore_unknown_options``, and + ``max_content_width`` parameters. + + .. versionchanged:: 3.0 + Added the ``allow_extra_args`` and ``allow_interspersed_args`` + parameters. + + .. versionchanged:: 2.0 + Added the ``resilient_parsing``, ``help_option_names``, and + ``token_normalize_func`` parameters. + """ + + #: The formatter class to create with :meth:`make_formatter`. + #: + #: .. versionadded:: 8.0 + formatter_class: t.Type["HelpFormatter"] = HelpFormatter + + def __init__( + self, + command: "Command", + parent: t.Optional["Context"] = None, + info_name: t.Optional[str] = None, + obj: t.Optional[t.Any] = None, + auto_envvar_prefix: t.Optional[str] = None, + default_map: t.Optional[t.MutableMapping[str, t.Any]] = None, + terminal_width: t.Optional[int] = None, + max_content_width: t.Optional[int] = None, + resilient_parsing: bool = False, + allow_extra_args: t.Optional[bool] = None, + allow_interspersed_args: t.Optional[bool] = None, + ignore_unknown_options: t.Optional[bool] = None, + help_option_names: t.Optional[t.List[str]] = None, + token_normalize_func: t.Optional[t.Callable[[str], str]] = None, + color: t.Optional[bool] = None, + show_default: t.Optional[bool] = None, + ) -> None: + #: the parent context or `None` if none exists. + self.parent = parent + #: the :class:`Command` for this context. + self.command = command + #: the descriptive information name + self.info_name = info_name + #: Map of parameter names to their parsed values. Parameters + #: with ``expose_value=False`` are not stored. + self.params: t.Dict[str, t.Any] = {} + #: the leftover arguments. + self.args: t.List[str] = [] + #: protected arguments. These are arguments that are prepended + #: to `args` when certain parsing scenarios are encountered but + #: must be never propagated to another arguments. This is used + #: to implement nested parsing. + self.protected_args: t.List[str] = [] + #: the collected prefixes of the command's options. + self._opt_prefixes: t.Set[str] = set(parent._opt_prefixes) if parent else set() + + if obj is None and parent is not None: + obj = parent.obj + + #: the user object stored. + self.obj: t.Any = obj + self._meta: t.Dict[str, t.Any] = getattr(parent, "meta", {}) + + #: A dictionary (-like object) with defaults for parameters. 
+ if ( + default_map is None + and info_name is not None + and parent is not None + and parent.default_map is not None + ): + default_map = parent.default_map.get(info_name) + + self.default_map: t.Optional[t.MutableMapping[str, t.Any]] = default_map + + #: This flag indicates if a subcommand is going to be executed. A + #: group callback can use this information to figure out if it's + #: being executed directly or because the execution flow passes + #: onwards to a subcommand. By default it's None, but it can be + #: the name of the subcommand to execute. + #: + #: If chaining is enabled this will be set to ``'*'`` in case + #: any commands are executed. It is however not possible to + #: figure out which ones. If you require this knowledge you + #: should use a :func:`result_callback`. + self.invoked_subcommand: t.Optional[str] = None + + if terminal_width is None and parent is not None: + terminal_width = parent.terminal_width + + #: The width of the terminal (None is autodetection). + self.terminal_width: t.Optional[int] = terminal_width + + if max_content_width is None and parent is not None: + max_content_width = parent.max_content_width + + #: The maximum width of formatted content (None implies a sensible + #: default which is 80 for most things). + self.max_content_width: t.Optional[int] = max_content_width + + if allow_extra_args is None: + allow_extra_args = command.allow_extra_args + + #: Indicates if the context allows extra args or if it should + #: fail on parsing. + #: + #: .. versionadded:: 3.0 + self.allow_extra_args = allow_extra_args + + if allow_interspersed_args is None: + allow_interspersed_args = command.allow_interspersed_args + + #: Indicates if the context allows mixing of arguments and + #: options or not. + #: + #: .. versionadded:: 3.0 + self.allow_interspersed_args: bool = allow_interspersed_args + + if ignore_unknown_options is None: + ignore_unknown_options = command.ignore_unknown_options + + #: Instructs click to ignore options that a command does not + #: understand and will store it on the context for later + #: processing. This is primarily useful for situations where you + #: want to call into external programs. Generally this pattern is + #: strongly discouraged because it's not possibly to losslessly + #: forward all arguments. + #: + #: .. versionadded:: 4.0 + self.ignore_unknown_options: bool = ignore_unknown_options + + if help_option_names is None: + if parent is not None: + help_option_names = parent.help_option_names + else: + help_option_names = ["--help"] + + #: The names for the help options. + self.help_option_names: t.List[str] = help_option_names + + if token_normalize_func is None and parent is not None: + token_normalize_func = parent.token_normalize_func + + #: An optional normalization function for tokens. This is + #: options, choices, commands etc. + self.token_normalize_func: t.Optional[ + t.Callable[[str], str] + ] = token_normalize_func + + #: Indicates if resilient parsing is enabled. In that case Click + #: will do its best to not cause any failures and default values + #: will be ignored. Useful for completion. + self.resilient_parsing: bool = resilient_parsing + + # If there is no envvar prefix yet, but the parent has one and + # the command on this level has a name, we can expand the envvar + # prefix automatically. 
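To make the prefix expansion implemented just below concrete, a small hedged sketch with made-up names (`MYTOOL`, a `serve` subcommand, a `--debug` flag); `context_settings` and `auto_envvar_prefix` are existing click parameters:

    import click

    @click.group(context_settings={"auto_envvar_prefix": "MYTOOL"})
    def cli():
        pass

    @cli.command()
    @click.option("--debug/--no-debug", default=False)
    def serve(debug):
        click.echo(f"debug={debug}")

    # With MYTOOL_SERVE_DEBUG=1 exported, `cli serve` enables --debug:
    # the code below appends the upper-cased info_name ("SERVE") to the
    # parent prefix, and dashes are normalized to underscores.
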
+ if auto_envvar_prefix is None: + if ( + parent is not None + and parent.auto_envvar_prefix is not None + and self.info_name is not None + ): + auto_envvar_prefix = ( + f"{parent.auto_envvar_prefix}_{self.info_name.upper()}" + ) + else: + auto_envvar_prefix = auto_envvar_prefix.upper() + + if auto_envvar_prefix is not None: + auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") + + self.auto_envvar_prefix: t.Optional[str] = auto_envvar_prefix + + if color is None and parent is not None: + color = parent.color + + #: Controls if styling output is wanted or not. + self.color: t.Optional[bool] = color + + if show_default is None and parent is not None: + show_default = parent.show_default + + #: Show option default values when formatting help text. + self.show_default: t.Optional[bool] = show_default + + self._close_callbacks: t.List[t.Callable[[], t.Any]] = [] + self._depth = 0 + self._parameter_source: t.Dict[str, ParameterSource] = {} + self._exit_stack = ExitStack() + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. This traverses the entire CLI + structure. + + .. code-block:: python + + with Context(cli) as ctx: + info = ctx.to_info_dict() + + .. versionadded:: 8.0 + """ + return { + "command": self.command.to_info_dict(self), + "info_name": self.info_name, + "allow_extra_args": self.allow_extra_args, + "allow_interspersed_args": self.allow_interspersed_args, + "ignore_unknown_options": self.ignore_unknown_options, + "auto_envvar_prefix": self.auto_envvar_prefix, + } + + def __enter__(self) -> "Context": + self._depth += 1 + push_context(self) + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self._depth -= 1 + if self._depth == 0: + self.close() + pop_context() + + @contextmanager + def scope(self, cleanup: bool = True) -> t.Iterator["Context"]: + """This helper method can be used with the context object to promote + it to the current thread local (see :func:`get_current_context`). + The default behavior of this is to invoke the cleanup functions which + can be disabled by setting `cleanup` to `False`. The cleanup + functions are typically used for things such as closing file handles. + + If the cleanup is intended the context object can also be directly + used as a context manager. + + Example usage:: + + with ctx.scope(): + assert get_current_context() is ctx + + This is equivalent:: + + with ctx: + assert get_current_context() is ctx + + .. versionadded:: 5.0 + + :param cleanup: controls if the cleanup functions should be run or + not. The default is to run these functions. In + some situations the context only wants to be + temporarily pushed in which case this can be disabled. + Nested pushes automatically defer the cleanup. + """ + if not cleanup: + self._depth += 1 + try: + with self as rv: + yield rv + finally: + if not cleanup: + self._depth -= 1 + + @property + def meta(self) -> t.Dict[str, t.Any]: + """This is a dictionary which is shared with all the contexts + that are nested. It exists so that click utilities can store some + state here if they need to. It is however the responsibility of + that code to manage this dictionary well. + + The keys are supposed to be unique dotted strings. For instance + module paths are a good choice for it. What is stored in there is + irrelevant for the operation of click. 
However what is important is + that code that places data here adheres to the general semantics of + the system. + + Example usage:: + + LANG_KEY = f'{__name__}.lang' + + def set_language(value): + ctx = get_current_context() + ctx.meta[LANG_KEY] = value + + def get_language(): + return get_current_context().meta.get(LANG_KEY, 'en_US') + + .. versionadded:: 5.0 + """ + return self._meta + + def make_formatter(self) -> HelpFormatter: + """Creates the :class:`~click.HelpFormatter` for the help and + usage output. + + To quickly customize the formatter class used without overriding + this method, set the :attr:`formatter_class` attribute. + + .. versionchanged:: 8.0 + Added the :attr:`formatter_class` attribute. + """ + return self.formatter_class( + width=self.terminal_width, max_width=self.max_content_width + ) + + def with_resource(self, context_manager: t.ContextManager[V]) -> V: + """Register a resource as if it were used in a ``with`` + statement. The resource will be cleaned up when the context is + popped. + + Uses :meth:`contextlib.ExitStack.enter_context`. It calls the + resource's ``__enter__()`` method and returns the result. When + the context is popped, it closes the stack, which calls the + resource's ``__exit__()`` method. + + To register a cleanup function for something that isn't a + context manager, use :meth:`call_on_close`. Or use something + from :mod:`contextlib` to turn it into a context manager first. + + .. code-block:: python + + @click.group() + @click.option("--name") + @click.pass_context + def cli(ctx): + ctx.obj = ctx.with_resource(connect_db(name)) + + :param context_manager: The context manager to enter. + :return: Whatever ``context_manager.__enter__()`` returns. + + .. versionadded:: 8.0 + """ + return self._exit_stack.enter_context(context_manager) + + def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: + """Register a function to be called when the context tears down. + + This can be used to close resources opened during the script + execution. Resources that support Python's context manager + protocol which would be used in a ``with`` statement should be + registered with :meth:`with_resource` instead. + + :param f: The function to execute on teardown. + """ + return self._exit_stack.callback(f) + + def close(self) -> None: + """Invoke all close callbacks registered with + :meth:`call_on_close`, and exit all context managers entered + with :meth:`with_resource`. + """ + self._exit_stack.close() + # In case the context is reused, create a new exit stack. + self._exit_stack = ExitStack() + + @property + def command_path(self) -> str: + """The computed command path. This is used for the ``usage`` + information on the help page. It's automatically created by + combining the info names of the chain of contexts to the root. 
+ """ + rv = "" + if self.info_name is not None: + rv = self.info_name + if self.parent is not None: + parent_command_path = [self.parent.command_path] + + if isinstance(self.parent.command, Command): + for param in self.parent.command.get_params(self): + parent_command_path.extend(param.get_usage_pieces(self)) + + rv = f"{' '.join(parent_command_path)} {rv}" + return rv.lstrip() + + def find_root(self) -> "Context": + """Finds the outermost context.""" + node = self + while node.parent is not None: + node = node.parent + return node + + def find_object(self, object_type: t.Type[V]) -> t.Optional[V]: + """Finds the closest object of a given type.""" + node: t.Optional["Context"] = self + + while node is not None: + if isinstance(node.obj, object_type): + return node.obj + + node = node.parent + + return None + + def ensure_object(self, object_type: t.Type[V]) -> V: + """Like :meth:`find_object` but sets the innermost object to a + new instance of `object_type` if it does not exist. + """ + rv = self.find_object(object_type) + if rv is None: + self.obj = rv = object_type() + return rv + + @t.overload + def lookup_default( + self, name: str, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def lookup_default( + self, name: str, call: "te.Literal[False]" = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def lookup_default(self, name: str, call: bool = True) -> t.Optional[t.Any]: + """Get the default for a parameter from :attr:`default_map`. + + :param name: Name of the parameter. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. + """ + if self.default_map is not None: + value = self.default_map.get(name) + + if call and callable(value): + return value() + + return value + + return None + + def fail(self, message: str) -> "te.NoReturn": + """Aborts the execution of the program with a specific error + message. + + :param message: the error message to fail with. + """ + raise UsageError(message, self) + + def abort(self) -> "te.NoReturn": + """Aborts the script.""" + raise Abort() + + def exit(self, code: int = 0) -> "te.NoReturn": + """Exits the application with a given exit code.""" + raise Exit(code) + + def get_usage(self) -> str: + """Helper method to get formatted usage string for the current + context and command. + """ + return self.command.get_usage(self) + + def get_help(self) -> str: + """Helper method to get formatted help page for the current + context and command. + """ + return self.command.get_help(self) + + def _make_sub_context(self, command: "Command") -> "Context": + """Create a new context of the same type as this context, but + for a new command. + + :meta private: + """ + return type(self)(command, info_name=command.name, parent=self) + + @t.overload + def invoke( + __self, # noqa: B902 + __callback: "t.Callable[..., V]", + *args: t.Any, + **kwargs: t.Any, + ) -> V: + ... + + @t.overload + def invoke( + __self, # noqa: B902 + __callback: "Command", + *args: t.Any, + **kwargs: t.Any, + ) -> t.Any: + ... + + def invoke( + __self, # noqa: B902 + __callback: t.Union["Command", "t.Callable[..., V]"], + *args: t.Any, + **kwargs: t.Any, + ) -> t.Union[t.Any, V]: + """Invokes a command callback in exactly the way it expects. There + are two ways to invoke this method: + + 1. the first argument can be a callback and all other arguments and + keyword arguments are forwarded directly to the function. + 2. 
the first argument is a click command object. In that case all + arguments are forwarded as well but proper click parameters + (options and click arguments) must be keyword arguments and Click + will fill in defaults. + + Note that before Click 3.2 keyword arguments were not properly filled + in against the intention of this code and no context was created. For + more information about this change and why it was done in a bugfix + release see :ref:`upgrade-to-3.2`. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if :meth:`forward` is called at multiple levels. + """ + if isinstance(__callback, Command): + other_cmd = __callback + + if other_cmd.callback is None: + raise TypeError( + "The given command does not have a callback that can be invoked." + ) + else: + __callback = t.cast("t.Callable[..., V]", other_cmd.callback) + + ctx = __self._make_sub_context(other_cmd) + + for param in other_cmd.params: + if param.name not in kwargs and param.expose_value: + kwargs[param.name] = param.type_cast_value( # type: ignore + ctx, param.get_default(ctx) + ) + + # Track all kwargs as params, so that forward() will pass + # them on in subsequent calls. + ctx.params.update(kwargs) + else: + ctx = __self + + with augment_usage_errors(__self): + with ctx: + return __callback(*args, **kwargs) + + def forward( + __self, __cmd: "Command", *args: t.Any, **kwargs: t.Any # noqa: B902 + ) -> t.Any: + """Similar to :meth:`invoke` but fills in default keyword + arguments from the current context if the other command expects + it. This cannot invoke callbacks directly, only other commands. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if ``forward`` is called at multiple levels. + """ + # Can only forward to other commands, not direct callbacks. + if not isinstance(__cmd, Command): + raise TypeError("Callback is not a command.") + + for param in __self.params: + if param not in kwargs: + kwargs[param] = __self.params[param] + + return __self.invoke(__cmd, *args, **kwargs) + + def set_parameter_source(self, name: str, source: ParameterSource) -> None: + """Set the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + :param name: The name of the parameter. + :param source: A member of :class:`~click.core.ParameterSource`. + """ + self._parameter_source[name] = source + + def get_parameter_source(self, name: str) -> t.Optional[ParameterSource]: + """Get the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + This can be useful for determining when a user specified a value + on the command line that is the same as the default value. It + will be :attr:`~click.core.ParameterSource.DEFAULT` only if the + value was actually taken from the default. + + :param name: The name of the parameter. + :rtype: ParameterSource + + .. versionchanged:: 8.0 + Returns ``None`` if the parameter was not provided from any + source. + """ + return self._parameter_source.get(name) + + +class BaseCommand: + """The base command implements the minimal API contract of commands. + Most code will never use this as it does not implement a lot of useful + functionality but it can act as the direct subclass of alternative + parsing methods that do not depend on the Click parser. + + For instance, this can be used to bridge Click and other systems like + argparse or docopt. 
+ + Because base commands do not implement a lot of the API that other + parts of Click take for granted, they are not supported for all + operations. For instance, they cannot be used with the decorators + usually and they have no built-in callback system. + + .. versionchanged:: 2.0 + Added the `context_settings` parameter. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + """ + + #: The context class to create with :meth:`make_context`. + #: + #: .. versionadded:: 8.0 + context_class: t.Type[Context] = Context + #: the default for the :attr:`Context.allow_extra_args` flag. + allow_extra_args = False + #: the default for the :attr:`Context.allow_interspersed_args` flag. + allow_interspersed_args = True + #: the default for the :attr:`Context.ignore_unknown_options` flag. + ignore_unknown_options = False + + def __init__( + self, + name: t.Optional[str], + context_settings: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> None: + #: the name the command thinks it has. Upon registering a command + #: on a :class:`Group` the group will default the command name + #: with this information. You should instead use the + #: :class:`Context`\'s :attr:`~Context.info_name` attribute. + self.name = name + + if context_settings is None: + context_settings = {} + + #: an optional dictionary with defaults passed to the context. + self.context_settings: t.MutableMapping[str, t.Any] = context_settings + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. This traverses the entire structure + below this command. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + :param ctx: A :class:`Context` representing this command. + + .. versionadded:: 8.0 + """ + return {"name": self.name} + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def get_usage(self, ctx: Context) -> str: + raise NotImplementedError("Base commands cannot get usage") + + def get_help(self, ctx: Context) -> str: + raise NotImplementedError("Base commands cannot get help") + + def make_context( + self, + info_name: t.Optional[str], + args: t.List[str], + parent: t.Optional[Context] = None, + **extra: t.Any, + ) -> Context: + """This function when given an info name and arguments will kick + off the parsing and create a new :class:`Context`. It does not + invoke the actual command callback though. + + To quickly customize the context class used without overriding + this method, set the :attr:`context_class` attribute. + + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it's usually + the name of the script, for commands below it's + the name of the command. + :param args: the arguments to parse as list of strings. + :param parent: the parent context if available. + :param extra: extra keyword arguments forwarded to the context + constructor. + + .. versionchanged:: 8.0 + Added the :attr:`context_class` attribute. 
+ """ + for key, value in self.context_settings.items(): + if key not in extra: + extra[key] = value + + ctx = self.context_class( + self, info_name=info_name, parent=parent, **extra # type: ignore + ) + + with ctx.scope(cleanup=False): + self.parse_args(ctx, args) + return ctx + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + """Given a context and a list of arguments this creates the parser + and parses the arguments, then modifies the context as necessary. + This is automatically invoked by :meth:`make_context`. + """ + raise NotImplementedError("Base commands do not know how to parse arguments.") + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the command. The default + implementation is raising a not implemented error. + """ + raise NotImplementedError("Base commands are not invocable by default") + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of chained multi-commands. + + Any command could be part of a chained multi-command, so sibling + commands are valid at any point during command completion. Other + command classes will return more completions. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: t.List["CompletionItem"] = [] + + while ctx.parent is not None: + ctx = ctx.parent + + if isinstance(ctx.command, MultiCommand) and ctx.command.chain: + results.extend( + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + if name not in ctx.protected_args + ) + + return results + + @t.overload + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: "te.Literal[True]" = True, + **extra: t.Any, + ) -> "te.NoReturn": + ... + + @t.overload + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: bool = ..., + **extra: t.Any, + ) -> t.Any: + ... + + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: bool = True, + windows_expand_args: bool = True, + **extra: t.Any, + ) -> t.Any: + """This is the way to invoke a script with all the bells and + whistles as a command line application. This will always terminate + the application after a call. If this is not wanted, ``SystemExit`` + needs to be caught. + + This method is also available by directly calling the instance of + a :class:`Command`. + + :param args: the arguments that should be used for parsing. If not + provided, ``sys.argv[1:]`` is used. + :param prog_name: the program name that should be used. By default + the program name is constructed by taking the file + name from ``sys.argv[0]``. + :param complete_var: the environment variable that controls the + bash completion support. The default is + ``"__COMPLETE"`` with prog_name in + uppercase. + :param standalone_mode: the default behavior is to invoke the script + in standalone mode. Click will then + handle exceptions and convert them into + error messages and the function will never + return but shut down the interpreter. 
If + this is set to `False` they will be + propagated to the caller and the return + value of this function is the return value + of :meth:`invoke`. + :param windows_expand_args: Expand glob patterns, user dir, and + env vars in command line args on Windows. + :param extra: extra keyword arguments are forwarded to the context + constructor. See :class:`Context` for more information. + + .. versionchanged:: 8.0.1 + Added the ``windows_expand_args`` parameter to allow + disabling command line arg expansion on Windows. + + .. versionchanged:: 8.0 + When taking arguments from ``sys.argv`` on Windows, glob + patterns, user dir, and env vars are expanded. + + .. versionchanged:: 3.0 + Added the ``standalone_mode`` parameter. + """ + if args is None: + args = sys.argv[1:] + + if os.name == "nt" and windows_expand_args: + args = _expand_args(args) + else: + args = list(args) + + if prog_name is None: + prog_name = _detect_program_name() + + # Process shell completion requests and exit early. + self._main_shell_completion(extra, prog_name, complete_var) + + try: + try: + with self.make_context(prog_name, args, **extra) as ctx: + rv = self.invoke(ctx) + if not standalone_mode: + return rv + # it's not safe to `ctx.exit(rv)` here! + # note that `rv` may actually contain data like "1" which + # has obvious effects + # more subtle case: `rv=[None, None]` can come out of + # chained commands which all returned `None` -- so it's not + # even always obvious that `rv` indicates success/failure + # by its truthiness/falsiness + ctx.exit() + except (EOFError, KeyboardInterrupt) as e: + echo(file=sys.stderr) + raise Abort() from e + except ClickException as e: + if not standalone_mode: + raise + e.show() + sys.exit(e.exit_code) + except OSError as e: + if e.errno == errno.EPIPE: + sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) + sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) + sys.exit(1) + else: + raise + except Exit as e: + if standalone_mode: + sys.exit(e.exit_code) + else: + # in non-standalone mode, return the exit code + # note that this is only reached if `self.invoke` above raises + # an Exit explicitly -- thus bypassing the check there which + # would return its result + # the results of non-standalone execution may therefore be + # somewhat ambiguous: if there are codepaths which lead to + # `ctx.exit(1)` and to `return 1`, the caller won't be able to + # tell the difference between the two + return e.exit_code + except Abort: + if not standalone_mode: + raise + echo(_("Aborted!"), file=sys.stderr) + sys.exit(1) + + def _main_shell_completion( + self, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + complete_var: t.Optional[str] = None, + ) -> None: + """Check if the shell is asking for tab completion, process + that, then exit early. Called from :meth:`main` before the + program is invoked. + + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. Defaults to + ``_{PROG_NAME}_COMPLETE``. + + .. versionchanged:: 8.2.0 + Dots (``.``) in ``prog_name`` are replaced with underscores (``_``). 
+ """ + if complete_var is None: + complete_name = prog_name.replace("-", "_").replace(".", "_") + complete_var = f"_{complete_name}_COMPLETE".upper() + + instruction = os.environ.get(complete_var) + + if not instruction: + return + + from .shell_completion import shell_complete + + rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) + sys.exit(rv) + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: + """Alias for :meth:`main`.""" + return self.main(*args, **kwargs) + + +class Command(BaseCommand): + """Commands are the basic building block of command line interfaces in + Click. A basic command handles command line parsing and might dispatch + more parsing to commands nested below it. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + :param callback: the callback to invoke. This is optional. + :param params: the parameters to register with this command. This can + be either :class:`Option` or :class:`Argument` objects. + :param help: the help string to use for this command. + :param epilog: like the help string but it's printed at the end of the + help page after everything else. + :param short_help: the short help to use for this command. This is + shown on the command listing of the parent command. + :param add_help_option: by default each command registers a ``--help`` + option. This can be disabled by this parameter. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is disabled by default. + If enabled this will add ``--help`` as argument + if no arguments are passed + :param hidden: hide this command from help outputs. + + :param deprecated: issues a message indicating that + the command is deprecated. + + .. versionchanged:: 8.1 + ``help``, ``epilog``, and ``short_help`` are stored unprocessed, + all formatting is done when outputting help text, not at init, + and is done even if not using the ``@command`` decorator. + + .. versionchanged:: 8.0 + Added a ``repr`` showing the command name. + + .. versionchanged:: 7.1 + Added the ``no_args_is_help`` parameter. + + .. versionchanged:: 2.0 + Added the ``context_settings`` parameter. + """ + + def __init__( + self, + name: t.Optional[str], + context_settings: t.Optional[t.MutableMapping[str, t.Any]] = None, + callback: t.Optional[t.Callable[..., t.Any]] = None, + params: t.Optional[t.List["Parameter"]] = None, + help: t.Optional[str] = None, + epilog: t.Optional[str] = None, + short_help: t.Optional[str] = None, + options_metavar: t.Optional[str] = "[OPTIONS]", + add_help_option: bool = True, + no_args_is_help: bool = False, + hidden: bool = False, + deprecated: bool = False, + ) -> None: + super().__init__(name, context_settings) + #: the callback to execute when the command fires. This might be + #: `None` in which case nothing happens. + self.callback = callback + #: the list of parameters for this command in the order they + #: should show up in the help page and execute. Eager parameters + #: will automatically be handled before non eager ones. 
+ self.params: t.List["Parameter"] = params or [] + self.help = help + self.epilog = epilog + self.options_metavar = options_metavar + self.short_help = short_help + self.add_help_option = add_help_option + self.no_args_is_help = no_args_is_help + self.hidden = hidden + self.deprecated = deprecated + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict(ctx) + info_dict.update( + params=[param.to_info_dict() for param in self.get_params(ctx)], + help=self.help, + epilog=self.epilog, + short_help=self.short_help, + hidden=self.hidden, + deprecated=self.deprecated, + ) + return info_dict + + def get_usage(self, ctx: Context) -> str: + """Formats the usage line into a string and returns it. + + Calls :meth:`format_usage` internally. + """ + formatter = ctx.make_formatter() + self.format_usage(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_params(self, ctx: Context) -> t.List["Parameter"]: + rv = self.params + help_option = self.get_help_option(ctx) + + if help_option is not None: + rv = [*rv, help_option] + + return rv + + def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the usage line into the formatter. + + This is a low-level method called by :meth:`get_usage`. + """ + pieces = self.collect_usage_pieces(ctx) + formatter.write_usage(ctx.command_path, " ".join(pieces)) + + def collect_usage_pieces(self, ctx: Context) -> t.List[str]: + """Returns all the pieces that go into the usage line and returns + it as a list of strings. + """ + rv = [self.options_metavar] if self.options_metavar else [] + + for param in self.get_params(ctx): + rv.extend(param.get_usage_pieces(ctx)) + + return rv + + def get_help_option_names(self, ctx: Context) -> t.List[str]: + """Returns the names for the help option.""" + all_names = set(ctx.help_option_names) + for param in self.params: + all_names.difference_update(param.opts) + all_names.difference_update(param.secondary_opts) + return list(all_names) + + def get_help_option(self, ctx: Context) -> t.Optional["Option"]: + """Returns the help option object.""" + help_options = self.get_help_option_names(ctx) + + if not help_options or not self.add_help_option: + return None + + def show_help(ctx: Context, param: "Parameter", value: str) -> None: + if value and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + return Option( + help_options, + is_flag=True, + is_eager=True, + expose_value=False, + callback=show_help, + help=_("Show this message and exit."), + ) + + def make_parser(self, ctx: Context) -> OptionParser: + """Creates the underlying option parser for this command.""" + parser = OptionParser(ctx) + for param in self.get_params(ctx): + param.add_to_parser(parser, ctx) + return parser + + def get_help(self, ctx: Context) -> str: + """Formats the help into a string and returns it. + + Calls :meth:`format_help` internally. + """ + formatter = ctx.make_formatter() + self.format_help(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_short_help_str(self, limit: int = 45) -> str: + """Gets short help for the command or makes it by shortening the + long help string. 
+ """ + if self.short_help: + text = inspect.cleandoc(self.short_help) + elif self.help: + text = make_default_short_help(self.help, limit) + else: + text = "" + + if self.deprecated: + text = _("(Deprecated) {text}").format(text=text) + + return text.strip() + + def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help into the formatter if it exists. + + This is a low-level method called by :meth:`get_help`. + + This calls the following methods: + + - :meth:`format_usage` + - :meth:`format_help_text` + - :meth:`format_options` + - :meth:`format_epilog` + """ + self.format_usage(ctx, formatter) + self.format_help_text(ctx, formatter) + self.format_options(ctx, formatter) + self.format_epilog(ctx, formatter) + + def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help text to the formatter if it exists.""" + if self.help is not None: + # truncate the help text to the first form feed + text = inspect.cleandoc(self.help).partition("\f")[0] + else: + text = "" + + if self.deprecated: + text = _("(Deprecated) {text}").format(text=text) + + if text: + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(text) + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes all the options into the formatter if they exist.""" + opts = [] + for param in self.get_params(ctx): + rv = param.get_help_record(ctx) + if rv is not None: + opts.append(rv) + + if opts: + with formatter.section(_("Options")): + formatter.write_dl(opts) + + def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the epilog into the formatter if it exists.""" + if self.epilog: + epilog = inspect.cleandoc(self.epilog) + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(epilog) + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + parser = self.make_parser(ctx) + opts, args, param_order = parser.parse_args(args=args) + + for param in iter_params_for_processing(param_order, self.get_params(ctx)): + value, args = param.handle_parse_result(ctx, opts, args) + + if args and not ctx.allow_extra_args and not ctx.resilient_parsing: + ctx.fail( + ngettext( + "Got unexpected extra argument ({args})", + "Got unexpected extra arguments ({args})", + len(args), + ).format(args=" ".join(map(str, args))) + ) + + ctx.args = args + ctx._opt_prefixes.update(parser._opt_prefixes) + return args + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the attached callback (if it exists) + in the right way. + """ + if self.deprecated: + message = _( + "DeprecationWarning: The command {name!r} is deprecated." + ).format(name=self.name) + echo(style(message, fg="red"), err=True) + + if self.callback is not None: + return ctx.invoke(self.callback, **ctx.params) + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of options and chained multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: t.List["CompletionItem"] = [] + + if incomplete and not incomplete[0].isalnum(): + for param in self.get_params(ctx): + if ( + not isinstance(param, Option) + or param.hidden + or ( + not param.multiple + and ctx.get_parameter_source(param.name) # type: ignore + is ParameterSource.COMMANDLINE + ) + ): + continue + + results.extend( + CompletionItem(name, help=param.help) + for name in [*param.opts, *param.secondary_opts] + if name.startswith(incomplete) + ) + + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class MultiCommand(Command): + """A multi command is the basic implementation of a command that + dispatches to subcommands. The most common version is the + :class:`Group`. + + :param invoke_without_command: this controls how the multi command itself + is invoked. By default it's only invoked + if a subcommand is provided. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is enabled by default if + `invoke_without_command` is disabled or disabled + if it's enabled. If enabled this will add + ``--help`` as argument if no arguments are + passed. + :param subcommand_metavar: the string that is used in the documentation + to indicate the subcommand place. + :param chain: if this is set to `True` chaining of multiple subcommands + is enabled. This restricts the form of commands in that + they cannot have optional arguments but it allows + multiple commands to be chained together. + :param result_callback: The result callback to attach to this multi + command. This can be set or changed later with the + :meth:`result_callback` decorator. + :param attrs: Other command arguments described in :class:`Command`. + """ + + allow_extra_args = True + allow_interspersed_args = False + + def __init__( + self, + name: t.Optional[str] = None, + invoke_without_command: bool = False, + no_args_is_help: t.Optional[bool] = None, + subcommand_metavar: t.Optional[str] = None, + chain: bool = False, + result_callback: t.Optional[t.Callable[..., t.Any]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + + if no_args_is_help is None: + no_args_is_help = not invoke_without_command + + self.no_args_is_help = no_args_is_help + self.invoke_without_command = invoke_without_command + + if subcommand_metavar is None: + if chain: + subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." + else: + subcommand_metavar = "COMMAND [ARGS]..." + + self.subcommand_metavar = subcommand_metavar + self.chain = chain + # The result callback that is stored. This can be set or + # overridden with the :func:`result_callback` decorator. + self._result_callback = result_callback + + if self.chain: + for param in self.params: + if isinstance(param, Argument) and not param.required: + raise RuntimeError( + "Multi commands in chain mode cannot have" + " optional arguments." 
+ ) + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict(ctx) + commands = {} + + for name in self.list_commands(ctx): + command = self.get_command(ctx, name) + + if command is None: + continue + + sub_ctx = ctx._make_sub_context(command) + + with sub_ctx.scope(cleanup=False): + commands[name] = command.to_info_dict(sub_ctx) + + info_dict.update(commands=commands, chain=self.chain) + return info_dict + + def collect_usage_pieces(self, ctx: Context) -> t.List[str]: + rv = super().collect_usage_pieces(ctx) + rv.append(self.subcommand_metavar) + return rv + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + super().format_options(ctx, formatter) + self.format_commands(ctx, formatter) + + def result_callback(self, replace: bool = False) -> t.Callable[[F], F]: + """Adds a result callback to the command. By default if a + result callback is already registered this will chain them but + this can be disabled with the `replace` parameter. The result + callback is invoked with the return value of the subcommand + (or the list of return values from all subcommands if chaining + is enabled) as well as the parameters as they would be passed + to the main callback. + + Example:: + + @click.group() + @click.option('-i', '--input', default=23) + def cli(input): + return 42 + + @cli.result_callback() + def process_result(result, input): + return result + input + + :param replace: if set to `True` an already existing result + callback will be removed. + + .. versionchanged:: 8.0 + Renamed from ``resultcallback``. + + .. versionadded:: 3.0 + """ + + def decorator(f: F) -> F: + old_callback = self._result_callback + + if old_callback is None or replace: + self._result_callback = f + return f + + def function(__value, *args, **kwargs): # type: ignore + inner = old_callback(__value, *args, **kwargs) + return f(inner, *args, **kwargs) + + self._result_callback = rv = update_wrapper(t.cast(F, function), f) + return rv + + return decorator + + def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: + """Extra format methods for multi methods that adds all the commands + after the options. + """ + commands = [] + for subcommand in self.list_commands(ctx): + cmd = self.get_command(ctx, subcommand) + # What is this, the tool lied about a command. 
Ignore it + if cmd is None: + continue + if cmd.hidden: + continue + + commands.append((subcommand, cmd)) + + # allow for 3 times the default spacing + if len(commands): + limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) + + rows = [] + for subcommand, cmd in commands: + help = cmd.get_short_help_str(limit) + rows.append((subcommand, help)) + + if rows: + with formatter.section(_("Commands")): + formatter.write_dl(rows) + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + rest = super().parse_args(ctx, args) + + if self.chain: + ctx.protected_args = rest + ctx.args = [] + elif rest: + ctx.protected_args, ctx.args = rest[:1], rest[1:] + + return ctx.args + + def invoke(self, ctx: Context) -> t.Any: + def _process_result(value: t.Any) -> t.Any: + if self._result_callback is not None: + value = ctx.invoke(self._result_callback, value, **ctx.params) + return value + + if not ctx.protected_args: + if self.invoke_without_command: + # No subcommand was invoked, so the result callback is + # invoked with the group return value for regular + # groups, or an empty list for chained groups. + with ctx: + rv = super().invoke(ctx) + return _process_result([] if self.chain else rv) + ctx.fail(_("Missing command.")) + + # Fetch args back out + args = [*ctx.protected_args, *ctx.args] + ctx.args = [] + ctx.protected_args = [] + + # If we're not in chain mode, we only allow the invocation of a + # single command but we also inform the current context about the + # name of the command to invoke. + if not self.chain: + # Make sure the context is entered so we do not clean up + # resources until the result processor has worked. + with ctx: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + ctx.invoked_subcommand = cmd_name + super().invoke(ctx) + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) + with sub_ctx: + return _process_result(sub_ctx.command.invoke(sub_ctx)) + + # In chain mode we create the contexts step by step, but after the + # base command has been invoked. Because at that point we do not + # know the subcommands yet, the invoked subcommand attribute is + # set to ``*`` to inform the command that subcommands are executed + # but nothing else. + with ctx: + ctx.invoked_subcommand = "*" if args else None + super().invoke(ctx) + + # Otherwise we make every single context and invoke them in a + # chain. In that case the return value to the result processor + # is the list of all invoked subcommand's results. + contexts = [] + while args: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + sub_ctx = cmd.make_context( + cmd_name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + ) + contexts.append(sub_ctx) + args, sub_ctx.args = sub_ctx.args, [] + + rv = [] + for sub_ctx in contexts: + with sub_ctx: + rv.append(sub_ctx.command.invoke(sub_ctx)) + return _process_result(rv) + + def resolve_command( + self, ctx: Context, args: t.List[str] + ) -> t.Tuple[t.Optional[str], t.Optional[Command], t.List[str]]: + cmd_name = make_str(args[0]) + original_cmd_name = cmd_name + + # Get the command + cmd = self.get_command(ctx, cmd_name) + + # If we can't find the command but there is a normalization + # function available, we try with that one. 
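A minimal sketch (an editor's illustration, not part of the vendored file) of the chain-mode flow described above: with ``chain=True`` every subcommand's return value is collected and handed to the result callback as a list. The command names ``step_one`` and ``step_two`` are hypothetical::

    import click

    @click.group(chain=True)
    def cli():
        pass

    @cli.result_callback()
    def collect(results):
        # In chain mode the callback receives the list of all subcommand results.
        click.echo(f"ran {len(results)} steps")

    @cli.command()
    def step_one():
        return "one"

    @cli.command()
    def step_two():
        return "two"

    # `cli step-one step-two` invokes both commands and then collect(["one", "two"]).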
+ if cmd is None and ctx.token_normalize_func is not None: + cmd_name = ctx.token_normalize_func(cmd_name) + cmd = self.get_command(ctx, cmd_name) + + # If we don't find the command we want to show an error message + # to the user that it was not provided. However, there is + # something else we should do: if the first argument looks like + # an option we want to kick off parsing again for arguments to + # resolve things like --help which now should go to the main + # place. + if cmd is None and not ctx.resilient_parsing: + if split_opt(cmd_name)[0]: + self.parse_args(ctx, ctx.args) + ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name)) + return cmd_name if cmd else None, cmd, args[1:] + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + """Given a context and a command name, this returns a + :class:`Command` object if it exists or returns `None`. + """ + raise NotImplementedError + + def list_commands(self, ctx: Context) -> t.List[str]: + """Returns a list of subcommand names in the order they should + appear. + """ + return [] + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of options, subcommands, and chained + multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results = [ + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + ] + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class Group(MultiCommand): + """A group allows a command to have subcommands attached. This is + the most common way to implement nesting in Click. + + :param name: The name of the group command. + :param commands: A dict mapping names to :class:`Command` objects. + Can also be a list of :class:`Command`, which will use + :attr:`Command.name` to create the dict. + :param attrs: Other command arguments described in + :class:`MultiCommand`, :class:`Command`, and + :class:`BaseCommand`. + + .. versionchanged:: 8.0 + The ``commands`` argument can be a list of command objects. + """ + + #: If set, this is used by the group's :meth:`command` decorator + #: as the default :class:`Command` class. This is useful to make all + #: subcommands use a custom command class. + #: + #: .. versionadded:: 8.0 + command_class: t.Optional[t.Type[Command]] = None + + #: If set, this is used by the group's :meth:`group` decorator + #: as the default :class:`Group` class. This is useful to make all + #: subgroups use a custom group class. + #: + #: If set to the special value :class:`type` (literally + #: ``group_class = type``), this group's class will be used as the + #: default class. This makes a custom group class continue to make + #: custom groups. + #: + #: .. 
versionadded:: 8.0 + group_class: t.Optional[t.Union[t.Type["Group"], t.Type[type]]] = None + # Literal[type] isn't valid, so use Type[type] + + def __init__( + self, + name: t.Optional[str] = None, + commands: t.Optional[ + t.Union[t.MutableMapping[str, Command], t.Sequence[Command]] + ] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + + if commands is None: + commands = {} + elif isinstance(commands, abc.Sequence): + commands = {c.name: c for c in commands if c.name is not None} + + #: The registered subcommands by their exported names. + self.commands: t.MutableMapping[str, Command] = commands + + def add_command(self, cmd: Command, name: t.Optional[str] = None) -> None: + """Registers another :class:`Command` with this group. If the name + is not provided, the name of the command is used. + """ + name = name or cmd.name + if name is None: + raise TypeError("Command has no name.") + _check_multicommand(self, name, cmd, register=True) + self.commands[name] = cmd + + @t.overload + def command(self, __func: t.Callable[..., t.Any]) -> Command: + ... + + @t.overload + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], Command]: + ... + + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], Command], Command]: + """A shortcut decorator for declaring and attaching a command to + the group. This takes the same arguments as :func:`command` and + immediately registers the created command with this group by + calling :meth:`add_command`. + + To customize the command class used, set the + :attr:`command_class` attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`command_class` attribute. + """ + from .decorators import command + + func: t.Optional[t.Callable[..., t.Any]] = None + + if args and callable(args[0]): + assert ( + len(args) == 1 and not kwargs + ), "Use 'command(**kwargs)(callable)' to provide arguments." + (func,) = args + args = () + + if self.command_class and kwargs.get("cls") is None: + kwargs["cls"] = self.command_class + + def decorator(f: t.Callable[..., t.Any]) -> Command: + cmd: Command = command(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + @t.overload + def group(self, __func: t.Callable[..., t.Any]) -> "Group": + ... + + @t.overload + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], "Group"]: + ... + + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], "Group"], "Group"]: + """A shortcut decorator for declaring and attaching a group to + the group. This takes the same arguments as :func:`group` and + immediately registers the created group with this group by + calling :meth:`add_command`. + + To customize the group class used, set the :attr:`group_class` + attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`group_class` attribute. + """ + from .decorators import group + + func: t.Optional[t.Callable[..., t.Any]] = None + + if args and callable(args[0]): + assert ( + len(args) == 1 and not kwargs + ), "Use 'group(**kwargs)(callable)' to provide arguments." 
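A short sketch, assuming a hypothetical ``TimedCommand`` class, of the ``command_class`` hook described above: subclassing :class:`Group` and setting ``command_class`` makes every command created through the group's :meth:`command` decorator use that class::

    import click

    class TimedCommand(click.Command):
        pass  # hypothetical custom command class

    class TimedGroup(click.Group):
        command_class = TimedCommand

    @click.group(cls=TimedGroup)
    def cli():
        pass

    @cli.command()  # instantiated as TimedCommand via command_class
    def build():
        click.echo("building")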
+ (func,) = args + args = () + + if self.group_class is not None and kwargs.get("cls") is None: + if self.group_class is type: + kwargs["cls"] = type(self) + else: + kwargs["cls"] = self.group_class + + def decorator(f: t.Callable[..., t.Any]) -> "Group": + cmd: Group = group(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + return self.commands.get(cmd_name) + + def list_commands(self, ctx: Context) -> t.List[str]: + return sorted(self.commands) + + +class CommandCollection(MultiCommand): + """A command collection is a multi command that merges multiple multi + commands together into one. This is a straightforward implementation + that accepts a list of different multi commands as sources and + provides all the commands for each of them. + + See :class:`MultiCommand` and :class:`Command` for the description of + ``name`` and ``attrs``. + """ + + def __init__( + self, + name: t.Optional[str] = None, + sources: t.Optional[t.List[MultiCommand]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + #: The list of registered multi commands. + self.sources: t.List[MultiCommand] = sources or [] + + def add_source(self, multi_cmd: MultiCommand) -> None: + """Adds a new multi command to the chain dispatcher.""" + self.sources.append(multi_cmd) + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + for source in self.sources: + rv = source.get_command(ctx, cmd_name) + + if rv is not None: + if self.chain: + _check_multicommand(self, cmd_name, rv) + + return rv + + return None + + def list_commands(self, ctx: Context) -> t.List[str]: + rv: t.Set[str] = set() + + for source in self.sources: + rv.update(source.list_commands(ctx)) + + return sorted(rv) + + +def _check_iter(value: t.Any) -> t.Iterator[t.Any]: + """Check if the value is iterable but not a string. Raises a type + error, or return an iterator over the value. + """ + if isinstance(value, str): + raise TypeError + + return iter(value) + + +class Parameter: + r"""A parameter to a command comes in two versions: they are either + :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently + not supported by design as some of the internals for parsing are + intentionally not finalized. + + Some settings are supported by both options and arguments. + + :param param_decls: the parameter declarations for this option or + argument. This is a list of flags or argument + names. + :param type: the type that should be used. Either a :class:`ParamType` + or a Python type. The latter is converted into the former + automatically if supported. + :param required: controls if this is optional or not. + :param default: the default value if omitted. This can also be a callable, + in which case it's invoked when the default is needed + without any arguments. + :param callback: A function to further process or validate the value + after type conversion. It is called as ``f(ctx, param, value)`` + and must return the value. It is called for all sources, + including prompts. + :param nargs: the number of arguments to match. If not ``1`` the return + value is a tuple instead of single value. The default for + nargs is ``1`` (except if the type is a tuple, then it's + the arity of the tuple). If ``nargs=-1``, all remaining + parameters are collected. + :param metavar: how the value is represented in the help page. 
+ :param expose_value: if this is `True` then the value is passed onwards + to the command callback and stored on the context, + otherwise it's skipped. + :param is_eager: eager values are processed before non eager ones. This + should not be set for arguments or it will inverse the + order of processing. + :param envvar: a string or list of strings that are environment variables + that should be checked. + :param shell_complete: A function that returns custom shell + completions. Used instead of the param's type completion if + given. Takes ``ctx, param, incomplete`` and must return a list + of :class:`~click.shell_completion.CompletionItem` or a list of + strings. + + .. versionchanged:: 8.0 + ``process_value`` validates required parameters and bounded + ``nargs``, and invokes the parameter callback before returning + the value. This allows the callback to validate prompts. + ``full_process_value`` is removed. + + .. versionchanged:: 8.0 + ``autocompletion`` is renamed to ``shell_complete`` and has new + semantics described above. The old name is deprecated and will + be removed in 8.1, until then it will be wrapped to match the + new requirements. + + .. versionchanged:: 8.0 + For ``multiple=True, nargs>1``, the default must be a list of + tuples. + + .. versionchanged:: 8.0 + Setting a default is no longer required for ``nargs>1``, it will + default to ``None``. ``multiple=True`` or ``nargs=-1`` will + default to ``()``. + + .. versionchanged:: 7.1 + Empty environment variables are ignored rather than taking the + empty string value. This makes it possible for scripts to clear + variables if they can't unset them. + + .. versionchanged:: 2.0 + Changed signature for parameter callback to also be passed the + parameter. The old callback format will still work, but it will + raise a warning to give you a chance to migrate the code easier. + """ + + param_type_name = "parameter" + + def __init__( + self, + param_decls: t.Optional[t.Sequence[str]] = None, + type: t.Optional[t.Union[types.ParamType, t.Any]] = None, + required: bool = False, + default: t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]] = None, + callback: t.Optional[t.Callable[[Context, "Parameter", t.Any], t.Any]] = None, + nargs: t.Optional[int] = None, + multiple: bool = False, + metavar: t.Optional[str] = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: t.Optional[t.Union[str, t.Sequence[str]]] = None, + shell_complete: t.Optional[ + t.Callable[ + [Context, "Parameter", str], + t.Union[t.List["CompletionItem"], t.List[str]], + ] + ] = None, + ) -> None: + self.name: t.Optional[str] + self.opts: t.List[str] + self.secondary_opts: t.List[str] + self.name, self.opts, self.secondary_opts = self._parse_decls( + param_decls or (), expose_value + ) + self.type: types.ParamType = types.convert_type(type, default) + + # Default nargs to what the type tells us if we have that + # information available. + if nargs is None: + if self.type.is_composite: + nargs = self.type.arity + else: + nargs = 1 + + self.required = required + self.callback = callback + self.nargs = nargs + self.multiple = multiple + self.expose_value = expose_value + self.default = default + self.is_eager = is_eager + self.metavar = metavar + self.envvar = envvar + self._custom_shell_complete = shell_complete + + if __debug__: + if self.type.is_composite and nargs != self.type.arity: + raise ValueError( + f"'nargs' must be {self.type.arity} (or None) for" + f" type {self.type!r}, but it was {nargs}." 
+ ) + + # Skip no default or callable default. + check_default = default if not callable(default) else None + + if check_default is not None: + if multiple: + try: + # Only check the first value against nargs. + check_default = next(_check_iter(check_default), None) + except TypeError: + raise ValueError( + "'default' must be a list when 'multiple' is true." + ) from None + + # Can be None for multiple with empty default. + if nargs != 1 and check_default is not None: + try: + _check_iter(check_default) + except TypeError: + if multiple: + message = ( + "'default' must be a list of lists when 'multiple' is" + " true and 'nargs' != 1." + ) + else: + message = "'default' must be a list when 'nargs' != 1." + + raise ValueError(message) from None + + if nargs > 1 and len(check_default) != nargs: + subject = "item length" if multiple else "length" + raise ValueError( + f"'default' {subject} must match nargs={nargs}." + ) + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + return { + "name": self.name, + "param_type_name": self.param_type_name, + "opts": self.opts, + "secondary_opts": self.secondary_opts, + "type": self.type.to_info_dict(), + "required": self.required, + "nargs": self.nargs, + "multiple": self.multiple, + "default": self.default, + "envvar": self.envvar, + } + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + raise NotImplementedError() + + @property + def human_readable_name(self) -> str: + """Returns the human readable name of this parameter. This is the + same as the name for options, but the metavar for arguments. + """ + return self.name # type: ignore + + def make_metavar(self) -> str: + if self.metavar is not None: + return self.metavar + + metavar = self.type.get_metavar(self) + + if metavar is None: + metavar = self.type.name.upper() + + if self.nargs != 1: + metavar += "..." + + return metavar + + @t.overload + def get_default( + self, ctx: Context, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + """Get the default for the parameter. Tries + :meth:`Context.lookup_default` first, then the local default. + + :param ctx: Current context. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0.2 + Type casting is no longer performed when getting a default. + + .. versionchanged:: 8.0.1 + Type casting can fail in resilient parsing mode. Invalid + defaults will not prevent showing help text. + + .. versionchanged:: 8.0 + Looks at ``ctx.default_map`` first. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. 
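To illustrate the default-shape checks above (a sketch; ``--point`` is a hypothetical option): with ``multiple=True`` the default must be a list, and with ``nargs != 1`` each item must itself hold ``nargs`` values::

    import click

    @click.command()
    @click.option("--point", type=int, nargs=2, multiple=True, default=[(0, 0), (1, 1)])
    def plot(point):
        # When defaulted, `point` is ((0, 0), (1, 1)).
        click.echo(point)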
+ """ + value = ctx.lookup_default(self.name, call=False) # type: ignore + + if value is None: + value = self.default + + if call and callable(value): + value = value() + + return value + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + raise NotImplementedError() + + def consume_value( + self, ctx: Context, opts: t.Mapping[str, t.Any] + ) -> t.Tuple[t.Any, ParameterSource]: + value = opts.get(self.name) # type: ignore + source = ParameterSource.COMMANDLINE + + if value is None: + value = self.value_from_envvar(ctx) + source = ParameterSource.ENVIRONMENT + + if value is None: + value = ctx.lookup_default(self.name) # type: ignore + source = ParameterSource.DEFAULT_MAP + + if value is None: + value = self.get_default(ctx) + source = ParameterSource.DEFAULT + + return value, source + + def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: + """Convert and validate a value against the option's + :attr:`type`, :attr:`multiple`, and :attr:`nargs`. + """ + if value is None: + return () if self.multiple or self.nargs == -1 else None + + def check_iter(value: t.Any) -> t.Iterator[t.Any]: + try: + return _check_iter(value) + except TypeError: + # This should only happen when passing in args manually, + # the parser should construct an iterable when parsing + # the command line. + raise BadParameter( + _("Value must be an iterable."), ctx=ctx, param=self + ) from None + + if self.nargs == 1 or self.type.is_composite: + + def convert(value: t.Any) -> t.Any: + return self.type(value, param=self, ctx=ctx) + + elif self.nargs == -1: + + def convert(value: t.Any) -> t.Any: # t.Tuple[t.Any, ...] + return tuple(self.type(x, self, ctx) for x in check_iter(value)) + + else: # nargs > 1 + + def convert(value: t.Any) -> t.Any: # t.Tuple[t.Any, ...] 
+ value = tuple(check_iter(value)) + + if len(value) != self.nargs: + raise BadParameter( + ngettext( + "Takes {nargs} values but 1 was given.", + "Takes {nargs} values but {len} were given.", + len(value), + ).format(nargs=self.nargs, len=len(value)), + ctx=ctx, + param=self, + ) + + return tuple(self.type(x, self, ctx) for x in value) + + if self.multiple: + return tuple(convert(x) for x in check_iter(value)) + + return convert(value) + + def value_is_missing(self, value: t.Any) -> bool: + if value is None: + return True + + if (self.nargs != 1 or self.multiple) and value == (): + return True + + return False + + def process_value(self, ctx: Context, value: t.Any) -> t.Any: + value = self.type_cast_value(ctx, value) + + if self.required and self.value_is_missing(value): + raise MissingParameter(ctx=ctx, param=self) + + if self.callback is not None: + value = self.callback(ctx, self, value) + + return value + + def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: + if self.envvar is None: + return None + + if isinstance(self.envvar, str): + rv = os.environ.get(self.envvar) + + if rv: + return rv + else: + for envvar in self.envvar: + rv = os.environ.get(envvar) + + if rv: + return rv + + return None + + def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: + rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) + + if rv is not None and self.nargs != 1: + rv = self.type.split_envvar_value(rv) + + return rv + + def handle_parse_result( + self, ctx: Context, opts: t.Mapping[str, t.Any], args: t.List[str] + ) -> t.Tuple[t.Any, t.List[str]]: + with augment_usage_errors(ctx, param=self): + value, source = self.consume_value(ctx, opts) + ctx.set_parameter_source(self.name, source) # type: ignore + + try: + value = self.process_value(ctx, value) + except Exception: + if not ctx.resilient_parsing: + raise + + value = None + + if self.expose_value: + ctx.params[self.name] = value # type: ignore + + return value, args + + def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: + pass + + def get_usage_pieces(self, ctx: Context) -> t.List[str]: + return [] + + def get_error_hint(self, ctx: Context) -> str: + """Get a stringified version of the param for use in error messages to + indicate which param caused the error. + """ + hint_list = self.opts or [self.human_readable_name] + return " / ".join(f"'{x}'" for x in hint_list) + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. If a + ``shell_complete`` function was given during init, it is used. + Otherwise, the :attr:`type` + :meth:`~click.types.ParamType.shell_complete` function is used. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + if self._custom_shell_complete is not None: + results = self._custom_shell_complete(ctx, self, incomplete) + + if results and isinstance(results[0], str): + from click.shell_completion import CompletionItem + + results = [CompletionItem(c) for c in results] + + return t.cast(t.List["CompletionItem"], results) + + return self.type.shell_complete(ctx, self, incomplete) + + +class Option(Parameter): + """Options are usually optional values on the command line and + have some extra features that arguments don't have. + + All other parameters are passed onwards to the parameter constructor. + + :param show_default: Show the default value for this option in its + help text. 
Values are not shown by default, unless + :attr:`Context.show_default` is ``True``. If this value is a + string, it shows that string in parentheses instead of the + actual value. This is particularly useful for dynamic options. + For single option boolean flags, the default remains hidden if + its value is ``False``. + :param show_envvar: Controls if an environment variable should be + shown on the help page. Normally, environment variables are not + shown. + :param prompt: If set to ``True`` or a non empty string then the + user will be prompted for input. If set to ``True`` the prompt + will be the option name capitalized. + :param confirmation_prompt: Prompt a second time to confirm the + value if it was prompted for. Can be set to a string instead of + ``True`` to customize the message. + :param prompt_required: If set to ``False``, the user will be + prompted for input only when the option was specified as a flag + without a value. + :param hide_input: If this is ``True`` then the input on the prompt + will be hidden from the user. This is useful for password input. + :param is_flag: forces this option to act as a flag. The default is + auto detection. + :param flag_value: which value should be used for this flag if it's + enabled. This is set to a boolean automatically if + the option string contains a slash to mark two options. + :param multiple: if this is set to `True` then the argument is accepted + multiple times and recorded. This is similar to ``nargs`` + in how it works but supports arbitrary number of + arguments. + :param count: this flag makes an option increment an integer. + :param allow_from_autoenv: if this is enabled then the value of this + parameter will be pulled from an environment + variable in case a prefix is defined on the + context. + :param help: the help string. + :param hidden: hide this option from help outputs. + :param attrs: Other command arguments described in :class:`Parameter`. + + .. versionchanged:: 8.1.0 + Help text indentation is cleaned here instead of only in the + ``@option`` decorator. + + .. versionchanged:: 8.1.0 + The ``show_default`` parameter overrides + ``Context.show_default``. + + .. versionchanged:: 8.1.0 + The default of a single option boolean flag is not shown if the + default value is ``False``. + + .. versionchanged:: 8.0.1 + ``type`` is detected from ``flag_value`` if given. 
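A compact usage sketch of the option behaviours documented above (flag pairs, prompting, counting); the option names are hypothetical::

    import click

    @click.command()
    @click.option("--shout/--no-shout", default=False, show_default=True)
    @click.option("--name", prompt=True)          # asks "Name:" when not provided
    @click.option("-v", "--verbose", count=True)  # -vvv gives verbose == 3
    def greet(shout, name, verbose):
        msg = f"hello {name}"
        click.echo(msg.upper() if shout else msg)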
+ """ + + param_type_name = "option" + + def __init__( + self, + param_decls: t.Optional[t.Sequence[str]] = None, + show_default: t.Union[bool, str, None] = None, + prompt: t.Union[bool, str] = False, + confirmation_prompt: t.Union[bool, str] = False, + prompt_required: bool = True, + hide_input: bool = False, + is_flag: t.Optional[bool] = None, + flag_value: t.Optional[t.Any] = None, + multiple: bool = False, + count: bool = False, + allow_from_autoenv: bool = True, + type: t.Optional[t.Union[types.ParamType, t.Any]] = None, + help: t.Optional[str] = None, + hidden: bool = False, + show_choices: bool = True, + show_envvar: bool = False, + **attrs: t.Any, + ) -> None: + if help: + help = inspect.cleandoc(help) + + default_is_missing = "default" not in attrs + super().__init__(param_decls, type=type, multiple=multiple, **attrs) + + if prompt is True: + if self.name is None: + raise TypeError("'name' is required with 'prompt=True'.") + + prompt_text: t.Optional[str] = self.name.replace("_", " ").capitalize() + elif prompt is False: + prompt_text = None + else: + prompt_text = prompt + + self.prompt = prompt_text + self.confirmation_prompt = confirmation_prompt + self.prompt_required = prompt_required + self.hide_input = hide_input + self.hidden = hidden + + # If prompt is enabled but not required, then the option can be + # used as a flag to indicate using prompt or flag_value. + self._flag_needs_value = self.prompt is not None and not self.prompt_required + + if is_flag is None: + if flag_value is not None: + # Implicitly a flag because flag_value was set. + is_flag = True + elif self._flag_needs_value: + # Not a flag, but when used as a flag it shows a prompt. + is_flag = False + else: + # Implicitly a flag because flag options were given. + is_flag = bool(self.secondary_opts) + elif is_flag is False and not self._flag_needs_value: + # Not a flag, and prompt is not enabled, can be used as a + # flag if flag_value is set. + self._flag_needs_value = flag_value is not None + + self.default: t.Union[t.Any, t.Callable[[], t.Any]] + + if is_flag and default_is_missing and not self.required: + if multiple: + self.default = () + else: + self.default = False + + if flag_value is None: + flag_value = not self.default + + self.type: types.ParamType + if is_flag and type is None: + # Re-guess the type from the flag value instead of the + # default. + self.type = types.convert_type(None, flag_value) + + self.is_flag: bool = is_flag + self.is_bool_flag: bool = is_flag and isinstance(self.type, types.BoolParamType) + self.flag_value: t.Any = flag_value + + # Counting + self.count = count + if count: + if type is None: + self.type = types.IntRange(min=0) + if default_is_missing: + self.default = 0 + + self.allow_from_autoenv = allow_from_autoenv + self.help = help + self.show_default = show_default + self.show_choices = show_choices + self.show_envvar = show_envvar + + if __debug__: + if self.nargs == -1: + raise TypeError("nargs=-1 is not supported for options.") + + if self.prompt and self.is_flag and not self.is_bool_flag: + raise TypeError("'prompt' is not valid for non-boolean flag.") + + if not self.is_bool_flag and self.secondary_opts: + raise TypeError("Secondary flag is not valid for non-boolean flag.") + + if self.is_bool_flag and self.hide_input and self.prompt is not None: + raise TypeError( + "'prompt' with 'hide_input' is not valid for boolean flag." 
+ ) + + if self.count: + if self.multiple: + raise TypeError("'count' is not valid with 'multiple'.") + + if self.is_flag: + raise TypeError("'count' is not valid with 'is_flag'.") + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + help=self.help, + prompt=self.prompt, + is_flag=self.is_flag, + flag_value=self.flag_value, + count=self.count, + hidden=self.hidden, + ) + return info_dict + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + opts = [] + secondary_opts = [] + name = None + possible_names = [] + + for decl in decls: + if decl.isidentifier(): + if name is not None: + raise TypeError(f"Name '{name}' defined twice") + name = decl + else: + split_char = ";" if decl[:1] == "/" else "/" + if split_char in decl: + first, second = decl.split(split_char, 1) + first = first.rstrip() + if first: + possible_names.append(split_opt(first)) + opts.append(first) + second = second.lstrip() + if second: + secondary_opts.append(second.lstrip()) + if first == second: + raise ValueError( + f"Boolean option {decl!r} cannot use the" + " same flag for true/false." + ) + else: + possible_names.append(split_opt(decl)) + opts.append(decl) + + if name is None and possible_names: + possible_names.sort(key=lambda x: -len(x[0])) # group long options first + name = possible_names[0][1].replace("-", "_").lower() + if not name.isidentifier(): + name = None + + if name is None: + if not expose_value: + return None, opts, secondary_opts + raise TypeError("Could not determine name for option") + + if not opts and not secondary_opts: + raise TypeError( + f"No options defined but a name was passed ({name})." + " Did you mean to declare an argument instead? Did" + f" you mean to pass '--{name}'?" 
+ ) + + return name, opts, secondary_opts + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + if self.multiple: + action = "append" + elif self.count: + action = "count" + else: + action = "store" + + if self.is_flag: + action = f"{action}_const" + + if self.is_bool_flag and self.secondary_opts: + parser.add_option( + obj=self, opts=self.opts, dest=self.name, action=action, const=True + ) + parser.add_option( + obj=self, + opts=self.secondary_opts, + dest=self.name, + action=action, + const=False, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + const=self.flag_value, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + nargs=self.nargs, + ) + + def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: + if self.hidden: + return None + + any_prefix_is_slash = False + + def _write_opts(opts: t.Sequence[str]) -> str: + nonlocal any_prefix_is_slash + + rv, any_slashes = join_options(opts) + + if any_slashes: + any_prefix_is_slash = True + + if not self.is_flag and not self.count: + rv += f" {self.make_metavar()}" + + return rv + + rv = [_write_opts(self.opts)] + + if self.secondary_opts: + rv.append(_write_opts(self.secondary_opts)) + + help = self.help or "" + extra = [] + + if self.show_envvar: + envvar = self.envvar + + if envvar is None: + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + + if envvar is not None: + var_str = ( + envvar + if isinstance(envvar, str) + else ", ".join(str(d) for d in envvar) + ) + extra.append(_("env var: {var}").format(var=var_str)) + + # Temporarily enable resilient parsing to avoid type casting + # failing for the default. Might be possible to extend this to + # help formatting in general. + resilient = ctx.resilient_parsing + ctx.resilient_parsing = True + + try: + default_value = self.get_default(ctx, call=False) + finally: + ctx.resilient_parsing = resilient + + show_default = False + show_default_is_str = False + + if self.show_default is not None: + if isinstance(self.show_default, str): + show_default_is_str = show_default = True + else: + show_default = self.show_default + elif ctx.show_default is not None: + show_default = ctx.show_default + + if show_default_is_str or (show_default and (default_value is not None)): + if show_default_is_str: + default_string = f"({self.show_default})" + elif isinstance(default_value, (list, tuple)): + default_string = ", ".join(str(d) for d in default_value) + elif inspect.isfunction(default_value): + default_string = _("(dynamic)") + elif self.is_bool_flag and self.secondary_opts: + # For boolean flags that have distinct True/False opts, + # use the opt without prefix instead of the value. 
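For orientation, a sketch of how the extras assembled by ``get_help_record`` surface in ``--help`` output (hypothetical ``--retries`` option); the exact wrapping depends on terminal width::

    import click

    @click.command()
    @click.option(
        "--retries",
        default=3,
        show_default=True,
        envvar="RETRIES",
        show_envvar=True,
        help="How many times to retry.",
    )
    def sync(retries):
        click.echo(retries)

    # `sync --help` lists roughly:
    #   --retries INTEGER  How many times to retry.  [env var: RETRIES; default: 3]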
+ default_string = split_opt( + (self.opts if self.default else self.secondary_opts)[0] + )[1] + elif self.is_bool_flag and not self.secondary_opts and not default_value: + default_string = "" + else: + default_string = str(default_value) + + if default_string: + extra.append(_("default: {default}").format(default=default_string)) + + if ( + isinstance(self.type, types._NumberRangeBase) + # skip count with default range type + and not (self.count and self.type.min == 0 and self.type.max is None) + ): + range_str = self.type._describe_range() + + if range_str: + extra.append(range_str) + + if self.required: + extra.append(_("required")) + + if extra: + extra_str = "; ".join(extra) + help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" + + return ("; " if any_prefix_is_slash else " / ").join(rv), help + + @t.overload + def get_default( + self, ctx: Context, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + # If we're a non boolean flag our default is more complex because + # we need to look at all flags in the same group to figure out + # if we're the default one in which case we return the flag + # value as default. + if self.is_flag and not self.is_bool_flag: + for param in ctx.command.params: + if param.name == self.name and param.default: + return t.cast(Option, param).flag_value + + return None + + return super().get_default(ctx, call=call) + + def prompt_for_value(self, ctx: Context) -> t.Any: + """This is an alternative flow that can be activated in the full + value processing if a value does not exist. It will prompt the + user until a valid value exists and then returns the processed + value as result. + """ + assert self.prompt is not None + + # Calculate the default before prompting anything to be stable. + default = self.get_default(ctx) + + # If this is a prompt for a flag we need to handle this + # differently. + if self.is_bool_flag: + return confirm(self.prompt, default) + + return prompt( + self.prompt, + default=default, + type=self.type, + hide_input=self.hide_input, + show_choices=self.show_choices, + confirmation_prompt=self.confirmation_prompt, + value_proc=lambda x: self.process_value(ctx, x), + ) + + def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: + rv = super().resolve_envvar_value(ctx) + + if rv is not None: + return rv + + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + rv = os.environ.get(envvar) + + if rv: + return rv + + return None + + def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: + rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) + + if rv is None: + return None + + value_depth = (self.nargs != 1) + bool(self.multiple) + + if value_depth > 0: + rv = self.type.split_envvar_value(rv) + + if self.multiple and self.nargs != 1: + rv = batch(rv, self.nargs) + + return rv + + def consume_value( + self, ctx: Context, opts: t.Mapping[str, "Parameter"] + ) -> t.Tuple[t.Any, ParameterSource]: + value, source = super().consume_value(ctx, opts) + + # The parser will emit a sentinel value if the option can be + # given as a flag without a value. This is different from None + # to distinguish from the flag not being given at all. 
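A sketch of the optional-value flag behaviour that the ``_flag_needs_value`` sentinel handling here supports (``--token`` is a hypothetical option): with ``prompt`` set and ``prompt_required=False``, the prompt only fires when the option is passed as a bare flag::

    import click

    @click.command()
    @click.option("--token", prompt=True, prompt_required=False, default=None)
    def login(token):
        click.echo("no token" if token is None else "token set")

    # `login`                -> default used, no prompt
    # `login --token`        -> bare flag, prompts for a value
    # `login --token abc123` -> uses "abc123" directly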
+ if value is _flag_needs_value: + if self.prompt is not None and not ctx.resilient_parsing: + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + else: + value = self.flag_value + source = ParameterSource.COMMANDLINE + + elif ( + self.multiple + and value is not None + and any(v is _flag_needs_value for v in value) + ): + value = [self.flag_value if v is _flag_needs_value else v for v in value] + source = ParameterSource.COMMANDLINE + + # The value wasn't set, or used the param's default, prompt if + # prompting is enabled. + elif ( + source in {None, ParameterSource.DEFAULT} + and self.prompt is not None + and (self.required or self.prompt_required) + and not ctx.resilient_parsing + ): + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + + return value, source + + +class Argument(Parameter): + """Arguments are positional parameters to a command. They generally + provide fewer features than options but can have infinite ``nargs`` + and are required by default. + + All parameters are passed onwards to the constructor of :class:`Parameter`. + """ + + param_type_name = "argument" + + def __init__( + self, + param_decls: t.Sequence[str], + required: t.Optional[bool] = None, + **attrs: t.Any, + ) -> None: + if required is None: + if attrs.get("default") is not None: + required = False + else: + required = attrs.get("nargs", 1) > 0 + + if "multiple" in attrs: + raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") + + super().__init__(param_decls, required=required, **attrs) + + if __debug__: + if self.default is not None and self.nargs == -1: + raise TypeError("'default' is not supported for nargs=-1.") + + @property + def human_readable_name(self) -> str: + if self.metavar is not None: + return self.metavar + return self.name.upper() # type: ignore + + def make_metavar(self) -> str: + if self.metavar is not None: + return self.metavar + var = self.type.get_metavar(self) + if not var: + var = self.name.upper() # type: ignore + if not self.required: + var = f"[{var}]" + if self.nargs != 1: + var += "..." + return var + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + if not decls: + if not expose_value: + return None, [], [] + raise TypeError("Could not determine name for argument") + if len(decls) == 1: + name = arg = decls[0] + name = name.replace("-", "_").lower() + else: + raise TypeError( + "Arguments take exactly one parameter declaration, got" + f" {len(decls)}." 
+ ) + return name, [arg], [] + + def get_usage_pieces(self, ctx: Context) -> t.List[str]: + return [self.make_metavar()] + + def get_error_hint(self, ctx: Context) -> str: + return f"'{self.make_metavar()}'" + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/decorators.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/decorators.py new file mode 100644 index 000000000..d9bba9502 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/decorators.py @@ -0,0 +1,561 @@ +import inspect +import types +import typing as t +from functools import update_wrapper +from gettext import gettext as _ + +from .core import Argument +from .core import Command +from .core import Context +from .core import Group +from .core import Option +from .core import Parameter +from .globals import get_current_context +from .utils import echo + +if t.TYPE_CHECKING: + import typing_extensions as te + + P = te.ParamSpec("P") + +R = t.TypeVar("R") +T = t.TypeVar("T") +_AnyCallable = t.Callable[..., t.Any] +FC = t.TypeVar("FC", bound=t.Union[_AnyCallable, Command]) + + +def pass_context(f: "t.Callable[te.Concatenate[Context, P], R]") -> "t.Callable[P, R]": + """Marks a callback as wanting to receive the current context + object as first argument. + """ + + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": + return f(get_current_context(), *args, **kwargs) + + return update_wrapper(new_func, f) + + +def pass_obj(f: "t.Callable[te.Concatenate[t.Any, P], R]") -> "t.Callable[P, R]": + """Similar to :func:`pass_context`, but only pass the object on the + context onwards (:attr:`Context.obj`). This is useful if that object + represents the state of a nested system. + """ + + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": + return f(get_current_context().obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + +def make_pass_decorator( + object_type: t.Type[T], ensure: bool = False +) -> t.Callable[["t.Callable[te.Concatenate[T, P], R]"], "t.Callable[P, R]"]: + """Given an object type this creates a decorator that will work + similar to :func:`pass_obj` but instead of passing the object of the + current context, it will find the innermost context of type + :func:`object_type`. + + This generates a decorator that works roughly like this:: + + from functools import update_wrapper + + def decorator(f): + @pass_context + def new_func(ctx, *args, **kwargs): + obj = ctx.find_object(object_type) + return ctx.invoke(f, obj, *args, **kwargs) + return update_wrapper(new_func, f) + return decorator + + :param object_type: the type of the object to pass. + :param ensure: if set to `True`, a new object will be created and + remembered on the context if it's not there yet. + """ + + def decorator(f: "t.Callable[te.Concatenate[T, P], R]") -> "t.Callable[P, R]": + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": + ctx = get_current_context() + + obj: t.Optional[T] + if ensure: + obj = ctx.ensure_object(object_type) + else: + obj = ctx.find_object(object_type) + + if obj is None: + raise RuntimeError( + "Managed to invoke callback without a context" + f" object of type {object_type.__name__!r}" + " existing." 
+ ) + + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + return decorator # type: ignore[return-value] + + +def pass_meta_key( + key: str, *, doc_description: t.Optional[str] = None +) -> "t.Callable[[t.Callable[te.Concatenate[t.Any, P], R]], t.Callable[P, R]]": + """Create a decorator that passes a key from + :attr:`click.Context.meta` as the first argument to the decorated + function. + + :param key: Key in ``Context.meta`` to pass. + :param doc_description: Description of the object being passed, + inserted into the decorator's docstring. Defaults to "the 'key' + key from Context.meta". + + .. versionadded:: 8.0 + """ + + def decorator(f: "t.Callable[te.Concatenate[t.Any, P], R]") -> "t.Callable[P, R]": + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> R: + ctx = get_current_context() + obj = ctx.meta[key] + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + if doc_description is None: + doc_description = f"the {key!r} key from :attr:`click.Context.meta`" + + decorator.__doc__ = ( + f"Decorator that passes {doc_description} as the first argument" + " to the decorated function." + ) + return decorator # type: ignore[return-value] + + +CmdType = t.TypeVar("CmdType", bound=Command) + + +# variant: no call, directly as decorator for a function. +@t.overload +def command(name: _AnyCallable) -> Command: + ... + + +# variant: with positional name and with positional or keyword cls argument: +# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...) +@t.overload +def command( + name: t.Optional[str], + cls: t.Type[CmdType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], CmdType]: + ... + + +# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...) +@t.overload +def command( + name: None = None, + *, + cls: t.Type[CmdType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], CmdType]: + ... + + +# variant: with optional string name, no cls argument provided. +@t.overload +def command( + name: t.Optional[str] = ..., cls: None = None, **attrs: t.Any +) -> t.Callable[[_AnyCallable], Command]: + ... + + +def command( + name: t.Union[t.Optional[str], _AnyCallable] = None, + cls: t.Optional[t.Type[CmdType]] = None, + **attrs: t.Any, +) -> t.Union[Command, t.Callable[[_AnyCallable], t.Union[Command, CmdType]]]: + r"""Creates a new :class:`Command` and uses the decorated function as + callback. This will also automatically attach all decorated + :func:`option`\s and :func:`argument`\s as parameters to the command. + + The name of the command defaults to the name of the function with + underscores replaced by dashes. If you want to change that, you can + pass the intended name as the first argument. + + All keyword arguments are forwarded to the underlying command class. + For the ``params`` argument, any decorated params are appended to + the end of the list. + + Once decorated the function turns into a :class:`Command` instance + that can be invoked as a command line utility or be attached to a + command :class:`Group`. + + :param name: the name of the command. This defaults to the function + name with underscores replaced by dashes. + :param cls: the command class to instantiate. This defaults to + :class:`Command`. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.1 + The ``params`` argument can be used. Decorated params are + appended to the end of the list. 
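A minimal sketch of the :func:`command` decorator described above: the function name (with underscores turned into dashes) becomes the command name and the docstring becomes the help text; ``--count`` is a hypothetical option::

    import click

    @click.command()
    @click.option("--count", default=1, help="Number of greetings.")
    def say_hello(count):
        """Greet COUNT times."""
        for _ in range(count):
            click.echo("hello")

    # Invoked on the command line as `say-hello --count 2`.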
+ """ + + func: t.Optional[t.Callable[[_AnyCallable], t.Any]] = None + + if callable(name): + func = name + name = None + assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." + assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." + + if cls is None: + cls = t.cast(t.Type[CmdType], Command) + + def decorator(f: _AnyCallable) -> CmdType: + if isinstance(f, Command): + raise TypeError("Attempted to convert a callback into a command twice.") + + attr_params = attrs.pop("params", None) + params = attr_params if attr_params is not None else [] + + try: + decorator_params = f.__click_params__ # type: ignore + except AttributeError: + pass + else: + del f.__click_params__ # type: ignore + params.extend(reversed(decorator_params)) + + if attrs.get("help") is None: + attrs["help"] = f.__doc__ + + if t.TYPE_CHECKING: + assert cls is not None + assert not callable(name) + + cmd = cls( + name=name or f.__name__.lower().replace("_", "-"), + callback=f, + params=params, + **attrs, + ) + cmd.__doc__ = f.__doc__ + return cmd + + if func is not None: + return decorator(func) + + return decorator + + +GrpType = t.TypeVar("GrpType", bound=Group) + + +# variant: no call, directly as decorator for a function. +@t.overload +def group(name: _AnyCallable) -> Group: + ... + + +# variant: with positional name and with positional or keyword cls argument: +# @group(namearg, GroupCls, ...) or @group(namearg, cls=GroupCls, ...) +@t.overload +def group( + name: t.Optional[str], + cls: t.Type[GrpType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], GrpType]: + ... + + +# variant: name omitted, cls _must_ be a keyword argument, @group(cmd=GroupCls, ...) +@t.overload +def group( + name: None = None, + *, + cls: t.Type[GrpType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], GrpType]: + ... + + +# variant: with optional string name, no cls argument provided. +@t.overload +def group( + name: t.Optional[str] = ..., cls: None = None, **attrs: t.Any +) -> t.Callable[[_AnyCallable], Group]: + ... + + +def group( + name: t.Union[str, _AnyCallable, None] = None, + cls: t.Optional[t.Type[GrpType]] = None, + **attrs: t.Any, +) -> t.Union[Group, t.Callable[[_AnyCallable], t.Union[Group, GrpType]]]: + """Creates a new :class:`Group` with a function as callback. This + works otherwise the same as :func:`command` just that the `cls` + parameter is set to :class:`Group`. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + """ + if cls is None: + cls = t.cast(t.Type[GrpType], Group) + + if callable(name): + return command(cls=cls, **attrs)(name) + + return command(name, cls, **attrs) + + +def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None: + if isinstance(f, Command): + f.params.append(param) + else: + if not hasattr(f, "__click_params__"): + f.__click_params__ = [] # type: ignore + + f.__click_params__.append(param) # type: ignore + + +def argument( + *param_decls: str, cls: t.Optional[t.Type[Argument]] = None, **attrs: t.Any +) -> t.Callable[[FC], FC]: + """Attaches an argument to the command. All positional arguments are + passed as parameter declarations to :class:`Argument`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Argument` instance manually + and attaching it to the :attr:`Command.params` list. + + For the default argument class, refer to :class:`Argument` and + :class:`Parameter` for descriptions of parameters. + + :param cls: the argument class to instantiate. 
This defaults to + :class:`Argument`. + :param param_decls: Passed as positional arguments to the constructor of + ``cls``. + :param attrs: Passed as keyword arguments to the constructor of ``cls``. + """ + if cls is None: + cls = Argument + + def decorator(f: FC) -> FC: + _param_memo(f, cls(param_decls, **attrs)) + return f + + return decorator + + +def option( + *param_decls: str, cls: t.Optional[t.Type[Option]] = None, **attrs: t.Any +) -> t.Callable[[FC], FC]: + """Attaches an option to the command. All positional arguments are + passed as parameter declarations to :class:`Option`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Option` instance manually + and attaching it to the :attr:`Command.params` list. + + For the default option class, refer to :class:`Option` and + :class:`Parameter` for descriptions of parameters. + + :param cls: the option class to instantiate. This defaults to + :class:`Option`. + :param param_decls: Passed as positional arguments to the constructor of + ``cls``. + :param attrs: Passed as keyword arguments to the constructor of ``cls``. + """ + if cls is None: + cls = Option + + def decorator(f: FC) -> FC: + _param_memo(f, cls(param_decls, **attrs)) + return f + + return decorator + + +def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--yes`` option which shows a prompt before continuing if + not passed. If the prompt is declined, the program will exit. + + :param param_decls: One or more option names. Defaults to the single + value ``"--yes"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value: + ctx.abort() + + if not param_decls: + param_decls = ("--yes",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("callback", callback) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("prompt", "Do you want to continue?") + kwargs.setdefault("help", "Confirm the action without prompting.") + return option(*param_decls, **kwargs) + + +def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--password`` option which prompts for a password, hiding + input and asking to enter the value again for confirmation. + + :param param_decls: One or more option names. Defaults to the single + value ``"--password"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + if not param_decls: + param_decls = ("--password",) + + kwargs.setdefault("prompt", True) + kwargs.setdefault("confirmation_prompt", True) + kwargs.setdefault("hide_input", True) + return option(*param_decls, **kwargs) + + +def version_option( + version: t.Optional[str] = None, + *param_decls: str, + package_name: t.Optional[str] = None, + prog_name: t.Optional[str] = None, + message: t.Optional[str] = None, + **kwargs: t.Any, +) -> t.Callable[[FC], FC]: + """Add a ``--version`` option which immediately prints the version + number and exits the program. + + If ``version`` is not provided, Click will try to detect it using + :func:`importlib.metadata.version` to get the version for the + ``package_name``. On Python < 3.8, the ``importlib_metadata`` + backport must be installed. + + If ``package_name`` is not provided, Click will try to detect it by + inspecting the stack frames. This will be used to detect the + version, so it must match the name of the installed package. + + :param version: The version number to show. 
If not provided, Click + will try to detect it. + :param param_decls: One or more option names. Defaults to the single + value ``"--version"``. + :param package_name: The package name to detect the version from. If + not provided, Click will try to detect it. + :param prog_name: The name of the CLI to show in the message. If not + provided, it will be detected from the command. + :param message: The message to show. The values ``%(prog)s``, + ``%(package)s``, and ``%(version)s`` are available. Defaults to + ``"%(prog)s, version %(version)s"``. + :param kwargs: Extra arguments are passed to :func:`option`. + :raise RuntimeError: ``version`` could not be detected. + + .. versionchanged:: 8.0 + Add the ``package_name`` parameter, and the ``%(package)s`` + value for messages. + + .. versionchanged:: 8.0 + Use :mod:`importlib.metadata` instead of ``pkg_resources``. The + version is detected based on the package name, not the entry + point name. The Python package name must match the installed + package name, or be passed with ``package_name=``. + """ + if message is None: + message = _("%(prog)s, version %(version)s") + + if version is None and package_name is None: + frame = inspect.currentframe() + f_back = frame.f_back if frame is not None else None + f_globals = f_back.f_globals if f_back is not None else None + # break reference cycle + # https://docs.python.org/3/library/inspect.html#the-interpreter-stack + del frame + + if f_globals is not None: + package_name = f_globals.get("__name__") + + if package_name == "__main__": + package_name = f_globals.get("__package__") + + if package_name: + package_name = package_name.partition(".")[0] + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + nonlocal prog_name + nonlocal version + + if prog_name is None: + prog_name = ctx.find_root().info_name + + if version is None and package_name is not None: + metadata: t.Optional[types.ModuleType] + + try: + from importlib import metadata # type: ignore + except ImportError: + # Python < 3.8 + import importlib_metadata as metadata # type: ignore + + try: + version = metadata.version(package_name) # type: ignore + except metadata.PackageNotFoundError: # type: ignore + raise RuntimeError( + f"{package_name!r} is not installed. Try passing" + " 'package_name' instead." + ) from None + + if version is None: + raise RuntimeError( + f"Could not determine the version for {package_name!r} automatically." + ) + + echo( + message % {"prog": prog_name, "package": package_name, "version": version}, + color=ctx.color, + ) + ctx.exit() + + if not param_decls: + param_decls = ("--version",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show the version and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) + + +def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--help`` option which immediately prints the help page + and exits the program. + + This is usually unnecessary, as the ``--help`` option is added to + each command automatically unless ``add_help_option=False`` is + passed. + + :param param_decls: One or more option names. Defaults to the single + value ``"--help"``. + :param kwargs: Extra arguments are passed to :func:`option`. 
+ """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + if not param_decls: + param_decls = ("--help",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show this message and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/exceptions.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/exceptions.py new file mode 100644 index 000000000..fe68a3613 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/exceptions.py @@ -0,0 +1,288 @@ +import typing as t +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import get_text_stderr +from .utils import echo +from .utils import format_filename + +if t.TYPE_CHECKING: + from .core import Command + from .core import Context + from .core import Parameter + + +def _join_param_hints( + param_hint: t.Optional[t.Union[t.Sequence[str], str]] +) -> t.Optional[str]: + if param_hint is not None and not isinstance(param_hint, str): + return " / ".join(repr(x) for x in param_hint) + + return param_hint + + +class ClickException(Exception): + """An exception that Click can handle and show to the user.""" + + #: The exit code for this exception. + exit_code = 1 + + def __init__(self, message: str) -> None: + super().__init__(message) + self.message = message + + def format_message(self) -> str: + return self.message + + def __str__(self) -> str: + return self.message + + def show(self, file: t.Optional[t.IO[t.Any]] = None) -> None: + if file is None: + file = get_text_stderr() + + echo(_("Error: {message}").format(message=self.format_message()), file=file) + + +class UsageError(ClickException): + """An internal exception that signals a usage error. This typically + aborts any further handling. + + :param message: the error message to display. + :param ctx: optionally the context that caused this error. Click will + fill in the context automatically in some situations. + """ + + exit_code = 2 + + def __init__(self, message: str, ctx: t.Optional["Context"] = None) -> None: + super().__init__(message) + self.ctx = ctx + self.cmd: t.Optional["Command"] = self.ctx.command if self.ctx else None + + def show(self, file: t.Optional[t.IO[t.Any]] = None) -> None: + if file is None: + file = get_text_stderr() + color = None + hint = "" + if ( + self.ctx is not None + and self.ctx.command.get_help_option(self.ctx) is not None + ): + hint = _("Try '{command} {option}' for help.").format( + command=self.ctx.command_path, option=self.ctx.help_option_names[0] + ) + hint = f"{hint}\n" + if self.ctx is not None: + color = self.ctx.color + echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) + echo( + _("Error: {message}").format(message=self.format_message()), + file=file, + color=color, + ) + + +class BadParameter(UsageError): + """An exception that formats out a standardized error message for a + bad parameter. This is useful when thrown from a callback or type as + Click will attach contextual information to it (for instance, which + parameter it is). + + .. versionadded:: 2.0 + + :param param: the parameter object that caused this error. This can + be left out, and Click will attach this info itself + if possible. + :param param_hint: a string that shows up as parameter name. 
This + can be used as alternative to `param` in cases + where custom validation should happen. If it is + a string it's used as such, if it's a list then + each item is quoted and separated. + """ + + def __init__( + self, + message: str, + ctx: t.Optional["Context"] = None, + param: t.Optional["Parameter"] = None, + param_hint: t.Optional[str] = None, + ) -> None: + super().__init__(message, ctx) + self.param = param + self.param_hint = param_hint + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + return _("Invalid value: {message}").format(message=self.message) + + return _("Invalid value for {param_hint}: {message}").format( + param_hint=_join_param_hints(param_hint), message=self.message + ) + + +class MissingParameter(BadParameter): + """Raised if click required an option or argument but it was not + provided when invoking the script. + + .. versionadded:: 4.0 + + :param param_type: a string that indicates the type of the parameter. + The default is to inherit the parameter type from + the given `param`. Valid values are ``'parameter'``, + ``'option'`` or ``'argument'``. + """ + + def __init__( + self, + message: t.Optional[str] = None, + ctx: t.Optional["Context"] = None, + param: t.Optional["Parameter"] = None, + param_hint: t.Optional[str] = None, + param_type: t.Optional[str] = None, + ) -> None: + super().__init__(message or "", ctx, param, param_hint) + self.param_type = param_type + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint: t.Optional[str] = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + param_hint = None + + param_hint = _join_param_hints(param_hint) + param_hint = f" {param_hint}" if param_hint else "" + + param_type = self.param_type + if param_type is None and self.param is not None: + param_type = self.param.param_type_name + + msg = self.message + if self.param is not None: + msg_extra = self.param.type.get_missing_message(self.param) + if msg_extra: + if msg: + msg += f". {msg_extra}" + else: + msg = msg_extra + + msg = f" {msg}" if msg else "" + + # Translate param_type for known types. + if param_type == "argument": + missing = _("Missing argument") + elif param_type == "option": + missing = _("Missing option") + elif param_type == "parameter": + missing = _("Missing parameter") + else: + missing = _("Missing {param_type}").format(param_type=param_type) + + return f"{missing}{param_hint}.{msg}" + + def __str__(self) -> str: + if not self.message: + param_name = self.param.name if self.param else None + return _("Missing parameter: {param_name}").format(param_name=param_name) + else: + return self.message + + +class NoSuchOption(UsageError): + """Raised if click attempted to handle an option that does not + exist. + + .. 
versionadded:: 4.0 + """ + + def __init__( + self, + option_name: str, + message: t.Optional[str] = None, + possibilities: t.Optional[t.Sequence[str]] = None, + ctx: t.Optional["Context"] = None, + ) -> None: + if message is None: + message = _("No such option: {name}").format(name=option_name) + + super().__init__(message, ctx) + self.option_name = option_name + self.possibilities = possibilities + + def format_message(self) -> str: + if not self.possibilities: + return self.message + + possibility_str = ", ".join(sorted(self.possibilities)) + suggest = ngettext( + "Did you mean {possibility}?", + "(Possible options: {possibilities})", + len(self.possibilities), + ).format(possibility=possibility_str, possibilities=possibility_str) + return f"{self.message} {suggest}" + + +class BadOptionUsage(UsageError): + """Raised if an option is generally supplied but the use of the option + was incorrect. This is for instance raised if the number of arguments + for an option is not correct. + + .. versionadded:: 4.0 + + :param option_name: the name of the option being used incorrectly. + """ + + def __init__( + self, option_name: str, message: str, ctx: t.Optional["Context"] = None + ) -> None: + super().__init__(message, ctx) + self.option_name = option_name + + +class BadArgumentUsage(UsageError): + """Raised if an argument is generally supplied but the use of the argument + was incorrect. This is for instance raised if the number of values + for an argument is not correct. + + .. versionadded:: 6.0 + """ + + +class FileError(ClickException): + """Raised if a file cannot be opened.""" + + def __init__(self, filename: str, hint: t.Optional[str] = None) -> None: + if hint is None: + hint = _("unknown error") + + super().__init__(hint) + self.ui_filename: str = format_filename(filename) + self.filename = filename + + def format_message(self) -> str: + return _("Could not open file {filename!r}: {message}").format( + filename=self.ui_filename, message=self.message + ) + + +class Abort(RuntimeError): + """An internal signalling exception that signals Click to abort.""" + + +class Exit(RuntimeError): + """An exception that indicates that the application should exit with some + status code. + + :param code: the status code to exit with. + """ + + __slots__ = ("exit_code",) + + def __init__(self, code: int = 0) -> None: + self.exit_code: int = code diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/formatting.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/formatting.py new file mode 100644 index 000000000..ddd2a2f82 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/formatting.py @@ -0,0 +1,301 @@ +import typing as t +from contextlib import contextmanager +from gettext import gettext as _ + +from ._compat import term_len +from .parser import split_opt + +# Can force a width. 
This is used by the test system +FORCED_WIDTH: t.Optional[int] = None + + +def measure_table(rows: t.Iterable[t.Tuple[str, str]]) -> t.Tuple[int, ...]: + widths: t.Dict[int, int] = {} + + for row in rows: + for idx, col in enumerate(row): + widths[idx] = max(widths.get(idx, 0), term_len(col)) + + return tuple(y for x, y in sorted(widths.items())) + + +def iter_rows( + rows: t.Iterable[t.Tuple[str, str]], col_count: int +) -> t.Iterator[t.Tuple[str, ...]]: + for row in rows: + yield row + ("",) * (col_count - len(row)) + + +def wrap_text( + text: str, + width: int = 78, + initial_indent: str = "", + subsequent_indent: str = "", + preserve_paragraphs: bool = False, +) -> str: + """A helper function that intelligently wraps text. By default, it + assumes that it operates on a single paragraph of text but if the + `preserve_paragraphs` parameter is provided it will intelligently + handle paragraphs (defined by two empty lines). + + If paragraphs are handled, a paragraph can be prefixed with an empty + line containing the ``\\b`` character (``\\x08``) to indicate that + no rewrapping should happen in that block. + + :param text: the text that should be rewrapped. + :param width: the maximum width for the text. + :param initial_indent: the initial indent that should be placed on the + first line as a string. + :param subsequent_indent: the indent string that should be placed on + each consecutive line. + :param preserve_paragraphs: if this flag is set then the wrapping will + intelligently handle paragraphs. + """ + from ._textwrap import TextWrapper + + text = text.expandtabs() + wrapper = TextWrapper( + width, + initial_indent=initial_indent, + subsequent_indent=subsequent_indent, + replace_whitespace=False, + ) + if not preserve_paragraphs: + return wrapper.fill(text) + + p: t.List[t.Tuple[int, bool, str]] = [] + buf: t.List[str] = [] + indent = None + + def _flush_par() -> None: + if not buf: + return + if buf[0].strip() == "\b": + p.append((indent or 0, True, "\n".join(buf[1:]))) + else: + p.append((indent or 0, False, " ".join(buf))) + del buf[:] + + for line in text.splitlines(): + if not line: + _flush_par() + indent = None + else: + if indent is None: + orig_len = term_len(line) + line = line.lstrip() + indent = orig_len - term_len(line) + buf.append(line) + _flush_par() + + rv = [] + for indent, raw, text in p: + with wrapper.extra_indent(" " * indent): + if raw: + rv.append(wrapper.indent_only(text)) + else: + rv.append(wrapper.fill(text)) + + return "\n\n".join(rv) + + +class HelpFormatter: + """This class helps with formatting text-based help pages. It's + usually just needed for very special internal cases, but it's also + exposed so that developers can write their own fancy outputs. + + At present, it always writes into memory. + + :param indent_increment: the additional increment for each level. + :param width: the width for the text. This defaults to the terminal + width clamped to a maximum of 78. 
+ """ + + def __init__( + self, + indent_increment: int = 2, + width: t.Optional[int] = None, + max_width: t.Optional[int] = None, + ) -> None: + import shutil + + self.indent_increment = indent_increment + if max_width is None: + max_width = 80 + if width is None: + width = FORCED_WIDTH + if width is None: + width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) + self.width = width + self.current_indent = 0 + self.buffer: t.List[str] = [] + + def write(self, string: str) -> None: + """Writes a unicode string into the internal buffer.""" + self.buffer.append(string) + + def indent(self) -> None: + """Increases the indentation.""" + self.current_indent += self.indent_increment + + def dedent(self) -> None: + """Decreases the indentation.""" + self.current_indent -= self.indent_increment + + def write_usage( + self, prog: str, args: str = "", prefix: t.Optional[str] = None + ) -> None: + """Writes a usage line into the buffer. + + :param prog: the program name. + :param args: whitespace separated list of arguments. + :param prefix: The prefix for the first line. Defaults to + ``"Usage: "``. + """ + if prefix is None: + prefix = f"{_('Usage:')} " + + usage_prefix = f"{prefix:>{self.current_indent}}{prog} " + text_width = self.width - self.current_indent + + if text_width >= (term_len(usage_prefix) + 20): + # The arguments will fit to the right of the prefix. + indent = " " * term_len(usage_prefix) + self.write( + wrap_text( + args, + text_width, + initial_indent=usage_prefix, + subsequent_indent=indent, + ) + ) + else: + # The prefix is too long, put the arguments on the next line. + self.write(usage_prefix) + self.write("\n") + indent = " " * (max(self.current_indent, term_len(prefix)) + 4) + self.write( + wrap_text( + args, text_width, initial_indent=indent, subsequent_indent=indent + ) + ) + + self.write("\n") + + def write_heading(self, heading: str) -> None: + """Writes a heading into the buffer.""" + self.write(f"{'':>{self.current_indent}}{heading}:\n") + + def write_paragraph(self) -> None: + """Writes a paragraph into the buffer.""" + if self.buffer: + self.write("\n") + + def write_text(self, text: str) -> None: + """Writes re-indented text into the buffer. This rewraps and + preserves paragraphs. + """ + indent = " " * self.current_indent + self.write( + wrap_text( + text, + self.width, + initial_indent=indent, + subsequent_indent=indent, + preserve_paragraphs=True, + ) + ) + self.write("\n") + + def write_dl( + self, + rows: t.Sequence[t.Tuple[str, str]], + col_max: int = 30, + col_spacing: int = 2, + ) -> None: + """Writes a definition list into the buffer. This is how options + and commands are usually formatted. + + :param rows: a list of two item tuples for the terms and values. + :param col_max: the maximum width of the first column. + :param col_spacing: the number of spaces between the first and + second column. 
+ """ + rows = list(rows) + widths = measure_table(rows) + if len(widths) != 2: + raise TypeError("Expected two columns for definition list") + + first_col = min(widths[0], col_max) + col_spacing + + for first, second in iter_rows(rows, len(widths)): + self.write(f"{'':>{self.current_indent}}{first}") + if not second: + self.write("\n") + continue + if term_len(first) <= first_col - col_spacing: + self.write(" " * (first_col - term_len(first))) + else: + self.write("\n") + self.write(" " * (first_col + self.current_indent)) + + text_width = max(self.width - first_col - 2, 10) + wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) + lines = wrapped_text.splitlines() + + if lines: + self.write(f"{lines[0]}\n") + + for line in lines[1:]: + self.write(f"{'':>{first_col + self.current_indent}}{line}\n") + else: + self.write("\n") + + @contextmanager + def section(self, name: str) -> t.Iterator[None]: + """Helpful context manager that writes a paragraph, a heading, + and the indents. + + :param name: the section name that is written as heading. + """ + self.write_paragraph() + self.write_heading(name) + self.indent() + try: + yield + finally: + self.dedent() + + @contextmanager + def indentation(self) -> t.Iterator[None]: + """A context manager that increases the indentation.""" + self.indent() + try: + yield + finally: + self.dedent() + + def getvalue(self) -> str: + """Returns the buffer contents.""" + return "".join(self.buffer) + + +def join_options(options: t.Sequence[str]) -> t.Tuple[str, bool]: + """Given a list of option strings this joins them in the most appropriate + way and returns them in the form ``(formatted_string, + any_prefix_is_slash)`` where the second item in the tuple is a flag that + indicates if any of the option prefixes was a slash. + """ + rv = [] + any_prefix_is_slash = False + + for opt in options: + prefix = split_opt(opt)[0] + + if prefix == "/": + any_prefix_is_slash = True + + rv.append((len(prefix), opt)) + + rv.sort(key=lambda x: x[0]) + return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/globals.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/globals.py new file mode 100644 index 000000000..480058f10 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/globals.py @@ -0,0 +1,68 @@ +import typing as t +from threading import local + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Context + +_local = local() + + +@t.overload +def get_current_context(silent: "te.Literal[False]" = False) -> "Context": + ... + + +@t.overload +def get_current_context(silent: bool = ...) -> t.Optional["Context"]: + ... + + +def get_current_context(silent: bool = False) -> t.Optional["Context"]: + """Returns the current click context. This can be used as a way to + access the current context object from anywhere. This is a more implicit + alternative to the :func:`pass_context` decorator. This function is + primarily useful for helpers such as :func:`echo` which might be + interested in changing its behavior based on the current context. + + To push the current context, :meth:`Context.scope` can be used. + + .. versionadded:: 5.0 + + :param silent: if set to `True` the return value is `None` if no context + is available. The default behavior is to raise a + :exc:`RuntimeError`. 
+ """ + try: + return t.cast("Context", _local.stack[-1]) + except (AttributeError, IndexError) as e: + if not silent: + raise RuntimeError("There is no active click context.") from e + + return None + + +def push_context(ctx: "Context") -> None: + """Pushes a new context to the current stack.""" + _local.__dict__.setdefault("stack", []).append(ctx) + + +def pop_context() -> None: + """Removes the top level from the stack.""" + _local.stack.pop() + + +def resolve_color_default(color: t.Optional[bool] = None) -> t.Optional[bool]: + """Internal helper to get the default value of the color flag. If a + value is passed it's returned unchanged, otherwise it's looked up from + the current context. + """ + if color is not None: + return color + + ctx = get_current_context(silent=True) + + if ctx is not None: + return ctx.color + + return None diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/parser.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/parser.py new file mode 100644 index 000000000..5fa7adfac --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/parser.py @@ -0,0 +1,529 @@ +""" +This module started out as largely a copy paste from the stdlib's +optparse module with the features removed that we do not need from +optparse because we implement them in Click on a higher level (for +instance type handling, help formatting and a lot more). + +The plan is to remove more and more from here over time. + +The reason this is a different module and not optparse from the stdlib +is that there are differences in 2.x and 3.x about the error messages +generated and optparse in the stdlib uses gettext for no good reason +and might cause us issues. + +Click uses parts of optparse written by Gregory P. Ward and maintained +by the Python Software Foundation. This is limited to code in parser.py. + +Copyright 2001-2006 Gregory P. Ward. All rights reserved. +Copyright 2002-2006 Python Software Foundation. All rights reserved. +""" +# This code uses parts of optparse written by Gregory P. Ward and +# maintained by the Python Software Foundation. +# Copyright 2001-2006 Gregory P. Ward +# Copyright 2002-2006 Python Software Foundation +import typing as t +from collections import deque +from gettext import gettext as _ +from gettext import ngettext + +from .exceptions import BadArgumentUsage +from .exceptions import BadOptionUsage +from .exceptions import NoSuchOption +from .exceptions import UsageError + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Argument as CoreArgument + from .core import Context + from .core import Option as CoreOption + from .core import Parameter as CoreParameter + +V = t.TypeVar("V") + +# Sentinel value that indicates an option was passed as a flag without a +# value but is not a flag option. Option.consume_value uses this to +# prompt or use the flag_value. +_flag_needs_value = object() + + +def _unpack_args( + args: t.Sequence[str], nargs_spec: t.Sequence[int] +) -> t.Tuple[t.Sequence[t.Union[str, t.Sequence[t.Optional[str]], None]], t.List[str]]: + """Given an iterable of arguments and an iterable of nargs specifications, + it returns a tuple with all the unpacked arguments at the first index + and all remaining arguments as the second. + + The nargs specification is the number of arguments that should be consumed + or `-1` to indicate that this position should eat up all the remainders. + + Missing items are filled with `None`. 
+ """ + args = deque(args) + nargs_spec = deque(nargs_spec) + rv: t.List[t.Union[str, t.Tuple[t.Optional[str], ...], None]] = [] + spos: t.Optional[int] = None + + def _fetch(c: "te.Deque[V]") -> t.Optional[V]: + try: + if spos is None: + return c.popleft() + else: + return c.pop() + except IndexError: + return None + + while nargs_spec: + nargs = _fetch(nargs_spec) + + if nargs is None: + continue + + if nargs == 1: + rv.append(_fetch(args)) + elif nargs > 1: + x = [_fetch(args) for _ in range(nargs)] + + # If we're reversed, we're pulling in the arguments in reverse, + # so we need to turn them around. + if spos is not None: + x.reverse() + + rv.append(tuple(x)) + elif nargs < 0: + if spos is not None: + raise TypeError("Cannot have two nargs < 0") + + spos = len(rv) + rv.append(None) + + # spos is the position of the wildcard (star). If it's not `None`, + # we fill it with the remainder. + if spos is not None: + rv[spos] = tuple(args) + args = [] + rv[spos + 1 :] = reversed(rv[spos + 1 :]) + + return tuple(rv), list(args) + + +def split_opt(opt: str) -> t.Tuple[str, str]: + first = opt[:1] + if first.isalnum(): + return "", opt + if opt[1:2] == first: + return opt[:2], opt[2:] + return first, opt[1:] + + +def normalize_opt(opt: str, ctx: t.Optional["Context"]) -> str: + if ctx is None or ctx.token_normalize_func is None: + return opt + prefix, opt = split_opt(opt) + return f"{prefix}{ctx.token_normalize_func(opt)}" + + +def split_arg_string(string: str) -> t.List[str]: + """Split an argument string as with :func:`shlex.split`, but don't + fail if the string is incomplete. Ignores a missing closing quote or + incomplete escape sequence and uses the partial token as-is. + + .. code-block:: python + + split_arg_string("example 'my file") + ["example", "my file"] + + split_arg_string("example my\\") + ["example", "my"] + + :param string: String to split. + """ + import shlex + + lex = shlex.shlex(string, posix=True) + lex.whitespace_split = True + lex.commenters = "" + out = [] + + try: + for token in lex: + out.append(token) + except ValueError: + # Raised when end-of-string is reached in an invalid state. Use + # the partial token as-is. The quote or escape character is in + # lex.state, not lex.token. 
+ out.append(lex.token) + + return out + + +class Option: + def __init__( + self, + obj: "CoreOption", + opts: t.Sequence[str], + dest: t.Optional[str], + action: t.Optional[str] = None, + nargs: int = 1, + const: t.Optional[t.Any] = None, + ): + self._short_opts = [] + self._long_opts = [] + self.prefixes: t.Set[str] = set() + + for opt in opts: + prefix, value = split_opt(opt) + if not prefix: + raise ValueError(f"Invalid start character for option ({opt})") + self.prefixes.add(prefix[0]) + if len(prefix) == 1 and len(value) == 1: + self._short_opts.append(opt) + else: + self._long_opts.append(opt) + self.prefixes.add(prefix) + + if action is None: + action = "store" + + self.dest = dest + self.action = action + self.nargs = nargs + self.const = const + self.obj = obj + + @property + def takes_value(self) -> bool: + return self.action in ("store", "append") + + def process(self, value: t.Any, state: "ParsingState") -> None: + if self.action == "store": + state.opts[self.dest] = value # type: ignore + elif self.action == "store_const": + state.opts[self.dest] = self.const # type: ignore + elif self.action == "append": + state.opts.setdefault(self.dest, []).append(value) # type: ignore + elif self.action == "append_const": + state.opts.setdefault(self.dest, []).append(self.const) # type: ignore + elif self.action == "count": + state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore + else: + raise ValueError(f"unknown action '{self.action}'") + state.order.append(self.obj) + + +class Argument: + def __init__(self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1): + self.dest = dest + self.nargs = nargs + self.obj = obj + + def process( + self, + value: t.Union[t.Optional[str], t.Sequence[t.Optional[str]]], + state: "ParsingState", + ) -> None: + if self.nargs > 1: + assert value is not None + holes = sum(1 for x in value if x is None) + if holes == len(value): + value = None + elif holes != 0: + raise BadArgumentUsage( + _("Argument {name!r} takes {nargs} values.").format( + name=self.dest, nargs=self.nargs + ) + ) + + if self.nargs == -1 and self.obj.envvar is not None and value == (): + # Replace empty tuple with None so that a value from the + # environment may be tried. + value = None + + state.opts[self.dest] = value # type: ignore + state.order.append(self.obj) + + +class ParsingState: + def __init__(self, rargs: t.List[str]) -> None: + self.opts: t.Dict[str, t.Any] = {} + self.largs: t.List[str] = [] + self.rargs = rargs + self.order: t.List["CoreParameter"] = [] + + +class OptionParser: + """The option parser is an internal class that is ultimately used to + parse options and arguments. It's modelled after optparse and brings + a similar but vastly simplified API. It should generally not be used + directly as the high level Click classes wrap it for you. + + It's not nearly as extensible as optparse or argparse as it does not + implement features that are implemented on a higher level (such as + types or defaults). + + :param ctx: optionally the :class:`~click.Context` where this parser + should go with. + """ + + def __init__(self, ctx: t.Optional["Context"] = None) -> None: + #: The :class:`~click.Context` for this parser. This might be + #: `None` for some advanced use cases. + self.ctx = ctx + #: This controls how the parser deals with interspersed arguments. + #: If this is set to `False`, the parser will stop on the first + #: non-option. Click uses this to implement nested subcommands + #: safely. 
+ self.allow_interspersed_args: bool = True + #: This tells the parser how to deal with unknown options. By + #: default it will error out (which is sensible), but there is a + #: second mode where it will ignore it and continue processing + #: after shifting all the unknown options into the resulting args. + self.ignore_unknown_options: bool = False + + if ctx is not None: + self.allow_interspersed_args = ctx.allow_interspersed_args + self.ignore_unknown_options = ctx.ignore_unknown_options + + self._short_opt: t.Dict[str, Option] = {} + self._long_opt: t.Dict[str, Option] = {} + self._opt_prefixes = {"-", "--"} + self._args: t.List[Argument] = [] + + def add_option( + self, + obj: "CoreOption", + opts: t.Sequence[str], + dest: t.Optional[str], + action: t.Optional[str] = None, + nargs: int = 1, + const: t.Optional[t.Any] = None, + ) -> None: + """Adds a new option named `dest` to the parser. The destination + is not inferred (unlike with optparse) and needs to be explicitly + provided. Action can be any of ``store``, ``store_const``, + ``append``, ``append_const`` or ``count``. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + opts = [normalize_opt(opt, self.ctx) for opt in opts] + option = Option(obj, opts, dest, action=action, nargs=nargs, const=const) + self._opt_prefixes.update(option.prefixes) + for opt in option._short_opts: + self._short_opt[opt] = option + for opt in option._long_opts: + self._long_opt[opt] = option + + def add_argument( + self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1 + ) -> None: + """Adds a positional argument named `dest` to the parser. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + self._args.append(Argument(obj, dest=dest, nargs=nargs)) + + def parse_args( + self, args: t.List[str] + ) -> t.Tuple[t.Dict[str, t.Any], t.List[str], t.List["CoreParameter"]]: + """Parses positional arguments and returns ``(values, args, order)`` + for the parsed options and arguments as well as the leftover + arguments if there are any. The order is a list of objects as they + appear on the command line. If arguments appear multiple times they + will be memorized multiple times as well. + """ + state = ParsingState(args) + try: + self._process_args_for_options(state) + self._process_args_for_args(state) + except UsageError: + if self.ctx is None or not self.ctx.resilient_parsing: + raise + return state.opts, state.largs, state.order + + def _process_args_for_args(self, state: ParsingState) -> None: + pargs, args = _unpack_args( + state.largs + state.rargs, [x.nargs for x in self._args] + ) + + for idx, arg in enumerate(self._args): + arg.process(pargs[idx], state) + + state.largs = args + state.rargs = [] + + def _process_args_for_options(self, state: ParsingState) -> None: + while state.rargs: + arg = state.rargs.pop(0) + arglen = len(arg) + # Double dashes always handled explicitly regardless of what + # prefixes are valid. + if arg == "--": + return + elif arg[:1] in self._opt_prefixes and arglen > 1: + self._process_opts(arg, state) + elif self.allow_interspersed_args: + state.largs.append(arg) + else: + state.rargs.insert(0, arg) + return + + # Say this is the original argument list: + # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] + # ^ + # (we are about to process arg(i)). 
+ # + # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of + # [arg0, ..., arg(i-1)] (any options and their arguments will have + # been removed from largs). + # + # The while loop will usually consume 1 or more arguments per pass. + # If it consumes 1 (eg. arg is an option that takes no arguments), + # then after _process_arg() is done the situation is: + # + # largs = subset of [arg0, ..., arg(i)] + # rargs = [arg(i+1), ..., arg(N-1)] + # + # If allow_interspersed_args is false, largs will always be + # *empty* -- still a subset of [arg0, ..., arg(i-1)], but + # not a very interesting subset! + + def _match_long_opt( + self, opt: str, explicit_value: t.Optional[str], state: ParsingState + ) -> None: + if opt not in self._long_opt: + from difflib import get_close_matches + + possibilities = get_close_matches(opt, self._long_opt) + raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) + + option = self._long_opt[opt] + if option.takes_value: + # At this point it's safe to modify rargs by injecting the + # explicit value, because no exception is raised in this + # branch. This means that the inserted value will be fully + # consumed. + if explicit_value is not None: + state.rargs.insert(0, explicit_value) + + value = self._get_value_from_state(opt, option, state) + + elif explicit_value is not None: + raise BadOptionUsage( + opt, _("Option {name!r} does not take a value.").format(name=opt) + ) + + else: + value = None + + option.process(value, state) + + def _match_short_opt(self, arg: str, state: ParsingState) -> None: + stop = False + i = 1 + prefix = arg[0] + unknown_options = [] + + for ch in arg[1:]: + opt = normalize_opt(f"{prefix}{ch}", self.ctx) + option = self._short_opt.get(opt) + i += 1 + + if not option: + if self.ignore_unknown_options: + unknown_options.append(ch) + continue + raise NoSuchOption(opt, ctx=self.ctx) + if option.takes_value: + # Any characters left in arg? Pretend they're the + # next arg, and stop consuming characters of arg. + if i < len(arg): + state.rargs.insert(0, arg[i:]) + stop = True + + value = self._get_value_from_state(opt, option, state) + + else: + value = None + + option.process(value, state) + + if stop: + break + + # If we got any unknown options we recombine the string of the + # remaining options and re-attach the prefix, then report that + # to the state as new larg. This way there is basic combinatorics + # that can be achieved while still ignoring unknown arguments. + if self.ignore_unknown_options and unknown_options: + state.largs.append(f"{prefix}{''.join(unknown_options)}") + + def _get_value_from_state( + self, option_name: str, option: Option, state: ParsingState + ) -> t.Any: + nargs = option.nargs + + if len(state.rargs) < nargs: + if option.obj._flag_needs_value: + # Option allows omitting the value. + value = _flag_needs_value + else: + raise BadOptionUsage( + option_name, + ngettext( + "Option {name!r} requires an argument.", + "Option {name!r} requires {nargs} arguments.", + nargs, + ).format(name=option_name, nargs=nargs), + ) + elif nargs == 1: + next_rarg = state.rargs[0] + + if ( + option.obj._flag_needs_value + and isinstance(next_rarg, str) + and next_rarg[:1] in self._opt_prefixes + and len(next_rarg) > 1 + ): + # The next arg looks like the start of an option, don't + # use it as the value if omitting the value is allowed. 
+ value = _flag_needs_value + else: + value = state.rargs.pop(0) + else: + value = tuple(state.rargs[:nargs]) + del state.rargs[:nargs] + + return value + + def _process_opts(self, arg: str, state: ParsingState) -> None: + explicit_value = None + # Long option handling happens in two parts. The first part is + # supporting explicitly attached values. In any case, we will try + # to long match the option first. + if "=" in arg: + long_opt, explicit_value = arg.split("=", 1) + else: + long_opt = arg + norm_long_opt = normalize_opt(long_opt, self.ctx) + + # At this point we will match the (assumed) long option through + # the long option matching code. Note that this allows options + # like "-foo" to be matched as long options. + try: + self._match_long_opt(norm_long_opt, explicit_value, state) + except NoSuchOption: + # At this point the long option matching failed, and we need + # to try with short options. However there is a special rule + # which says, that if we have a two character options prefix + # (applies to "--foo" for instance), we do not dispatch to the + # short option code and will instead raise the no option + # error. + if arg[:2] not in self._opt_prefixes: + self._match_short_opt(arg, state) + return + + if not self.ignore_unknown_options: + raise + + state.largs.append(arg) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/py.typed b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/shell_completion.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/shell_completion.py new file mode 100644 index 000000000..dc9e00b9b --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/shell_completion.py @@ -0,0 +1,596 @@ +import os +import re +import typing as t +from gettext import gettext as _ + +from .core import Argument +from .core import BaseCommand +from .core import Context +from .core import MultiCommand +from .core import Option +from .core import Parameter +from .core import ParameterSource +from .parser import split_arg_string +from .utils import echo + + +def shell_complete( + cli: BaseCommand, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + complete_var: str, + instruction: str, +) -> int: + """Perform shell completion for the given CLI program. + + :param cli: Command being called. + :param ctx_args: Extra arguments to pass to + ``cli.make_context``. + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + :param instruction: Value of ``complete_var`` with the completion + instruction and shell, in the form ``instruction_shell``. + :return: Status code to exit with. + """ + shell, _, instruction = instruction.partition("_") + comp_cls = get_completion_class(shell) + + if comp_cls is None: + return 1 + + comp = comp_cls(cli, ctx_args, prog_name, complete_var) + + if instruction == "source": + echo(comp.source()) + return 0 + + if instruction == "complete": + echo(comp.complete()) + return 0 + + return 1 + + +class CompletionItem: + """Represents a completion value and metadata about the value. The + default metadata is ``type`` to indicate special shell handling, + and ``help`` if a shell supports showing a help string next to the + value. + + Arbitrary parameters can be passed when creating the object, and + accessed using ``item.attr``. 
If an attribute wasn't passed, + accessing it returns ``None``. + + :param value: The completion suggestion. + :param type: Tells the shell script to provide special completion + support for the type. Click uses ``"dir"`` and ``"file"``. + :param help: String shown next to the value if supported. + :param kwargs: Arbitrary metadata. The built-in implementations + don't use this, but custom type completions paired with custom + shell support could use it. + """ + + __slots__ = ("value", "type", "help", "_info") + + def __init__( + self, + value: t.Any, + type: str = "plain", + help: t.Optional[str] = None, + **kwargs: t.Any, + ) -> None: + self.value: t.Any = value + self.type: str = type + self.help: t.Optional[str] = help + self._info = kwargs + + def __getattr__(self, name: str) -> t.Any: + return self._info.get(name) + + +# Only Bash >= 4.4 has the nosort option. +_SOURCE_BASH = """\ +%(complete_func)s() { + local IFS=$'\\n' + local response + + response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ +%(complete_var)s=bash_complete $1) + + for completion in $response; do + IFS=',' read type value <<< "$completion" + + if [[ $type == 'dir' ]]; then + COMPREPLY=() + compopt -o dirnames + elif [[ $type == 'file' ]]; then + COMPREPLY=() + compopt -o default + elif [[ $type == 'plain' ]]; then + COMPREPLY+=($value) + fi + done + + return 0 +} + +%(complete_func)s_setup() { + complete -o nosort -F %(complete_func)s %(prog_name)s +} + +%(complete_func)s_setup; +""" + +_SOURCE_ZSH = """\ +#compdef %(prog_name)s + +%(complete_func)s() { + local -a completions + local -a completions_with_descriptions + local -a response + (( ! $+commands[%(prog_name)s] )) && return 1 + + response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ +%(complete_var)s=zsh_complete %(prog_name)s)}") + + for type key descr in ${response}; do + if [[ "$type" == "plain" ]]; then + if [[ "$descr" == "_" ]]; then + completions+=("$key") + else + completions_with_descriptions+=("$key":"$descr") + fi + elif [[ "$type" == "dir" ]]; then + _path_files -/ + elif [[ "$type" == "file" ]]; then + _path_files -f + fi + done + + if [ -n "$completions_with_descriptions" ]; then + _describe -V unsorted completions_with_descriptions -U + fi + + if [ -n "$completions" ]; then + compadd -U -V unsorted -a completions + fi +} + +if [[ $zsh_eval_context[-1] == loadautofunc ]]; then + # autoload from fpath, call function directly + %(complete_func)s "$@" +else + # eval/source/. command, register function for later + compdef %(complete_func)s %(prog_name)s +fi +""" + +_SOURCE_FISH = """\ +function %(complete_func)s; + set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ +COMP_CWORD=(commandline -t) %(prog_name)s); + + for completion in $response; + set -l metadata (string split "," $completion); + + if test $metadata[1] = "dir"; + __fish_complete_directories $metadata[2]; + else if test $metadata[1] = "file"; + __fish_complete_path $metadata[2]; + else if test $metadata[1] = "plain"; + echo $metadata[2]; + end; + end; +end; + +complete --no-files --command %(prog_name)s --arguments \ +"(%(complete_func)s)"; +""" + + +class ShellComplete: + """Base class for providing shell completion support. A subclass for + a given shell will override attributes and methods to implement the + completion instructions (``source`` and ``complete``). + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. 
+ :param complete_var: Name of the environment variable that holds + the completion instruction. + + .. versionadded:: 8.0 + """ + + name: t.ClassVar[str] + """Name to register the shell as with :func:`add_completion_class`. + This is used in completion instructions (``{name}_source`` and + ``{name}_complete``). + """ + + source_template: t.ClassVar[str] + """Completion script template formatted by :meth:`source`. This must + be provided by subclasses. + """ + + def __init__( + self, + cli: BaseCommand, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + complete_var: str, + ) -> None: + self.cli = cli + self.ctx_args = ctx_args + self.prog_name = prog_name + self.complete_var = complete_var + + @property + def func_name(self) -> str: + """The name of the shell function defined by the completion + script. + """ + safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII) + return f"_{safe_name}_completion" + + def source_vars(self) -> t.Dict[str, t.Any]: + """Vars for formatting :attr:`source_template`. + + By default this provides ``complete_func``, ``complete_var``, + and ``prog_name``. + """ + return { + "complete_func": self.func_name, + "complete_var": self.complete_var, + "prog_name": self.prog_name, + } + + def source(self) -> str: + """Produce the shell script that defines the completion + function. By default this ``%``-style formats + :attr:`source_template` with the dict returned by + :meth:`source_vars`. + """ + return self.source_template % self.source_vars() + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + """Use the env vars defined by the shell script to return a + tuple of ``args, incomplete``. This must be implemented by + subclasses. + """ + raise NotImplementedError + + def get_completions( + self, args: t.List[str], incomplete: str + ) -> t.List[CompletionItem]: + """Determine the context and last complete command or parameter + from the complete args. Call that object's ``shell_complete`` + method to get the completions for the incomplete value. + + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. + """ + ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) + obj, incomplete = _resolve_incomplete(ctx, args, incomplete) + return obj.shell_complete(ctx, incomplete) + + def format_completion(self, item: CompletionItem) -> str: + """Format a completion item into the form recognized by the + shell script. This must be implemented by subclasses. + + :param item: Completion item to format. + """ + raise NotImplementedError + + def complete(self) -> str: + """Produce the completion data to send back to the shell. + + By default this calls :meth:`get_completion_args`, gets the + completions, then calls :meth:`format_completion` for each + completion. 
+ """ + args, incomplete = self.get_completion_args() + completions = self.get_completions(args, incomplete) + out = [self.format_completion(item) for item in completions] + return "\n".join(out) + + +class BashComplete(ShellComplete): + """Shell completion for Bash.""" + + name = "bash" + source_template = _SOURCE_BASH + + @staticmethod + def _check_version() -> None: + import subprocess + + output = subprocess.run( + ["bash", "-c", 'echo "${BASH_VERSION}"'], stdout=subprocess.PIPE + ) + match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) + + if match is not None: + major, minor = match.groups() + + if major < "4" or major == "4" and minor < "4": + echo( + _( + "Shell completion is not supported for Bash" + " versions older than 4.4." + ), + err=True, + ) + else: + echo( + _("Couldn't detect Bash version, shell completion is not supported."), + err=True, + ) + + def source(self) -> str: + self._check_version() + return super().source() + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type},{item.value}" + + +class ZshComplete(ShellComplete): + """Shell completion for Zsh.""" + + name = "zsh" + source_template = _SOURCE_ZSH + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}" + + +class FishComplete(ShellComplete): + """Shell completion for Fish.""" + + name = "fish" + source_template = _SOURCE_FISH + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + incomplete = os.environ["COMP_CWORD"] + args = cwords[1:] + + # Fish stores the partial word in both COMP_WORDS and + # COMP_CWORD, remove it from complete args. + if incomplete and args and args[-1] == incomplete: + args.pop() + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + if item.help: + return f"{item.type},{item.value}\t{item.help}" + + return f"{item.type},{item.value}" + + +ShellCompleteType = t.TypeVar("ShellCompleteType", bound=t.Type[ShellComplete]) + + +_available_shells: t.Dict[str, t.Type[ShellComplete]] = { + "bash": BashComplete, + "fish": FishComplete, + "zsh": ZshComplete, +} + + +def add_completion_class( + cls: ShellCompleteType, name: t.Optional[str] = None +) -> ShellCompleteType: + """Register a :class:`ShellComplete` subclass under the given name. + The name will be provided by the completion instruction environment + variable during completion. + + :param cls: The completion class that will handle completion for the + shell. + :param name: Name to register the class under. Defaults to the + class's ``name`` attribute. + """ + if name is None: + name = cls.name + + _available_shells[name] = cls + + return cls + + +def get_completion_class(shell: str) -> t.Optional[t.Type[ShellComplete]]: + """Look up a registered :class:`ShellComplete` subclass by the name + provided by the completion instruction environment variable. 
If the + name isn't registered, returns ``None``. + + :param shell: Name the class is registered under. + """ + return _available_shells.get(shell) + + +def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: + """Determine if the given parameter is an argument that can still + accept values. + + :param ctx: Invocation context for the command represented by the + parsed complete args. + :param param: Argument object being checked. + """ + if not isinstance(param, Argument): + return False + + assert param.name is not None + # Will be None if expose_value is False. + value = ctx.params.get(param.name) + return ( + param.nargs == -1 + or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE + or ( + param.nargs > 1 + and isinstance(value, (tuple, list)) + and len(value) < param.nargs + ) + ) + + +def _start_of_option(ctx: Context, value: str) -> bool: + """Check if the value looks like the start of an option.""" + if not value: + return False + + c = value[0] + return c in ctx._opt_prefixes + + +def _is_incomplete_option(ctx: Context, args: t.List[str], param: Parameter) -> bool: + """Determine if the given parameter is an option that needs a value. + + :param args: List of complete args before the incomplete value. + :param param: Option object being checked. + """ + if not isinstance(param, Option): + return False + + if param.is_flag or param.count: + return False + + last_option = None + + for index, arg in enumerate(reversed(args)): + if index + 1 > param.nargs: + break + + if _start_of_option(ctx, arg): + last_option = arg + + return last_option is not None and last_option in param.opts + + +def _resolve_context( + cli: BaseCommand, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + args: t.List[str], +) -> Context: + """Produce the context hierarchy starting with the command and + traversing the complete arguments. This only follows the commands, + it doesn't trigger input prompts or callbacks. + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. + :param args: List of complete args before the incomplete value. + """ + ctx_args["resilient_parsing"] = True + ctx = cli.make_context(prog_name, args.copy(), **ctx_args) + args = ctx.protected_args + ctx.args + + while args: + command = ctx.command + + if isinstance(command, MultiCommand): + if not command.chain: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + ctx = cmd.make_context(name, args, parent=ctx, resilient_parsing=True) + args = ctx.protected_args + ctx.args + else: + sub_ctx = ctx + + while args: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + sub_ctx = cmd.make_context( + name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + resilient_parsing=True, + ) + args = sub_ctx.args + + ctx = sub_ctx + args = [*sub_ctx.protected_args, *sub_ctx.args] + else: + break + + return ctx + + +def _resolve_incomplete( + ctx: Context, args: t.List[str], incomplete: str +) -> t.Tuple[t.Union[BaseCommand, Parameter], str]: + """Find the Click object that will handle the completion of the + incomplete value. Return the object and the incomplete value. + + :param ctx: Invocation context for the command represented by + the parsed complete args. + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. 
+ """ + # Different shells treat an "=" between a long option name and + # value differently. Might keep the value joined, return the "=" + # as a separate item, or return the split name and value. Always + # split and discard the "=" to make completion easier. + if incomplete == "=": + incomplete = "" + elif "=" in incomplete and _start_of_option(ctx, incomplete): + name, _, incomplete = incomplete.partition("=") + args.append(name) + + # The "--" marker tells Click to stop treating values as options + # even if they start with the option character. If it hasn't been + # given and the incomplete arg looks like an option, the current + # command will provide option name completions. + if "--" not in args and _start_of_option(ctx, incomplete): + return ctx.command, incomplete + + params = ctx.command.get_params(ctx) + + # If the last complete arg is an option name with an incomplete + # value, the option will provide value completions. + for param in params: + if _is_incomplete_option(ctx, args, param): + return param, incomplete + + # It's not an option name or value. The first argument without a + # parsed value will provide value completions. + for param in params: + if _is_incomplete_argument(ctx, param): + return param, incomplete + + # There were no unparsed arguments, the command may be a group that + # will provide command name completions. + return ctx.command, incomplete diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/termui.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/termui.py new file mode 100644 index 000000000..db7a4b286 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/termui.py @@ -0,0 +1,784 @@ +import inspect +import io +import itertools +import sys +import typing as t +from gettext import gettext as _ + +from ._compat import isatty +from ._compat import strip_ansi +from .exceptions import Abort +from .exceptions import UsageError +from .globals import resolve_color_default +from .types import Choice +from .types import convert_type +from .types import ParamType +from .utils import echo +from .utils import LazyFile + +if t.TYPE_CHECKING: + from ._termui_impl import ProgressBar + +V = t.TypeVar("V") + +# The prompt functions to use. The doc tools currently override these +# functions to customize how they work. 
+visible_prompt_func: t.Callable[[str], str] = input + +_ansi_colors = { + "black": 30, + "red": 31, + "green": 32, + "yellow": 33, + "blue": 34, + "magenta": 35, + "cyan": 36, + "white": 37, + "reset": 39, + "bright_black": 90, + "bright_red": 91, + "bright_green": 92, + "bright_yellow": 93, + "bright_blue": 94, + "bright_magenta": 95, + "bright_cyan": 96, + "bright_white": 97, +} +_ansi_reset_all = "\033[0m" + + +def hidden_prompt_func(prompt: str) -> str: + import getpass + + return getpass.getpass(prompt) + + +def _build_prompt( + text: str, + suffix: str, + show_default: bool = False, + default: t.Optional[t.Any] = None, + show_choices: bool = True, + type: t.Optional[ParamType] = None, +) -> str: + prompt = text + if type is not None and show_choices and isinstance(type, Choice): + prompt += f" ({', '.join(map(str, type.choices))})" + if default is not None and show_default: + prompt = f"{prompt} [{_format_default(default)}]" + return f"{prompt}{suffix}" + + +def _format_default(default: t.Any) -> t.Any: + if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): + return default.name + + return default + + +def prompt( + text: str, + default: t.Optional[t.Any] = None, + hide_input: bool = False, + confirmation_prompt: t.Union[bool, str] = False, + type: t.Optional[t.Union[ParamType, t.Any]] = None, + value_proc: t.Optional[t.Callable[[str], t.Any]] = None, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, + show_choices: bool = True, +) -> t.Any: + """Prompts a user for input. This is a convenience function that can + be used to prompt a user for input later. + + If the user aborts the input by sending an interrupt signal, this + function will catch it and raise a :exc:`Abort` exception. + + :param text: the text to show for the prompt. + :param default: the default value to use if no input happens. If this + is not given it will prompt until it's aborted. + :param hide_input: if this is set to true then the input value will + be hidden. + :param confirmation_prompt: Prompt a second time to confirm the + value. Can be set to a string instead of ``True`` to customize + the message. + :param type: the type to use to check the value against. + :param value_proc: if this parameter is provided it's a function that + is invoked instead of the type conversion to + convert a value. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + :param show_choices: Show or hide choices if the passed type is a Choice. + For example if type is a Choice of either day or week, + show_choices is true and text is "Group by" then the + prompt will be "Group by (day, week): ". + + .. versionadded:: 8.0 + ``confirmation_prompt`` can be a custom string. + + .. versionadded:: 7.0 + Added the ``show_choices`` parameter. + + .. versionadded:: 6.0 + Added unicode support for cmd.exe on Windows. + + .. versionadded:: 4.0 + Added the `err` parameter. + + """ + + def prompt_func(text: str) -> str: + f = hidden_prompt_func if hide_input else visible_prompt_func + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(text.rstrip(" "), nl=False, err=err) + # Echo a space to stdout to work around an issue where + # readline causes backspace to clear the whole line. 
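+        #
+        # A minimal sketch of the public ``prompt`` call that reaches this
+        # helper (the prompt texts and types below are arbitrary examples)::
+        #
+        #     name = click.prompt("Your name", default="world")
+        #     count = click.prompt("Repeat", type=click.IntRange(1, 10))
+        #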
+ return f(" ") + except (KeyboardInterrupt, EOFError): + # getpass doesn't print a newline if the user aborts input with ^C. + # Allegedly this behavior is inherited from getpass(3). + # A doc bug has been filed at https://bugs.python.org/issue24711 + if hide_input: + echo(None, err=err) + raise Abort() from None + + if value_proc is None: + value_proc = convert_type(type, default) + + prompt = _build_prompt( + text, prompt_suffix, show_default, default, show_choices, type + ) + + if confirmation_prompt: + if confirmation_prompt is True: + confirmation_prompt = _("Repeat for confirmation") + + confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix) + + while True: + while True: + value = prompt_func(prompt) + if value: + break + elif default is not None: + value = default + break + try: + result = value_proc(value) + except UsageError as e: + if hide_input: + echo(_("Error: The value you entered was invalid."), err=err) + else: + echo(_("Error: {e.message}").format(e=e), err=err) # noqa: B306 + continue + if not confirmation_prompt: + return result + while True: + value2 = prompt_func(confirmation_prompt) + is_empty = not value and not value2 + if value2 or is_empty: + break + if value == value2: + return result + echo(_("Error: The two entered values do not match."), err=err) + + +def confirm( + text: str, + default: t.Optional[bool] = False, + abort: bool = False, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, +) -> bool: + """Prompts for confirmation (yes/no question). + + If the user aborts the input by sending a interrupt signal this + function will catch it and raise a :exc:`Abort` exception. + + :param text: the question to ask. + :param default: The default value to use when no input is given. If + ``None``, repeat until input is given. + :param abort: if this is set to `True` a negative answer aborts the + exception by raising :exc:`Abort`. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + + .. versionchanged:: 8.0 + Repeat until input is given if ``default`` is ``None``. + + .. versionadded:: 4.0 + Added the ``err`` parameter. + """ + prompt = _build_prompt( + text, + prompt_suffix, + show_default, + "y/n" if default is None else ("Y/n" if default else "y/N"), + ) + + while True: + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(prompt.rstrip(" "), nl=False, err=err) + # Echo a space to stdout to work around an issue where + # readline causes backspace to clear the whole line. + value = visible_prompt_func(" ").lower().strip() + except (KeyboardInterrupt, EOFError): + raise Abort() from None + if value in ("y", "yes"): + rv = True + elif value in ("n", "no"): + rv = False + elif default is not None and value == "": + rv = default + else: + echo(_("Error: invalid input"), err=err) + continue + break + if abort and not rv: + raise Abort() + return rv + + +def echo_via_pager( + text_or_generator: t.Union[t.Iterable[str], t.Callable[[], t.Iterable[str]], str], + color: t.Optional[bool] = None, +) -> None: + """This function takes a text and shows it via an environment specific + pager on stdout. + + .. versionchanged:: 3.0 + Added the `color` flag. + + :param text_or_generator: the text to page, or alternatively, a + generator emitting the text to page. 
+ :param color: controls if the pager supports ANSI colors or not. The + default is autodetection. + """ + color = resolve_color_default(color) + + if inspect.isgeneratorfunction(text_or_generator): + i = t.cast(t.Callable[[], t.Iterable[str]], text_or_generator)() + elif isinstance(text_or_generator, str): + i = [text_or_generator] + else: + i = iter(t.cast(t.Iterable[str], text_or_generator)) + + # convert every element of i to a text type if necessary + text_generator = (el if isinstance(el, str) else str(el) for el in i) + + from ._termui_impl import pager + + return pager(itertools.chain(text_generator, "\n"), color) + + +def progressbar( + iterable: t.Optional[t.Iterable[V]] = None, + length: t.Optional[int] = None, + label: t.Optional[str] = None, + show_eta: bool = True, + show_percent: t.Optional[bool] = None, + show_pos: bool = False, + item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, + fill_char: str = "#", + empty_char: str = "-", + bar_template: str = "%(label)s [%(bar)s] %(info)s", + info_sep: str = " ", + width: int = 36, + file: t.Optional[t.TextIO] = None, + color: t.Optional[bool] = None, + update_min_steps: int = 1, +) -> "ProgressBar[V]": + """This function creates an iterable context manager that can be used + to iterate over something while showing a progress bar. It will + either iterate over the `iterable` or `length` items (that are counted + up). While iteration happens, this function will print a rendered + progress bar to the given `file` (defaults to stdout) and will attempt + to calculate remaining time and more. By default, this progress bar + will not be rendered if the file is not a terminal. + + The context manager creates the progress bar. When the context + manager is entered the progress bar is already created. With every + iteration over the progress bar, the iterable passed to the bar is + advanced and the bar is updated. When the context manager exits, + a newline is printed and the progress bar is finalized on screen. + + Note: The progress bar is currently designed for use cases where the + total progress can be expected to take at least several seconds. + Because of this, the ProgressBar class object won't display + progress that is considered too fast, and progress where the time + between steps is less than a second. + + No printing must happen or the progress bar will be unintentionally + destroyed. + + Example usage:: + + with progressbar(items) as bar: + for item in bar: + do_something_with(item) + + Alternatively, if no iterable is specified, one can manually update the + progress bar through the `update()` method instead of directly + iterating over the progress bar. The update method accepts the number + of steps to increment the bar with:: + + with progressbar(length=chunks.total_bytes) as bar: + for chunk in chunks: + process_chunk(chunk) + bar.update(chunks.bytes) + + The ``update()`` method also takes an optional value specifying the + ``current_item`` at the new position. This is useful when used + together with ``item_show_func`` to customize the output for each + manual step:: + + with click.progressbar( + length=total_size, + label='Unzipping archive', + item_show_func=lambda a: a.filename + ) as bar: + for archive in zip_file: + archive.extract() + bar.update(archive.size, archive) + + :param iterable: an iterable to iterate over. If not provided the length + is required. + :param length: the number of items to iterate over. 
By default the + progressbar will attempt to ask the iterator about its + length, which might or might not work. If an iterable is + also provided this parameter can be used to override the + length. If an iterable is not provided the progress bar + will iterate over a range of that length. + :param label: the label to show next to the progress bar. + :param show_eta: enables or disables the estimated time display. This is + automatically disabled if the length cannot be + determined. + :param show_percent: enables or disables the percentage display. The + default is `True` if the iterable has a length or + `False` if not. + :param show_pos: enables or disables the absolute position display. The + default is `False`. + :param item_show_func: A function called with the current item which + can return a string to show next to the progress bar. If the + function returns ``None`` nothing is shown. The current item can + be ``None``, such as when entering and exiting the bar. + :param fill_char: the character to use to show the filled part of the + progress bar. + :param empty_char: the character to use to show the non-filled part of + the progress bar. + :param bar_template: the format string to use as template for the bar. + The parameters in it are ``label`` for the label, + ``bar`` for the progress bar and ``info`` for the + info section. + :param info_sep: the separator between multiple info items (eta etc.) + :param width: the width of the progress bar in characters, 0 means full + terminal width + :param file: The file to write to. If this is not a terminal then + only the label is printed. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are included anywhere in the progress bar output + which is not the case by default. + :param update_min_steps: Render only when this many updates have + completed. This allows tuning for very fast iterators. + + .. versionchanged:: 8.0 + Output is shown even if execution time is less than 0.5 seconds. + + .. versionchanged:: 8.0 + ``item_show_func`` shows the current item, not the previous one. + + .. versionchanged:: 8.0 + Labels are echoed if the output is not a TTY. Reverts a change + in 7.0 that removed all output. + + .. versionadded:: 8.0 + Added the ``update_min_steps`` parameter. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. Added the ``update`` method to + the object. + + .. versionadded:: 2.0 + """ + from ._termui_impl import ProgressBar + + color = resolve_color_default(color) + return ProgressBar( + iterable=iterable, + length=length, + show_eta=show_eta, + show_percent=show_percent, + show_pos=show_pos, + item_show_func=item_show_func, + fill_char=fill_char, + empty_char=empty_char, + bar_template=bar_template, + info_sep=info_sep, + file=file, + label=label, + width=width, + color=color, + update_min_steps=update_min_steps, + ) + + +def clear() -> None: + """Clears the terminal screen. This will have the effect of clearing + the whole visible space of the terminal and moving the cursor to the + top left. This does not do anything if not connected to a terminal. + + .. 
versionadded:: 2.0 + """ + if not isatty(sys.stdout): + return + + # ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor + echo("\033[2J\033[1;1H", nl=False) + + +def _interpret_color( + color: t.Union[int, t.Tuple[int, int, int], str], offset: int = 0 +) -> str: + if isinstance(color, int): + return f"{38 + offset};5;{color:d}" + + if isinstance(color, (tuple, list)): + r, g, b = color + return f"{38 + offset};2;{r:d};{g:d};{b:d}" + + return str(_ansi_colors[color] + offset) + + +def style( + text: t.Any, + fg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, + bg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, + bold: t.Optional[bool] = None, + dim: t.Optional[bool] = None, + underline: t.Optional[bool] = None, + overline: t.Optional[bool] = None, + italic: t.Optional[bool] = None, + blink: t.Optional[bool] = None, + reverse: t.Optional[bool] = None, + strikethrough: t.Optional[bool] = None, + reset: bool = True, +) -> str: + """Styles a text with ANSI styles and returns the new string. By + default the styling is self contained which means that at the end + of the string a reset code is issued. This can be prevented by + passing ``reset=False``. + + Examples:: + + click.echo(click.style('Hello World!', fg='green')) + click.echo(click.style('ATTENTION!', blink=True)) + click.echo(click.style('Some things', reverse=True, fg='cyan')) + click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) + + Supported color names: + + * ``black`` (might be a gray) + * ``red`` + * ``green`` + * ``yellow`` (might be an orange) + * ``blue`` + * ``magenta`` + * ``cyan`` + * ``white`` (might be light gray) + * ``bright_black`` + * ``bright_red`` + * ``bright_green`` + * ``bright_yellow`` + * ``bright_blue`` + * ``bright_magenta`` + * ``bright_cyan`` + * ``bright_white`` + * ``reset`` (reset the color code only) + + If the terminal supports it, color may also be specified as: + + - An integer in the interval [0, 255]. The terminal must support + 8-bit/256-color mode. + - An RGB tuple of three integers in [0, 255]. The terminal must + support 24-bit/true-color mode. + + See https://en.wikipedia.org/wiki/ANSI_color and + https://gist.github.com/XVilka/8346728 for more information. + + :param text: the string to style with ansi codes. + :param fg: if provided this will become the foreground color. + :param bg: if provided this will become the background color. + :param bold: if provided this will enable or disable bold mode. + :param dim: if provided this will enable or disable dim mode. This is + badly supported. + :param underline: if provided this will enable or disable underline. + :param overline: if provided this will enable or disable overline. + :param italic: if provided this will enable or disable italic. + :param blink: if provided this will enable or disable blinking. + :param reverse: if provided this will enable or disable inverse + rendering (foreground becomes background and the + other way round). + :param strikethrough: if provided this will enable or disable + striking through text. + :param reset: by default a reset-all code is added at the end of the + string which means that styles do not carry over. This + can be disabled to compose styles. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. + + .. versionchanged:: 8.0 + Added support for 256 and RGB color codes. + + .. versionchanged:: 8.0 + Added the ``strikethrough``, ``italic``, and ``overline`` + parameters. + + .. 
versionchanged:: 7.0 + Added support for bright colors. + + .. versionadded:: 2.0 + """ + if not isinstance(text, str): + text = str(text) + + bits = [] + + if fg: + try: + bits.append(f"\033[{_interpret_color(fg)}m") + except KeyError: + raise TypeError(f"Unknown color {fg!r}") from None + + if bg: + try: + bits.append(f"\033[{_interpret_color(bg, 10)}m") + except KeyError: + raise TypeError(f"Unknown color {bg!r}") from None + + if bold is not None: + bits.append(f"\033[{1 if bold else 22}m") + if dim is not None: + bits.append(f"\033[{2 if dim else 22}m") + if underline is not None: + bits.append(f"\033[{4 if underline else 24}m") + if overline is not None: + bits.append(f"\033[{53 if overline else 55}m") + if italic is not None: + bits.append(f"\033[{3 if italic else 23}m") + if blink is not None: + bits.append(f"\033[{5 if blink else 25}m") + if reverse is not None: + bits.append(f"\033[{7 if reverse else 27}m") + if strikethrough is not None: + bits.append(f"\033[{9 if strikethrough else 29}m") + bits.append(text) + if reset: + bits.append(_ansi_reset_all) + return "".join(bits) + + +def unstyle(text: str) -> str: + """Removes ANSI styling information from a string. Usually it's not + necessary to use this function as Click's echo function will + automatically remove styling if necessary. + + .. versionadded:: 2.0 + + :param text: the text to remove style information from. + """ + return strip_ansi(text) + + +def secho( + message: t.Optional[t.Any] = None, + file: t.Optional[t.IO[t.AnyStr]] = None, + nl: bool = True, + err: bool = False, + color: t.Optional[bool] = None, + **styles: t.Any, +) -> None: + """This function combines :func:`echo` and :func:`style` into one + call. As such the following two calls are the same:: + + click.secho('Hello World!', fg='green') + click.echo(click.style('Hello World!', fg='green')) + + All keyword arguments are forwarded to the underlying functions + depending on which one they go with. + + Non-string types will be converted to :class:`str`. However, + :class:`bytes` are passed directly to :meth:`echo` without applying + style. If you want to style bytes that represent text, call + :meth:`bytes.decode` first. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. Bytes are + passed through without style applied. + + .. versionadded:: 2.0 + """ + if message is not None and not isinstance(message, (bytes, bytearray)): + message = style(message, **styles) + + return echo(message, file=file, nl=nl, err=err, color=color) + + +def edit( + text: t.Optional[t.AnyStr] = None, + editor: t.Optional[str] = None, + env: t.Optional[t.Mapping[str, str]] = None, + require_save: bool = True, + extension: str = ".txt", + filename: t.Optional[str] = None, +) -> t.Optional[t.AnyStr]: + r"""Edits the given text in the defined editor. If an editor is given + (should be the full path to the executable but the regular operating + system search path is used for finding the executable) it overrides + the detected editor. Optionally, some environment variables can be + used. If the editor is closed without changes, `None` is returned. In + case a file is edited directly the return value is always `None` and + `require_save` and `extension` are ignored. + + If the editor cannot be opened a :exc:`UsageError` is raised. + + Note for Windows: to simplify cross-platform usage, the newlines are + automatically converted from POSIX to Windows and vice versa. As such, + the message here will have ``\n`` as newline markers. 
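+
+    A minimal usage sketch (the text passed below is an arbitrary example)::
+
+        import click
+
+        new_text = click.edit("edit me\n")
+
+        if new_text is not None:
+            click.echo(new_text)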
+ + :param text: the text to edit. + :param editor: optionally the editor to use. Defaults to automatic + detection. + :param env: environment variables to forward to the editor. + :param require_save: if this is true, then not saving in the editor + will make the return value become `None`. + :param extension: the extension to tell the editor about. This defaults + to `.txt` but changing this might change syntax + highlighting. + :param filename: if provided it will edit this file instead of the + provided text contents. It will not use a temporary + file as an indirection in that case. + """ + from ._termui_impl import Editor + + ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) + + if filename is None: + return ed.edit(text) + + ed.edit_file(filename) + return None + + +def launch(url: str, wait: bool = False, locate: bool = False) -> int: + """This function launches the given URL (or filename) in the default + viewer application for this file type. If this is an executable, it + might launch the executable in a new session. The return value is + the exit code of the launched application. Usually, ``0`` indicates + success. + + Examples:: + + click.launch('https://click.palletsprojects.com/') + click.launch('/my/downloaded/file', locate=True) + + .. versionadded:: 2.0 + + :param url: URL or filename of the thing to launch. + :param wait: Wait for the program to exit before returning. This + only works if the launched program blocks. In particular, + ``xdg-open`` on Linux does not block. + :param locate: if this is set to `True` then instead of launching the + application associated with the URL it will attempt to + launch a file manager with the file located. This + might have weird effects if the URL does not point to + the filesystem. + """ + from ._termui_impl import open_url + + return open_url(url, wait=wait, locate=locate) + + +# If this is provided, getchar() calls into this instead. This is used +# for unittesting purposes. +_getchar: t.Optional[t.Callable[[bool], str]] = None + + +def getchar(echo: bool = False) -> str: + """Fetches a single character from the terminal and returns it. This + will always return a unicode character and under certain rare + circumstances this might return more than one character. The + situations which more than one character is returned is when for + whatever reason multiple characters end up in the terminal buffer or + standard input was not actually a terminal. + + Note that this will always read from the terminal, even if something + is piped into the standard input. + + Note for Windows: in rare cases when typing non-ASCII characters, this + function might wait for a second character and then return both at once. + This is because certain Unicode characters look like special-key markers. + + .. versionadded:: 2.0 + + :param echo: if set to `True`, the character read will also show up on + the terminal. The default is to not show it. + """ + global _getchar + + if _getchar is None: + from ._termui_impl import getchar as f + + _getchar = f + + return _getchar(echo) + + +def raw_terminal() -> t.ContextManager[int]: + from ._termui_impl import raw_terminal as f + + return f() + + +def pause(info: t.Optional[str] = None, err: bool = False) -> None: + """This command stops execution and waits for the user to press any + key to continue. This is similar to the Windows batch "pause" + command. If the program is not run through a terminal, this command + will instead do nothing. + + .. versionadded:: 2.0 + + .. 
versionadded:: 4.0 + Added the `err` parameter. + + :param info: The message to print before pausing. Defaults to + ``"Press any key to continue..."``. + :param err: if set to message goes to ``stderr`` instead of + ``stdout``, the same as with echo. + """ + if not isatty(sys.stdin) or not isatty(sys.stdout): + return + + if info is None: + info = _("Press any key to continue...") + + try: + if info: + echo(info, nl=False, err=err) + try: + getchar() + except (KeyboardInterrupt, EOFError): + pass + finally: + if info: + echo(err=err) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/testing.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/testing.py new file mode 100644 index 000000000..e0df0d2a6 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/testing.py @@ -0,0 +1,479 @@ +import contextlib +import io +import os +import shlex +import shutil +import sys +import tempfile +import typing as t +from types import TracebackType + +from . import formatting +from . import termui +from . import utils +from ._compat import _find_binary_reader + +if t.TYPE_CHECKING: + from .core import BaseCommand + + +class EchoingStdin: + def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None: + self._input = input + self._output = output + self._paused = False + + def __getattr__(self, x: str) -> t.Any: + return getattr(self._input, x) + + def _echo(self, rv: bytes) -> bytes: + if not self._paused: + self._output.write(rv) + + return rv + + def read(self, n: int = -1) -> bytes: + return self._echo(self._input.read(n)) + + def read1(self, n: int = -1) -> bytes: + return self._echo(self._input.read1(n)) # type: ignore + + def readline(self, n: int = -1) -> bytes: + return self._echo(self._input.readline(n)) + + def readlines(self) -> t.List[bytes]: + return [self._echo(x) for x in self._input.readlines()] + + def __iter__(self) -> t.Iterator[bytes]: + return iter(self._echo(x) for x in self._input) + + def __repr__(self) -> str: + return repr(self._input) + + +@contextlib.contextmanager +def _pause_echo(stream: t.Optional[EchoingStdin]) -> t.Iterator[None]: + if stream is None: + yield + else: + stream._paused = True + yield + stream._paused = False + + +class _NamedTextIOWrapper(io.TextIOWrapper): + def __init__( + self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any + ) -> None: + super().__init__(buffer, **kwargs) + self._name = name + self._mode = mode + + @property + def name(self) -> str: + return self._name + + @property + def mode(self) -> str: + return self._mode + + +def make_input_stream( + input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]], charset: str +) -> t.BinaryIO: + # Is already an input stream. + if hasattr(input, "read"): + rv = _find_binary_reader(t.cast(t.IO[t.Any], input)) + + if rv is not None: + return rv + + raise TypeError("Could not find binary reader for input stream.") + + if input is None: + input = b"" + elif isinstance(input, str): + input = input.encode(charset) + + return io.BytesIO(input) + + +class Result: + """Holds the captured result of an invoked CLI script.""" + + def __init__( + self, + runner: "CliRunner", + stdout_bytes: bytes, + stderr_bytes: t.Optional[bytes], + return_value: t.Any, + exit_code: int, + exception: t.Optional[BaseException], + exc_info: t.Optional[ + t.Tuple[t.Type[BaseException], BaseException, TracebackType] + ] = None, + ): + #: The runner that created the result + self.runner = runner + #: The standard output as bytes. 
+ self.stdout_bytes = stdout_bytes + #: The standard error as bytes, or None if not available + self.stderr_bytes = stderr_bytes + #: The value returned from the invoked command. + #: + #: .. versionadded:: 8.0 + self.return_value = return_value + #: The exit code as integer. + self.exit_code = exit_code + #: The exception that happened if one did. + self.exception = exception + #: The traceback + self.exc_info = exc_info + + @property + def output(self) -> str: + """The (standard) output as unicode string.""" + return self.stdout + + @property + def stdout(self) -> str: + """The standard output as unicode string.""" + return self.stdout_bytes.decode(self.runner.charset, "replace").replace( + "\r\n", "\n" + ) + + @property + def stderr(self) -> str: + """The standard error as unicode string.""" + if self.stderr_bytes is None: + raise ValueError("stderr not separately captured") + return self.stderr_bytes.decode(self.runner.charset, "replace").replace( + "\r\n", "\n" + ) + + def __repr__(self) -> str: + exc_str = repr(self.exception) if self.exception else "okay" + return f"<{type(self).__name__} {exc_str}>" + + +class CliRunner: + """The CLI runner provides functionality to invoke a Click command line + script for unittesting purposes in a isolated environment. This only + works in single-threaded systems without any concurrency as it changes the + global interpreter state. + + :param charset: the character set for the input and output data. + :param env: a dictionary with environment variables for overriding. + :param echo_stdin: if this is set to `True`, then reading from stdin writes + to stdout. This is useful for showing examples in + some circumstances. Note that regular prompts + will automatically echo the input. + :param mix_stderr: if this is set to `False`, then stdout and stderr are + preserved as independent streams. This is useful for + Unix-philosophy apps that have predictable stdout and + noisy stderr, such that each may be measured + independently + """ + + def __init__( + self, + charset: str = "utf-8", + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + echo_stdin: bool = False, + mix_stderr: bool = True, + ) -> None: + self.charset = charset + self.env: t.Mapping[str, t.Optional[str]] = env or {} + self.echo_stdin = echo_stdin + self.mix_stderr = mix_stderr + + def get_default_prog_name(self, cli: "BaseCommand") -> str: + """Given a command object it will return the default program name + for it. The default is the `name` attribute or ``"root"`` if not + set. + """ + return cli.name or "root" + + def make_env( + self, overrides: t.Optional[t.Mapping[str, t.Optional[str]]] = None + ) -> t.Mapping[str, t.Optional[str]]: + """Returns the environment overrides for invoking a script.""" + rv = dict(self.env) + if overrides: + rv.update(overrides) + return rv + + @contextlib.contextmanager + def isolation( + self, + input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]] = None, + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + color: bool = False, + ) -> t.Iterator[t.Tuple[io.BytesIO, t.Optional[io.BytesIO]]]: + """A context manager that sets up the isolation for invoking of a + command line tool. This sets up stdin with the given input data + and `os.environ` with the overrides from the given dictionary. + This also rebinds some internals in Click to be mocked (like the + prompt functionality). + + This is automatically done in the :meth:`invoke` method. + + :param input: the input stream to put into sys.stdin. 
+ :param env: the environment overrides as dictionary. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionchanged:: 8.0 + ``stderr`` is opened with ``errors="backslashreplace"`` + instead of the default ``"strict"``. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + """ + bytes_input = make_input_stream(input, self.charset) + echo_input = None + + old_stdin = sys.stdin + old_stdout = sys.stdout + old_stderr = sys.stderr + old_forced_width = formatting.FORCED_WIDTH + formatting.FORCED_WIDTH = 80 + + env = self.make_env(env) + + bytes_output = io.BytesIO() + + if self.echo_stdin: + bytes_input = echo_input = t.cast( + t.BinaryIO, EchoingStdin(bytes_input, bytes_output) + ) + + sys.stdin = text_input = _NamedTextIOWrapper( + bytes_input, encoding=self.charset, name="", mode="r" + ) + + if self.echo_stdin: + # Force unbuffered reads, otherwise TextIOWrapper reads a + # large chunk which is echoed early. + text_input._CHUNK_SIZE = 1 # type: ignore + + sys.stdout = _NamedTextIOWrapper( + bytes_output, encoding=self.charset, name="", mode="w" + ) + + bytes_error = None + if self.mix_stderr: + sys.stderr = sys.stdout + else: + bytes_error = io.BytesIO() + sys.stderr = _NamedTextIOWrapper( + bytes_error, + encoding=self.charset, + name="", + mode="w", + errors="backslashreplace", + ) + + @_pause_echo(echo_input) # type: ignore + def visible_input(prompt: t.Optional[str] = None) -> str: + sys.stdout.write(prompt or "") + val = text_input.readline().rstrip("\r\n") + sys.stdout.write(f"{val}\n") + sys.stdout.flush() + return val + + @_pause_echo(echo_input) # type: ignore + def hidden_input(prompt: t.Optional[str] = None) -> str: + sys.stdout.write(f"{prompt or ''}\n") + sys.stdout.flush() + return text_input.readline().rstrip("\r\n") + + @_pause_echo(echo_input) # type: ignore + def _getchar(echo: bool) -> str: + char = sys.stdin.read(1) + + if echo: + sys.stdout.write(char) + + sys.stdout.flush() + return char + + default_color = color + + def should_strip_ansi( + stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None + ) -> bool: + if color is None: + return not default_color + return not color + + old_visible_prompt_func = termui.visible_prompt_func + old_hidden_prompt_func = termui.hidden_prompt_func + old__getchar_func = termui._getchar + old_should_strip_ansi = utils.should_strip_ansi # type: ignore + termui.visible_prompt_func = visible_input + termui.hidden_prompt_func = hidden_input + termui._getchar = _getchar + utils.should_strip_ansi = should_strip_ansi # type: ignore + + old_env = {} + try: + for key, value in env.items(): + old_env[key] = os.environ.get(key) + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + yield (bytes_output, bytes_error) + finally: + for key, value in old_env.items(): + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + sys.stdout = old_stdout + sys.stderr = old_stderr + sys.stdin = old_stdin + termui.visible_prompt_func = old_visible_prompt_func + termui.hidden_prompt_func = old_hidden_prompt_func + termui._getchar = old__getchar_func + utils.should_strip_ansi = old_should_strip_ansi # type: ignore + formatting.FORCED_WIDTH = old_forced_width + + def invoke( + self, + cli: "BaseCommand", + args: t.Optional[t.Union[str, t.Sequence[str]]] = None, + input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]] = None, + env: t.Optional[t.Mapping[str, 
t.Optional[str]]] = None, + catch_exceptions: bool = True, + color: bool = False, + **extra: t.Any, + ) -> Result: + """Invokes a command in an isolated environment. The arguments are + forwarded directly to the command line script, the `extra` keyword + arguments are passed to the :meth:`~clickpkg.Command.main` function of + the command. + + This returns a :class:`Result` object. + + :param cli: the command to invoke + :param args: the arguments to invoke. It may be given as an iterable + or a string. When given as string it will be interpreted + as a Unix shell command. More details at + :func:`shlex.split`. + :param input: the input data for `sys.stdin`. + :param env: the environment overrides. + :param catch_exceptions: Whether to catch any other exceptions than + ``SystemExit``. + :param extra: the keyword arguments to pass to :meth:`main`. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionchanged:: 8.0 + The result object has the ``return_value`` attribute with + the value returned from the invoked command. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionchanged:: 3.0 + Added the ``catch_exceptions`` parameter. + + .. versionchanged:: 3.0 + The result object has the ``exc_info`` attribute with the + traceback if available. + """ + exc_info = None + with self.isolation(input=input, env=env, color=color) as outstreams: + return_value = None + exception: t.Optional[BaseException] = None + exit_code = 0 + + if isinstance(args, str): + args = shlex.split(args) + + try: + prog_name = extra.pop("prog_name") + except KeyError: + prog_name = self.get_default_prog_name(cli) + + try: + return_value = cli.main(args=args or (), prog_name=prog_name, **extra) + except SystemExit as e: + exc_info = sys.exc_info() + e_code = t.cast(t.Optional[t.Union[int, t.Any]], e.code) + + if e_code is None: + e_code = 0 + + if e_code != 0: + exception = e + + if not isinstance(e_code, int): + sys.stdout.write(str(e_code)) + sys.stdout.write("\n") + e_code = 1 + + exit_code = e_code + + except Exception as e: + if not catch_exceptions: + raise + exception = e + exit_code = 1 + exc_info = sys.exc_info() + finally: + sys.stdout.flush() + stdout = outstreams[0].getvalue() + if self.mix_stderr: + stderr = None + else: + stderr = outstreams[1].getvalue() # type: ignore + + return Result( + runner=self, + stdout_bytes=stdout, + stderr_bytes=stderr, + return_value=return_value, + exit_code=exit_code, + exception=exception, + exc_info=exc_info, # type: ignore + ) + + @contextlib.contextmanager + def isolated_filesystem( + self, temp_dir: t.Optional[t.Union[str, "os.PathLike[str]"]] = None + ) -> t.Iterator[str]: + """A context manager that creates a temporary directory and + changes the current working directory to it. This isolates tests + that affect the contents of the CWD to prevent them from + interfering with each other. + + :param temp_dir: Create the temporary directory under this + directory. If given, the created directory is not removed + when exiting. + + .. versionchanged:: 8.0 + Added the ``temp_dir`` parameter. 
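+
+        A minimal usage sketch (``hello`` stands in for any Click command and
+        ``greeting.txt`` is an arbitrary file name)::
+
+            runner = CliRunner()
+
+            with runner.isolated_filesystem() as tmp_dir:
+                with open("greeting.txt", "w") as f:
+                    f.write("hello world")
+
+                result = runner.invoke(hello, ["greeting.txt"])
+                assert result.exit_code == 0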
+ """ + cwd = os.getcwd() + dt = tempfile.mkdtemp(dir=temp_dir) + os.chdir(dt) + + try: + yield dt + finally: + os.chdir(cwd) + + if temp_dir is None: + try: + shutil.rmtree(dt) + except OSError: # noqa: B014 + pass diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/types.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/types.py new file mode 100644 index 000000000..2b1d1797f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/types.py @@ -0,0 +1,1089 @@ +import os +import stat +import sys +import typing as t +from datetime import datetime +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import _get_argv_encoding +from ._compat import open_stream +from .exceptions import BadParameter +from .utils import format_filename +from .utils import LazyFile +from .utils import safecall + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Context + from .core import Parameter + from .shell_completion import CompletionItem + + +class ParamType: + """Represents the type of a parameter. Validates and converts values + from the command line or Python into the correct type. + + To implement a custom type, subclass and implement at least the + following: + + - The :attr:`name` class attribute must be set. + - Calling an instance of the type with ``None`` must return + ``None``. This is already implemented by default. + - :meth:`convert` must convert string values to the correct type. + - :meth:`convert` must accept values that are already the correct + type. + - It must be able to convert a value if the ``ctx`` and ``param`` + arguments are ``None``. This can occur when converting prompt + input. + """ + + is_composite: t.ClassVar[bool] = False + arity: t.ClassVar[int] = 1 + + #: the descriptive name of this type + name: str + + #: if a list of this type is expected and the value is pulled from a + #: string environment variable, this is what splits it up. `None` + #: means any whitespace. For all parameters the general rule is that + #: whitespace splits them up. The exception are paths and files which + #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on + #: Windows). + envvar_list_splitter: t.ClassVar[t.Optional[str]] = None + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + # The class name without the "ParamType" suffix. + param_type = type(self).__name__.partition("ParamType")[0] + param_type = param_type.partition("ParameterType")[0] + + # Custom subclasses might not remember to set a name. + if hasattr(self, "name"): + name = self.name + else: + name = param_type + + return {"param_type": param_type, "name": name} + + def __call__( + self, + value: t.Any, + param: t.Optional["Parameter"] = None, + ctx: t.Optional["Context"] = None, + ) -> t.Any: + if value is not None: + return self.convert(value, param, ctx) + + def get_metavar(self, param: "Parameter") -> t.Optional[str]: + """Returns the metavar default for this param if it provides one.""" + + def get_missing_message(self, param: "Parameter") -> t.Optional[str]: + """Optionally might return extra information about a missing + parameter. + + .. 
versionadded:: 2.0 + """ + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + """Convert the value to the correct type. This is not called if + the value is ``None`` (the missing value). + + This must accept string values from the command line, as well as + values that are already the correct type. It may also convert + other compatible types. + + The ``param`` and ``ctx`` arguments may be ``None`` in certain + situations, such as when converting prompt input. + + If the value cannot be converted, call :meth:`fail` with a + descriptive message. + + :param value: The value to convert. + :param param: The parameter that is using this type to convert + its value. May be ``None``. + :param ctx: The current context that arrived at this value. May + be ``None``. + """ + return value + + def split_envvar_value(self, rv: str) -> t.Sequence[str]: + """Given a value from an environment variable this splits it up + into small chunks depending on the defined envvar list splitter. + + If the splitter is set to `None`, which means that whitespace splits, + then leading and trailing whitespace is ignored. Otherwise, leading + and trailing splitters usually lead to empty items being included. + """ + return (rv or "").split(self.envvar_list_splitter) + + def fail( + self, + message: str, + param: t.Optional["Parameter"] = None, + ctx: t.Optional["Context"] = None, + ) -> "t.NoReturn": + """Helper method to fail with an invalid value message.""" + raise BadParameter(message, ctx=ctx, param=param) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a list of + :class:`~click.shell_completion.CompletionItem` objects for the + incomplete value. Most types do not provide completions, but + some do, and this allows custom types to provide custom + completions as well. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + return [] + + +class CompositeParamType(ParamType): + is_composite = True + + @property + def arity(self) -> int: # type: ignore + raise NotImplementedError() + + +class FuncParamType(ParamType): + def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: + self.name: str = func.__name__ + self.func = func + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["func"] = self.func + return info_dict + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + return self.func(value) + except ValueError: + try: + value = str(value) + except UnicodeError: + value = value.decode("utf-8", "replace") + + self.fail(value, param, ctx) + + +class UnprocessedParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + return value + + def __repr__(self) -> str: + return "UNPROCESSED" + + +class StringParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if isinstance(value, bytes): + enc = _get_argv_encoding() + try: + value = value.decode(enc) + except UnicodeError: + fs_enc = sys.getfilesystemencoding() + if fs_enc != enc: + try: + value = value.decode(fs_enc) + except UnicodeError: + value = value.decode("utf-8", "replace") + else: + value = value.decode("utf-8", "replace") + return value + return str(value) + + def __repr__(self) -> str: + return "STRING" + + +class Choice(ParamType): + """The choice type allows a value to be checked against a fixed set + of supported values. All of these values have to be strings. + + You should only pass a list or tuple of choices. Other iterables + (like generators) may lead to surprising results. + + The resulting value will always be one of the originally passed choices + regardless of ``case_sensitive`` or any ``ctx.token_normalize_func`` + being specified. + + See :ref:`choice-opts` for an example. + + :param case_sensitive: Set to false to make choices case + insensitive. Defaults to true. + """ + + name = "choice" + + def __init__(self, choices: t.Sequence[str], case_sensitive: bool = True) -> None: + self.choices = choices + self.case_sensitive = case_sensitive + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["choices"] = self.choices + info_dict["case_sensitive"] = self.case_sensitive + return info_dict + + def get_metavar(self, param: "Parameter") -> str: + choices_str = "|".join(self.choices) + + # Use curly braces to indicate a required argument. + if param.required and param.param_type_name == "argument": + return f"{{{choices_str}}}" + + # Use square braces to indicate an option or optional argument. 
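+        #
+        # A minimal sketch of a declaration whose metavar is rendered by this
+        # branch ("--fmt" and its choices are arbitrary examples)::
+        #
+        #     @click.option(
+        #         "--fmt",
+        #         type=click.Choice(["json", "yaml"], case_sensitive=False),
+        #     )
+        #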
+ return f"[{choices_str}]" + + def get_missing_message(self, param: "Parameter") -> str: + return _("Choose from:\n\t{choices}").format(choices=",\n\t".join(self.choices)) + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + # Match through normalization and case sensitivity + # first do token_normalize_func, then lowercase + # preserve original `value` to produce an accurate message in + # `self.fail` + normed_value = value + normed_choices = {choice: choice for choice in self.choices} + + if ctx is not None and ctx.token_normalize_func is not None: + normed_value = ctx.token_normalize_func(value) + normed_choices = { + ctx.token_normalize_func(normed_choice): original + for normed_choice, original in normed_choices.items() + } + + if not self.case_sensitive: + normed_value = normed_value.casefold() + normed_choices = { + normed_choice.casefold(): original + for normed_choice, original in normed_choices.items() + } + + if normed_value in normed_choices: + return normed_choices[normed_value] + + choices_str = ", ".join(map(repr, self.choices)) + self.fail( + ngettext( + "{value!r} is not {choice}.", + "{value!r} is not one of {choices}.", + len(self.choices), + ).format(value=value, choice=choices_str, choices=choices_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return f"Choice({list(self.choices)})" + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Complete choices that start with the incomplete value. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + str_choices = map(str, self.choices) + + if self.case_sensitive: + matched = (c for c in str_choices if c.startswith(incomplete)) + else: + incomplete = incomplete.lower() + matched = (c for c in str_choices if c.lower().startswith(incomplete)) + + return [CompletionItem(c) for c in matched] + + +class DateTime(ParamType): + """The DateTime type converts date strings into `datetime` objects. + + The format strings which are checked are configurable, but default to some + common (non-timezone aware) ISO 8601 formats. + + When specifying *DateTime* formats, you should only pass a list or a tuple. + Other iterables, like generators, may lead to surprising results. + + The format strings are processed using ``datetime.strptime``, and this + consequently defines the format strings which are allowed. + + Parsing is tried using each format, in order, and the first format which + parses successfully is used. + + :param formats: A list or tuple of date format strings, in the order in + which they should be tried. Defaults to + ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, + ``'%Y-%m-%d %H:%M:%S'``. 
+ """ + + name = "datetime" + + def __init__(self, formats: t.Optional[t.Sequence[str]] = None): + self.formats: t.Sequence[str] = formats or [ + "%Y-%m-%d", + "%Y-%m-%dT%H:%M:%S", + "%Y-%m-%d %H:%M:%S", + ] + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["formats"] = self.formats + return info_dict + + def get_metavar(self, param: "Parameter") -> str: + return f"[{'|'.join(self.formats)}]" + + def _try_to_convert_date(self, value: t.Any, format: str) -> t.Optional[datetime]: + try: + return datetime.strptime(value, format) + except ValueError: + return None + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if isinstance(value, datetime): + return value + + for format in self.formats: + converted = self._try_to_convert_date(value, format) + + if converted is not None: + return converted + + formats_str = ", ".join(map(repr, self.formats)) + self.fail( + ngettext( + "{value!r} does not match the format {format}.", + "{value!r} does not match the formats {formats}.", + len(self.formats), + ).format(value=value, format=formats_str, formats=formats_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return "DateTime" + + +class _NumberParamTypeBase(ParamType): + _number_class: t.ClassVar[t.Type[t.Any]] + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + return self._number_class(value) + except ValueError: + self.fail( + _("{value!r} is not a valid {number_type}.").format( + value=value, number_type=self.name + ), + param, + ctx, + ) + + +class _NumberRangeBase(_NumberParamTypeBase): + def __init__( + self, + min: t.Optional[float] = None, + max: t.Optional[float] = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + self.min = min + self.max = max + self.min_open = min_open + self.max_open = max_open + self.clamp = clamp + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + min=self.min, + max=self.max, + min_open=self.min_open, + max_open=self.max_open, + clamp=self.clamp, + ) + return info_dict + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + import operator + + rv = super().convert(value, param, ctx) + lt_min: bool = self.min is not None and ( + operator.le if self.min_open else operator.lt + )(rv, self.min) + gt_max: bool = self.max is not None and ( + operator.ge if self.max_open else operator.gt + )(rv, self.max) + + if self.clamp: + if lt_min: + return self._clamp(self.min, 1, self.min_open) # type: ignore + + if gt_max: + return self._clamp(self.max, -1, self.max_open) # type: ignore + + if lt_min or gt_max: + self.fail( + _("{value} is not in the range {range}.").format( + value=rv, range=self._describe_range() + ), + param, + ctx, + ) + + return rv + + def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: + """Find the valid value to clamp to bound in the given + direction. + + :param bound: The boundary value. + :param dir: 1 or -1 indicating the direction to move. + :param open: If true, the range does not include the bound. 
+ """ + raise NotImplementedError + + def _describe_range(self) -> str: + """Describe the range for use in help text.""" + if self.min is None: + op = "<" if self.max_open else "<=" + return f"x{op}{self.max}" + + if self.max is None: + op = ">" if self.min_open else ">=" + return f"x{op}{self.min}" + + lop = "<" if self.min_open else "<=" + rop = "<" if self.max_open else "<=" + return f"{self.min}{lop}x{rop}{self.max}" + + def __repr__(self) -> str: + clamp = " clamped" if self.clamp else "" + return f"<{type(self).__name__} {self._describe_range()}{clamp}>" + + +class IntParamType(_NumberParamTypeBase): + name = "integer" + _number_class = int + + def __repr__(self) -> str: + return "INT" + + +class IntRange(_NumberRangeBase, IntParamType): + """Restrict an :data:`click.INT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "integer range" + + def _clamp( # type: ignore + self, bound: int, dir: "te.Literal[1, -1]", open: bool + ) -> int: + if not open: + return bound + + return bound + dir + + +class FloatParamType(_NumberParamTypeBase): + name = "float" + _number_class = float + + def __repr__(self) -> str: + return "FLOAT" + + +class FloatRange(_NumberRangeBase, FloatParamType): + """Restrict a :data:`click.FLOAT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. This is not supported if either + boundary is marked ``open``. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "float range" + + def __init__( + self, + min: t.Optional[float] = None, + max: t.Optional[float] = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + super().__init__( + min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp + ) + + if (min_open or max_open) and clamp: + raise TypeError("Clamping is not supported for open bounds.") + + def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: + if not open: + return bound + + # Could use Python 3.9's math.nextafter here, but clamping an + # open float range doesn't seem to be particularly useful. It's + # left up to the user to write a callback to do it if needed. 
+ raise RuntimeError("Clamping is not supported for open bounds.") + + +class BoolParamType(ParamType): + name = "boolean" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if value in {False, True}: + return bool(value) + + norm = value.strip().lower() + + if norm in {"1", "true", "t", "yes", "y", "on"}: + return True + + if norm in {"0", "false", "f", "no", "n", "off"}: + return False + + self.fail( + _("{value!r} is not a valid boolean.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "BOOL" + + +class UUIDParameterType(ParamType): + name = "uuid" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + import uuid + + if isinstance(value, uuid.UUID): + return value + + value = value.strip() + + try: + return uuid.UUID(value) + except ValueError: + self.fail( + _("{value!r} is not a valid UUID.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "UUID" + + +class File(ParamType): + """Declares a parameter to be a file for reading or writing. The file + is automatically closed once the context tears down (after the command + finished working). + + Files can be opened for reading or writing. The special value ``-`` + indicates stdin or stdout depending on the mode. + + By default, the file is opened for reading text data, but it can also be + opened in binary mode or for writing. The encoding parameter can be used + to force a specific encoding. + + The `lazy` flag controls if the file should be opened immediately or upon + first IO. The default is to be non-lazy for standard input and output + streams as well as files opened for reading, `lazy` otherwise. When opening a + file lazily for reading, it is still opened temporarily for validation, but + will not be held open until first IO. lazy is mainly useful when opening + for writing to avoid creating the file until it is needed. + + Starting with Click 2.0, files can also be opened atomically in which + case all writes go into a separate file in the same folder and upon + completion the file will be moved over to the original location. This + is useful if a file regularly read by other users is modified. + + See :ref:`file-args` for more information. 
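+
+    A minimal usage sketch (assuming the Click 8.x API vendored here):
+
+    .. code-block:: python
+
+        import click
+
+        @click.command()
+        @click.argument("src", type=click.File("r"))
+        @click.argument("dst", type=click.File("w", lazy=True))
+        def copy(src, dst):
+            dst.write(src.read())
+
+    Passing ``-`` for either argument uses stdin or stdout, and both files are
+    closed automatically when the context tears down.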
+ """ + + name = "filename" + envvar_list_splitter: t.ClassVar[str] = os.path.pathsep + + def __init__( + self, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + lazy: t.Optional[bool] = None, + atomic: bool = False, + ) -> None: + self.mode = mode + self.encoding = encoding + self.errors = errors + self.lazy = lazy + self.atomic = atomic + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update(mode=self.mode, encoding=self.encoding) + return info_dict + + def resolve_lazy_flag(self, value: "t.Union[str, os.PathLike[str]]") -> bool: + if self.lazy is not None: + return self.lazy + if os.fspath(value) == "-": + return False + elif "w" in self.mode: + return True + return False + + def convert( + self, + value: t.Union[str, "os.PathLike[str]", t.IO[t.Any]], + param: t.Optional["Parameter"], + ctx: t.Optional["Context"], + ) -> t.IO[t.Any]: + if _is_file_like(value): + return value + + value = t.cast("t.Union[str, os.PathLike[str]]", value) + + try: + lazy = self.resolve_lazy_flag(value) + + if lazy: + lf = LazyFile( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + if ctx is not None: + ctx.call_on_close(lf.close_intelligently) + + return t.cast(t.IO[t.Any], lf) + + f, should_close = open_stream( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + # If a context is provided, we automatically close the file + # at the end of the context execution (or flush out). If a + # context does not exist, it's the caller's responsibility to + # properly close the file. This for instance happens when the + # type is used with prompts. + if ctx is not None: + if should_close: + ctx.call_on_close(safecall(f.close)) + else: + ctx.call_on_close(safecall(f.flush)) + + return f + except OSError as e: # noqa: B014 + self.fail(f"'{format_filename(value)}': {e.strerror}", param, ctx) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a special completion marker that tells the completion + system to use the shell to provide file path completions. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + return [CompletionItem(incomplete, type="file")] + + +def _is_file_like(value: t.Any) -> "te.TypeGuard[t.IO[t.Any]]": + return hasattr(value, "read") or hasattr(value, "write") + + +class Path(ParamType): + """The ``Path`` type is similar to the :class:`File` type, but + returns the filename instead of an open file. Various checks can be + enabled to validate the type of file and permissions. + + :param exists: The file or directory needs to exist for the value to + be valid. If this is not set to ``True``, and the file does not + exist, then all further checks are silently skipped. + :param file_okay: Allow a file as a value. + :param dir_okay: Allow a directory as a value. + :param readable: if true, a readable check is performed. + :param writable: if true, a writable check is performed. + :param executable: if true, an executable check is performed. + :param resolve_path: Make the value absolute and resolve any + symlinks. A ``~`` is not expanded, as this is supposed to be + done by the shell only. 
+ :param allow_dash: Allow a single dash as a value, which indicates + a standard stream (but does not open it). Use + :func:`~click.open_file` to handle opening this value. + :param path_type: Convert the incoming path value to this type. If + ``None``, keep Python's default, which is ``str``. Useful to + convert to :class:`pathlib.Path`. + + .. versionchanged:: 8.1 + Added the ``executable`` parameter. + + .. versionchanged:: 8.0 + Allow passing ``path_type=pathlib.Path``. + + .. versionchanged:: 6.0 + Added the ``allow_dash`` parameter. + """ + + envvar_list_splitter: t.ClassVar[str] = os.path.pathsep + + def __init__( + self, + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: bool = False, + path_type: t.Optional[t.Type[t.Any]] = None, + executable: bool = False, + ): + self.exists = exists + self.file_okay = file_okay + self.dir_okay = dir_okay + self.readable = readable + self.writable = writable + self.executable = executable + self.resolve_path = resolve_path + self.allow_dash = allow_dash + self.type = path_type + + if self.file_okay and not self.dir_okay: + self.name: str = _("file") + elif self.dir_okay and not self.file_okay: + self.name = _("directory") + else: + self.name = _("path") + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + exists=self.exists, + file_okay=self.file_okay, + dir_okay=self.dir_okay, + writable=self.writable, + readable=self.readable, + allow_dash=self.allow_dash, + ) + return info_dict + + def coerce_path_result( + self, value: "t.Union[str, os.PathLike[str]]" + ) -> "t.Union[str, bytes, os.PathLike[str]]": + if self.type is not None and not isinstance(value, self.type): + if self.type is str: + return os.fsdecode(value) + elif self.type is bytes: + return os.fsencode(value) + else: + return t.cast("os.PathLike[str]", self.type(value)) + + return value + + def convert( + self, + value: "t.Union[str, os.PathLike[str]]", + param: t.Optional["Parameter"], + ctx: t.Optional["Context"], + ) -> "t.Union[str, bytes, os.PathLike[str]]": + rv = value + + is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") + + if not is_dash: + if self.resolve_path: + # os.path.realpath doesn't resolve symlinks on Windows + # until Python 3.8. Use pathlib for now. 
+ import pathlib + + rv = os.fsdecode(pathlib.Path(rv).resolve()) + + try: + st = os.stat(rv) + except OSError: + if not self.exists: + return self.coerce_path_result(rv) + self.fail( + _("{name} {filename!r} does not exist.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if not self.file_okay and stat.S_ISREG(st.st_mode): + self.fail( + _("{name} {filename!r} is a file.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + if not self.dir_okay and stat.S_ISDIR(st.st_mode): + self.fail( + _("{name} '{filename}' is a directory.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.readable and not os.access(rv, os.R_OK): + self.fail( + _("{name} {filename!r} is not readable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.writable and not os.access(rv, os.W_OK): + self.fail( + _("{name} {filename!r} is not writable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.executable and not os.access(value, os.X_OK): + self.fail( + _("{name} {filename!r} is not executable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + return self.coerce_path_result(rv) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a special completion marker that tells the completion + system to use the shell to provide path completions for only + directories or any paths. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + type = "dir" if self.dir_okay and not self.file_okay else "file" + return [CompletionItem(incomplete, type=type)] + + +class Tuple(CompositeParamType): + """The default behavior of Click is to apply a type on a value directly. + This works well in most cases, except for when `nargs` is set to a fixed + count and different types should be used for different items. In this + case the :class:`Tuple` type can be used. This type can only be used + if `nargs` is set to a fixed number. + + For more information see :ref:`tuple-type`. + + This can be selected by using a Python tuple literal as a type. + + :param types: a list of types that should be used for the tuple items. 
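+
+    A minimal usage sketch (assuming the Click 8.x API vendored here):
+
+    .. code-block:: python
+
+        import click
+
+        @click.command()
+        @click.option("--point", type=(str, int))
+        def mark(point):
+            name, score = point
+            click.echo(f"{name}: {score}")
+
+    The tuple literal ``(str, int)`` is turned into ``Tuple([STRING, INT])`` by
+    :func:`convert_type`, so ``--point home 7`` produces ``("home", 7)``.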
+ """ + + def __init__(self, types: t.Sequence[t.Union[t.Type[t.Any], ParamType]]) -> None: + self.types: t.Sequence[ParamType] = [convert_type(ty) for ty in types] + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["types"] = [t.to_info_dict() for t in self.types] + return info_dict + + @property + def name(self) -> str: # type: ignore + return f"<{' '.join(ty.name for ty in self.types)}>" + + @property + def arity(self) -> int: # type: ignore + return len(self.types) + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + len_type = len(self.types) + len_value = len(value) + + if len_value != len_type: + self.fail( + ngettext( + "{len_type} values are required, but {len_value} was given.", + "{len_type} values are required, but {len_value} were given.", + len_value, + ).format(len_type=len_type, len_value=len_value), + param=param, + ctx=ctx, + ) + + return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) + + +def convert_type(ty: t.Optional[t.Any], default: t.Optional[t.Any] = None) -> ParamType: + """Find the most appropriate :class:`ParamType` for the given Python + type. If the type isn't provided, it can be inferred from a default + value. + """ + guessed_type = False + + if ty is None and default is not None: + if isinstance(default, (tuple, list)): + # If the default is empty, ty will remain None and will + # return STRING. + if default: + item = default[0] + + # A tuple of tuples needs to detect the inner types. + # Can't call convert recursively because that would + # incorrectly unwind the tuple to a single type. + if isinstance(item, (tuple, list)): + ty = tuple(map(type, item)) + else: + ty = type(item) + else: + ty = type(default) + + guessed_type = True + + if isinstance(ty, tuple): + return Tuple(ty) + + if isinstance(ty, ParamType): + return ty + + if ty is str or ty is None: + return STRING + + if ty is int: + return INT + + if ty is float: + return FLOAT + + if ty is bool: + return BOOL + + if guessed_type: + return STRING + + if __debug__: + try: + if issubclass(ty, ParamType): + raise AssertionError( + f"Attempted to use an uninstantiated parameter type ({ty})." + ) + except TypeError: + # ty is an instance (correct), so issubclass fails. + pass + + return FuncParamType(ty) + + +#: A dummy parameter type that just does nothing. From a user's +#: perspective this appears to just be the same as `STRING` but +#: internally no string conversion takes place if the input was bytes. +#: This is usually useful when working with file paths as they can +#: appear in bytes and unicode. +#: +#: For path related uses the :class:`Path` type is a better choice but +#: there are situations where an unprocessed type is useful which is why +#: it is is provided. +#: +#: .. versionadded:: 4.0 +UNPROCESSED = UnprocessedParamType() + +#: A unicode string parameter type which is the implicit default. This +#: can also be selected by using ``str`` as type. +STRING = StringParamType() + +#: An integer parameter. This can also be selected by using ``int`` as +#: type. +INT = IntParamType() + +#: A floating point value parameter. This can also be selected by using +#: ``float`` as type. +FLOAT = FloatParamType() + +#: A boolean parameter. This is the default for boolean flags. This can +#: also be selected by using ``bool`` as a type. +BOOL = BoolParamType() + +#: A UUID parameter. 
+UUID = UUIDParameterType() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/click/utils.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/utils.py new file mode 100644 index 000000000..d536434f0 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/click/utils.py @@ -0,0 +1,624 @@ +import os +import re +import sys +import typing as t +from functools import update_wrapper +from types import ModuleType +from types import TracebackType + +from ._compat import _default_text_stderr +from ._compat import _default_text_stdout +from ._compat import _find_binary_writer +from ._compat import auto_wrap_for_ansi +from ._compat import binary_streams +from ._compat import open_stream +from ._compat import should_strip_ansi +from ._compat import strip_ansi +from ._compat import text_streams +from ._compat import WIN +from .globals import resolve_color_default + +if t.TYPE_CHECKING: + import typing_extensions as te + + P = te.ParamSpec("P") + +R = t.TypeVar("R") + + +def _posixify(name: str) -> str: + return "-".join(name.split()).lower() + + +def safecall(func: "t.Callable[P, R]") -> "t.Callable[P, t.Optional[R]]": + """Wraps a function so that it swallows exceptions.""" + + def wrapper(*args: "P.args", **kwargs: "P.kwargs") -> t.Optional[R]: + try: + return func(*args, **kwargs) + except Exception: + pass + return None + + return update_wrapper(wrapper, func) + + +def make_str(value: t.Any) -> str: + """Converts a value into a valid string.""" + if isinstance(value, bytes): + try: + return value.decode(sys.getfilesystemencoding()) + except UnicodeError: + return value.decode("utf-8", "replace") + return str(value) + + +def make_default_short_help(help: str, max_length: int = 45) -> str: + """Returns a condensed version of help string.""" + # Consider only the first paragraph. + paragraph_end = help.find("\n\n") + + if paragraph_end != -1: + help = help[:paragraph_end] + + # Collapse newlines, tabs, and spaces. + words = help.split() + + if not words: + return "" + + # The first paragraph started with a "no rewrap" marker, ignore it. + if words[0] == "\b": + words = words[1:] + + total_length = 0 + last_index = len(words) - 1 + + for i, word in enumerate(words): + total_length += len(word) + (i > 0) + + if total_length > max_length: # too long, truncate + break + + if word[-1] == ".": # sentence end, truncate without "..." + return " ".join(words[: i + 1]) + + if total_length == max_length and i != last_index: + break # not at sentence end, truncate with "..." + else: + return " ".join(words) # no truncation needed + + # Account for the length of the suffix. + total_length += len("...") + + # remove words until the length is short enough + while i > 0: + total_length -= len(words[i]) + (i > 0) + + if total_length <= max_length: + break + + i -= 1 + + return " ".join(words[:i]) + "..." + + +class LazyFile: + """A lazy file works like a regular file but it does not fully open + the file but it does perform some basic checks early to see if the + filename parameter does make sense. This is useful for safely opening + files for writing. 
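+
+    A minimal usage sketch (an illustration based on the API in this module;
+    the file name is made up):
+
+    .. code-block:: python
+
+        from click.utils import LazyFile
+
+        lf = LazyFile("report.txt", "w")   # nothing is created yet
+        lf.write("done\n")                 # first IO opens the file
+        lf.close_intelligently()
+
+    The same behavior is available through ``click.File("w", lazy=True)`` and
+    ``click.open_file(..., lazy=True)``.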
+ """ + + def __init__( + self, + filename: t.Union[str, "os.PathLike[str]"], + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + atomic: bool = False, + ): + self.name: str = os.fspath(filename) + self.mode = mode + self.encoding = encoding + self.errors = errors + self.atomic = atomic + self._f: t.Optional[t.IO[t.Any]] + self.should_close: bool + + if self.name == "-": + self._f, self.should_close = open_stream(filename, mode, encoding, errors) + else: + if "r" in mode: + # Open and close the file in case we're opening it for + # reading so that we can catch at least some errors in + # some cases early. + open(filename, mode).close() + self._f = None + self.should_close = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self.open(), name) + + def __repr__(self) -> str: + if self._f is not None: + return repr(self._f) + return f"" + + def open(self) -> t.IO[t.Any]: + """Opens the file if it's not yet open. This call might fail with + a :exc:`FileError`. Not handling this error will produce an error + that Click shows. + """ + if self._f is not None: + return self._f + try: + rv, self.should_close = open_stream( + self.name, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + except OSError as e: # noqa: E402 + from .exceptions import FileError + + raise FileError(self.name, hint=e.strerror) from e + self._f = rv + return rv + + def close(self) -> None: + """Closes the underlying file, no matter what.""" + if self._f is not None: + self._f.close() + + def close_intelligently(self) -> None: + """This function only closes the file if it was opened by the lazy + file wrapper. For instance this will never close stdin. + """ + if self.should_close: + self.close() + + def __enter__(self) -> "LazyFile": + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self.close_intelligently() + + def __iter__(self) -> t.Iterator[t.AnyStr]: + self.open() + return iter(self._f) # type: ignore + + +class KeepOpenFile: + def __init__(self, file: t.IO[t.Any]) -> None: + self._file: t.IO[t.Any] = file + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._file, name) + + def __enter__(self) -> "KeepOpenFile": + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + pass + + def __repr__(self) -> str: + return repr(self._file) + + def __iter__(self) -> t.Iterator[t.AnyStr]: + return iter(self._file) + + +def echo( + message: t.Optional[t.Any] = None, + file: t.Optional[t.IO[t.Any]] = None, + nl: bool = True, + err: bool = False, + color: t.Optional[bool] = None, +) -> None: + """Print a message and newline to stdout or a file. This should be + used instead of :func:`print` because it provides better support + for different data, files, and environments. + + Compared to :func:`print`, this does the following: + + - Ensures that the output encoding is not misconfigured on Linux. + - Supports Unicode in the Windows console. + - Supports writing to binary outputs, and supports writing bytes + to text outputs. + - Supports colors and styles on Windows. + - Removes ANSI color and style codes if the output does not look + like an interactive terminal. + - Always flushes the output. + + :param message: The string or bytes to output. Other objects are + converted to strings. 
+ :param file: The file to write to. Defaults to ``stdout``. + :param err: Write to ``stderr`` instead of ``stdout``. + :param nl: Print a newline after the message. Enabled by default. + :param color: Force showing or hiding colors and other styles. By + default Click will remove color if the output does not look like + an interactive terminal. + + .. versionchanged:: 6.0 + Support Unicode output on the Windows console. Click does not + modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` + will still not support Unicode. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionadded:: 3.0 + Added the ``err`` parameter. + + .. versionchanged:: 2.0 + Support colors on Windows if colorama is installed. + """ + if file is None: + if err: + file = _default_text_stderr() + else: + file = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. + if file is None: + return + + # Convert non bytes/text into the native string type. + if message is not None and not isinstance(message, (str, bytes, bytearray)): + out: t.Optional[t.Union[str, bytes]] = str(message) + else: + out = message + + if nl: + out = out or "" + if isinstance(out, str): + out += "\n" + else: + out += b"\n" + + if not out: + file.flush() + return + + # If there is a message and the value looks like bytes, we manually + # need to find the binary stream and write the message in there. + # This is done separately so that most stream types will work as you + # would expect. Eg: you can write to StringIO for other cases. + if isinstance(out, (bytes, bytearray)): + binary_file = _find_binary_writer(file) + + if binary_file is not None: + file.flush() + binary_file.write(out) + binary_file.flush() + return + + # ANSI style code support. For no message or bytes, nothing happens. + # When outputting to a file instead of a terminal, strip codes. + else: + color = resolve_color_default(color) + + if should_strip_ansi(file, color): + out = strip_ansi(out) + elif WIN: + if auto_wrap_for_ansi is not None: + file = auto_wrap_for_ansi(file) # type: ignore + elif not color: + out = strip_ansi(out) + + file.write(out) # type: ignore + file.flush() + + +def get_binary_stream(name: "te.Literal['stdin', 'stdout', 'stderr']") -> t.BinaryIO: + """Returns a system stream for byte processing. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + """ + opener = binary_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener() + + +def get_text_stream( + name: "te.Literal['stdin', 'stdout', 'stderr']", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", +) -> t.TextIO: + """Returns a system stream for text processing. This usually returns + a wrapped stream around a binary stream returned from + :func:`get_binary_stream` but it also can take shortcuts for already + correctly configured streams. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + :param encoding: overrides the detected default encoding. + :param errors: overrides the default error mode. 
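+
+    A minimal usage sketch (an illustration of the stream helpers defined here):
+
+    .. code-block:: python
+
+        import click
+
+        out = click.get_text_stream("stdout")
+        out.write("hello\n")
+
+        raw = click.get_binary_stream("stdout")
+        raw.write(b"\x00\x01")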
+ """ + opener = text_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener(encoding, errors) + + +def open_file( + filename: str, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + lazy: bool = False, + atomic: bool = False, +) -> t.IO[t.Any]: + """Open a file, with extra behavior to handle ``'-'`` to indicate + a standard stream, lazy open on write, and atomic write. Similar to + the behavior of the :class:`~click.File` param type. + + If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is + wrapped so that using it in a context manager will not close it. + This makes it possible to use the function without accidentally + closing a standard stream: + + .. code-block:: python + + with open_file(filename) as f: + ... + + :param filename: The name of the file to open, or ``'-'`` for + ``stdin``/``stdout``. + :param mode: The mode in which to open the file. + :param encoding: The encoding to decode or encode a file opened in + text mode. + :param errors: The error handling mode. + :param lazy: Wait to open the file until it is accessed. For read + mode, the file is temporarily opened to raise access errors + early, then closed until it is read again. + :param atomic: Write to a temporary file and replace the given file + on close. + + .. versionadded:: 3.0 + """ + if lazy: + return t.cast( + t.IO[t.Any], LazyFile(filename, mode, encoding, errors, atomic=atomic) + ) + + f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) + + if not should_close: + f = t.cast(t.IO[t.Any], KeepOpenFile(f)) + + return f + + +def format_filename( + filename: "t.Union[str, bytes, os.PathLike[str], os.PathLike[bytes]]", + shorten: bool = False, +) -> str: + """Format a filename as a string for display. Ensures the filename can be + displayed by replacing any invalid bytes or surrogate escapes in the name + with the replacement character ``�``. + + Invalid bytes or surrogate escapes will raise an error when written to a + stream with ``errors="strict". This will typically happen with ``stdout`` + when the locale is something like ``en_GB.UTF-8``. + + Many scenarios *are* safe to write surrogates though, due to PEP 538 and + PEP 540, including: + + - Writing to ``stderr``, which uses ``errors="backslashreplace"``. + - The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens + stdout and stderr with ``errors="surrogateescape"``. + - None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``. + - Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``. + Python opens stdout and stderr with ``errors="surrogateescape"``. + + :param filename: formats a filename for UI display. This will also convert + the filename into unicode without failing. + :param shorten: this optionally shortens the filename to strip of the + path that leads up to it. + """ + if shorten: + filename = os.path.basename(filename) + else: + filename = os.fspath(filename) + + if isinstance(filename, bytes): + filename = filename.decode(sys.getfilesystemencoding(), "replace") + else: + filename = filename.encode("utf-8", "surrogateescape").decode( + "utf-8", "replace" + ) + + return filename + + +def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str: + r"""Returns the config folder for the application. The default behavior + is to return whatever is most appropriate for the operating system. 
+ + To give you an idea, for an app called ``"Foo Bar"``, something like + the following folders could be returned: + + Mac OS X: + ``~/Library/Application Support/Foo Bar`` + Mac OS X (POSIX): + ``~/.foo-bar`` + Unix: + ``~/.config/foo-bar`` + Unix (POSIX): + ``~/.foo-bar`` + Windows (roaming): + ``C:\Users\\AppData\Roaming\Foo Bar`` + Windows (not roaming): + ``C:\Users\\AppData\Local\Foo Bar`` + + .. versionadded:: 2.0 + + :param app_name: the application name. This should be properly capitalized + and can contain whitespace. + :param roaming: controls if the folder should be roaming or not on Windows. + Has no effect otherwise. + :param force_posix: if this is set to `True` then on any POSIX system the + folder will be stored in the home folder with a leading + dot instead of the XDG config home or darwin's + application support folder. + """ + if WIN: + key = "APPDATA" if roaming else "LOCALAPPDATA" + folder = os.environ.get(key) + if folder is None: + folder = os.path.expanduser("~") + return os.path.join(folder, app_name) + if force_posix: + return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}")) + if sys.platform == "darwin": + return os.path.join( + os.path.expanduser("~/Library/Application Support"), app_name + ) + return os.path.join( + os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")), + _posixify(app_name), + ) + + +class PacifyFlushWrapper: + """This wrapper is used to catch and suppress BrokenPipeErrors resulting + from ``.flush()`` being called on broken pipe during the shutdown/final-GC + of the Python interpreter. Notably ``.flush()`` is always called on + ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any + other cleanup code, and the case where the underlying file is not a broken + pipe, all calls and attributes are proxied. + """ + + def __init__(self, wrapped: t.IO[t.Any]) -> None: + self.wrapped = wrapped + + def flush(self) -> None: + try: + self.wrapped.flush() + except OSError as e: + import errno + + if e.errno != errno.EPIPE: + raise + + def __getattr__(self, attr: str) -> t.Any: + return getattr(self.wrapped, attr) + + +def _detect_program_name( + path: t.Optional[str] = None, _main: t.Optional[ModuleType] = None +) -> str: + """Determine the command used to run the program, for use in help + text. If a file or entry point was executed, the file name is + returned. If ``python -m`` was used to execute a module or package, + ``python -m name`` is returned. + + This doesn't try to be too precise, the goal is to give a concise + name for help text. Files are only shown as their name without the + path. ``python`` is only shown for modules, and the full path to + ``sys.executable`` is not shown. + + :param path: The Python file being executed. Python puts this in + ``sys.argv[0]``, which is used by default. + :param _main: The ``__main__`` module. This should only be passed + during internal testing. + + .. versionadded:: 8.0 + Based on command args detection in the Werkzeug reloader. + + :meta private: + """ + if _main is None: + _main = sys.modules["__main__"] + + if not path: + path = sys.argv[0] + + # The value of __package__ indicates how Python was called. It may + # not exist if a setuptools script is installed as an egg. It may be + # set incorrectly for entry points created with pip on Windows. + # It is set to "" inside a Shiv or PEX zipapp. 
+ if getattr(_main, "__package__", None) in {None, ""} or ( + os.name == "nt" + and _main.__package__ == "" + and not os.path.exists(path) + and os.path.exists(f"{path}.exe") + ): + # Executed a file, like "python app.py". + return os.path.basename(path) + + # Executed a module, like "python -m example". + # Rewritten by Python from "-m script" to "/path/to/script.py". + # Need to look at main module to determine how it was executed. + py_module = t.cast(str, _main.__package__) + name = os.path.splitext(os.path.basename(path))[0] + + # A submodule like "example.cli". + if name != "__main__": + py_module = f"{py_module}.{name}" + + return f"python -m {py_module.lstrip('.')}" + + +def _expand_args( + args: t.Iterable[str], + *, + user: bool = True, + env: bool = True, + glob_recursive: bool = True, +) -> t.List[str]: + """Simulate Unix shell expansion with Python functions. + + See :func:`glob.glob`, :func:`os.path.expanduser`, and + :func:`os.path.expandvars`. + + This is intended for use on Windows, where the shell does not do any + expansion. It may not exactly match what a Unix shell would do. + + :param args: List of command line arguments to expand. + :param user: Expand user home directory. + :param env: Expand environment variables. + :param glob_recursive: ``**`` matches directories recursively. + + .. versionchanged:: 8.1 + Invalid glob patterns are treated as empty expansions rather + than raising an error. + + .. versionadded:: 8.0 + + :meta private: + """ + from glob import glob + + out = [] + + for arg in args: + if user: + arg = os.path.expanduser(arg) + + if env: + arg = os.path.expandvars(arg) + + try: + matches = glob(arg, recursive=glob_recursive) + except re.error: + matches = [] + + if not matches: + out.append(arg) + else: + out.extend(matches) + + return out diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/INSTALLER b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/INSTALLER new file mode 100644 index 000000000..ce3133dc1 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/INSTALLER @@ -0,0 +1 @@ +Poetry 2.2.1 \ No newline at end of file diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/LICENSE.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/LICENSE.txt new file mode 100644 index 000000000..f433b1a53 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/LICENSE.txt @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/METADATA b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/METADATA new file mode 100644 index 000000000..25a6049c4 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/METADATA @@ -0,0 +1,190 @@ +Metadata-Version: 2.1 +Name: coverage +Version: 5.5 +Summary: Code coverage measurement for Python +Home-page: https://github.com/nedbat/coveragepy +Author: Ned Batchelder and 142 others +Author-email: ned@nedbatchelder.com +License: Apache 2.0 +Project-URL: Documentation, https://coverage.readthedocs.io +Project-URL: Funding, https://tidelift.com/subscription/pkg/pypi-coverage?utm_source=pypi-coverage&utm_medium=referral&utm_campaign=pypi +Project-URL: Issues, https://github.com/nedbat/coveragepy/issues +Keywords: code coverage testing +Platform: UNKNOWN +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Quality Assurance +Classifier: Topic :: Software Development :: Testing +Classifier: Development Status :: 5 - Production/Stable +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4 +Description-Content-Type: text/x-rst +Provides-Extra: toml +Requires-Dist: toml ; extra == 'toml' + +.. Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +.. For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +=========== +Coverage.py +=========== + +Code coverage testing for Python. + +| |license| |versions| |status| +| |test-status| |quality-status| |docs| |codecov| +| |kit| |format| |repos| |downloads| +| |stars| |forks| |contributors| +| |tidelift| |twitter-coveragepy| |twitter-nedbat| + +Coverage.py measures code coverage, typically during test execution. It uses +the code analysis tools and tracing hooks provided in the Python standard +library to determine which lines are executable, and which have been executed. + +Coverage.py runs on many versions of Python: + +* CPython 2.7. +* CPython 3.5 through 3.10 alpha. +* PyPy2 7.3.3 and PyPy3 7.3.3. + +Documentation is on `Read the Docs`_. Code repository and issue tracker are on +`GitHub`_. + +.. _Read the Docs: https://coverage.readthedocs.io/ +.. _GitHub: https://github.com/nedbat/coveragepy + + +**New in 5.x:** SQLite data storage, JSON report, contexts, relative filenames, +dropped support for Python 2.6, 3.3 and 3.4. + + +For Enterprise +-------------- + +.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logo_small.png + :alt: Tidelift + :target: https://tidelift.com/subscription/pkg/pypi-coverage?utm_source=pypi-coverage&utm_medium=referral&utm_campaign=readme + +.. 
list-table:: + :widths: 10 100 + + * - |tideliftlogo| + - `Available as part of the Tidelift Subscription. `_ + Coverage and thousands of other packages are working with + Tidelift to deliver one enterprise subscription that covers all of the open + source you use. If you want the flexibility of open source and the confidence + of commercial-grade software, this is for you. + `Learn more. `_ + + +Getting Started +--------------- + +See the `Quick Start section`_ of the docs. + +.. _Quick Start section: https://coverage.readthedocs.io/#quick-start + + +Change history +-------------- + +The complete history of changes is on the `change history page`_. + +.. _change history page: https://coverage.readthedocs.io/en/latest/changes.html + + +Contributing +------------ + +See the `Contributing section`_ of the docs. + +.. _Contributing section: https://coverage.readthedocs.io/en/latest/contributing.html + + +Security +-------- + +To report a security vulnerability, please use the `Tidelift security +contact`_. Tidelift will coordinate the fix and disclosure. + +.. _Tidelift security contact: https://tidelift.com/security + + +License +------- + +Licensed under the `Apache 2.0 License`_. For details, see `NOTICE.txt`_. + +.. _Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 +.. _NOTICE.txt: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + + +.. |test-status| image:: https://github.com/nedbat/coveragepy/actions/workflows/testsuite.yml/badge.svg?branch=master&event=push + :target: https://github.com/nedbat/coveragepy/actions/workflows/testsuite.yml + :alt: Test suite status +.. |quality-status| image:: https://github.com/nedbat/coveragepy/actions/workflows/quality.yml/badge.svg?branch=master&event=push + :target: https://github.com/nedbat/coveragepy/actions/workflows/quality.yml + :alt: Quality check status +.. |docs| image:: https://readthedocs.org/projects/coverage/badge/?version=latest&style=flat + :target: https://coverage.readthedocs.io/ + :alt: Documentation +.. |reqs| image:: https://requires.io/github/nedbat/coveragepy/requirements.svg?branch=master + :target: https://requires.io/github/nedbat/coveragepy/requirements/?branch=master + :alt: Requirements status +.. |kit| image:: https://badge.fury.io/py/coverage.svg + :target: https://pypi.org/project/coverage/ + :alt: PyPI status +.. |format| image:: https://img.shields.io/pypi/format/coverage.svg + :target: https://pypi.org/project/coverage/ + :alt: Kit format +.. |downloads| image:: https://img.shields.io/pypi/dw/coverage.svg + :target: https://pypi.org/project/coverage/ + :alt: Weekly PyPI downloads +.. |versions| image:: https://img.shields.io/pypi/pyversions/coverage.svg?logo=python&logoColor=FBE072 + :target: https://pypi.org/project/coverage/ + :alt: Python versions supported +.. |status| image:: https://img.shields.io/pypi/status/coverage.svg + :target: https://pypi.org/project/coverage/ + :alt: Package stability +.. |license| image:: https://img.shields.io/pypi/l/coverage.svg + :target: https://pypi.org/project/coverage/ + :alt: License +.. |codecov| image:: https://codecov.io/github/nedbat/coveragepy/coverage.svg?branch=master&precision=2 + :target: https://codecov.io/github/nedbat/coveragepy?branch=master + :alt: Coverage! +.. |repos| image:: https://repology.org/badge/tiny-repos/python:coverage.svg + :target: https://repology.org/metapackage/python:coverage/versions + :alt: Packaging status +.. 
|tidelift| image:: https://tidelift.com/badges/package/pypi/coverage + :target: https://tidelift.com/subscription/pkg/pypi-coverage?utm_source=pypi-coverage&utm_medium=referral&utm_campaign=readme + :alt: Tidelift +.. |stars| image:: https://img.shields.io/github/stars/nedbat/coveragepy.svg?logo=github + :target: https://github.com/nedbat/coveragepy/stargazers + :alt: Github stars +.. |forks| image:: https://img.shields.io/github/forks/nedbat/coveragepy.svg?logo=github + :target: https://github.com/nedbat/coveragepy/network/members + :alt: Github forks +.. |contributors| image:: https://img.shields.io/github/contributors/nedbat/coveragepy.svg?logo=github + :target: https://github.com/nedbat/coveragepy/graphs/contributors + :alt: Contributors +.. |twitter-coveragepy| image:: https://img.shields.io/twitter/follow/coveragepy.svg?label=coveragepy&style=flat&logo=twitter&logoColor=4FADFF + :target: https://twitter.com/coveragepy + :alt: coverage.py on Twitter +.. |twitter-nedbat| image:: https://img.shields.io/twitter/follow/nedbat.svg?label=nedbat&style=flat&logo=twitter&logoColor=4FADFF + :target: https://twitter.com/nedbat + :alt: nedbat on Twitter + + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/RECORD b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/RECORD new file mode 100644 index 000000000..8c97e60de --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/RECORD @@ -0,0 +1,61 @@ +../../../bin/coverage,sha256=s4uEfX3hOPidQXA9E3zkgCF92t1Hq4nzAip-Wv3QvP0,275 +../../../bin/coverage-3.8,sha256=s4uEfX3hOPidQXA9E3zkgCF92t1Hq4nzAip-Wv3QvP0,275 +../../../bin/coverage3,sha256=s4uEfX3hOPidQXA9E3zkgCF92t1Hq4nzAip-Wv3QvP0,275 +coverage-5.5.dist-info/METADATA,sha256=M3jsErof7Jju0-2_tJMY3jfbYE5kPv_kydK7ehqsghk,7980 +coverage-5.5.dist-info/LICENSE.txt,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 +coverage-5.5.dist-info/top_level.txt,sha256=BjhyiIvusb5OJkqCXjRncTF3soKF-mDOby-hxkWwwv0,9 +coverage-5.5.dist-info/WHEEL,sha256=3Rn77v1cOIQWUJHbJxwJJ4HycjuBQ6DIO-hbFDX-kLU,111 +coverage-5.5.dist-info/entry_points.txt,sha256=ldb8Seg6KXEyNviat4q5qt4neooZgn7PybptoyVb6YA,123 +coverage/annotate.py,sha256=FCS90-FKxNZ1wUBNmGZvFp0f0fsxL3r9jjgA5CDG8VY,3557 +coverage/disposition.py,sha256=HF2eJULSz9NEMOgN22daJJvgBzJtvEKtsj2gw8ZK26E,1255 +coverage/report.py,sha256=AvFOS1l_kr0qMBpaQTOzV-tgkrt4N2CvMJAtBc0ZpSQ,3222 +coverage/__main__.py,sha256=IOd5fAsdpJd1t8ZyrkGcFk-eqMd3Sdc2qbhNb8YQBW0,257 +coverage/xmlreport.py,sha256=ogkVOk8rLmFmPh5EaKWxSgQQabxjvUhcTLmTjrV-y1A,8700 +coverage/misc.py,sha256=0sFG3MjjvZ213fJhMNx9MQ_QUj8CLKXZtyl1T5p--CA,11004 +coverage/context.py,sha256=aG4mZVyQROCGqlLhCeKYtNErn6Ai2mG3_byN6ZjjBTo,3091 +coverage/debug.py,sha256=5J6qm0OA2RYCofBk-V-88X1RaPr3gniiFdwgew1Vu28,13872 +coverage/bytecode.py,sha256=RgL-pG1AicUD8ksZ6P_swwfzlTJldCrBvr4oZ-G2Qyc,609 +coverage/results.py,sha256=RrUx7AfIdb0nt7pnd6bI3ecYsoQdSHHoqwjIfeH97Ow,12225 +coverage/phystokens.py,sha256=oqJ3UoLIjBhvC_-moJvXRvIqZfukAgurEjoTOs6Ftcw,10045 +coverage/collector.py,sha256=ypqm0HfmDjjWqdO9SzRjgdgR7oMUmrjQmLV-KTIX27s,17852 +coverage/version.py,sha256=vyDttATHaXSb-mcEkuwQ_0LJAARQAiajZTG0RIFgxIo,1260 +coverage/tracer.cpython-38-x86_64-linux-gnu.so,sha256=vqGlQ63qp_BhDQC6r5KY91cs6Ne2hK_7sHtG7mF2JXg,146808 +coverage/env.py,sha256=xa7bUFbCU9oT6MNAdZO3bC_gAIA5T6sKwKOxqr79AII,4693 +coverage/control.py,sha256=gKoj_dM94zQL16QPcCQZVAUUfySyfi9gc3hMmTMdqfM,42379 +coverage/tomlconfig.py,sha256=gW0kJfOLY-aOZjcmXlJ7E74L3urRYyr4IKHXtuqwJDY,5560 
+coverage/pytracer.py,sha256=mUq-_vZoQa3epjtw0RYMI_rX7oBygw4GrLnCpKOfsG8,10991 +coverage/sqldata.py,sha256=aK_ku63OM9ZgJD_lUWlT4Xtwbu-9AoZmo5MVrmnW6wI,43865 +coverage/inorout.py,sha256=I2VG99xtlde8jVvVeerNiPIX-GCraUsJJ6cpWq_I36o,19752 +coverage/data.py,sha256=o3IDu0lvd1MZowxOQqyHopQQZWrZAHZfx5m31SKI3ok,4550 +coverage/multiproc.py,sha256=eOYolk_Bp1pGpF7Cgc5NcX3QEsuM23ZkKq1fTw1WkFk,3841 +coverage/config.py,sha256=_fDI5Ch-n6p9LOPIMkOZIP7lethVJEiPD26SZzRNsOg,19766 +coverage/parser.py,sha256=zPVmVqsJ2qM0vyuLP7QM2ITpz9jW_jK4TZa9TZQRncM,49599 +coverage/templite.py,sha256=ZrsnMuc0rlMVYb54KGA8Whp1CMh5XsGeu3_7aW-OkBE,10504 +coverage/files.py,sha256=1ZKYpCNdnjdnj1x_yEjJXQE36dnLMDSrUhDgnyduBvw,14326 +coverage/numbits.py,sha256=kPD3WHU9orL6cl5FB56eF5Dz5QQgYUR0tMadI4RDHd8,5529 +coverage/plugin_support.py,sha256=-MQ_4TSZug4u6ReNqNdb1CnAlBHkJH5h5HO03o4HgdI,9101 +coverage/backward.py,sha256=yehFvzNoQ5-7FZqgPVYS-Dn9UIYm3sHuiTjjO5FPnrs,7545 +coverage/plugin.py,sha256=-RMXO-rY06qwW9FWZBO92q1b1ABGNmyNqeKbIALvnOE,18549 +coverage/jsonreport.py,sha256=PZVkril1YUHeQSc7ASNjWxfEKdnMWZeo22Vy6TNrgw0,3686 +coverage/html.py,sha256=bCf5LNwzlw0lejtZLN5j2HpfEQAQ8Lm2qAPB0S9tRaI,17873 +coverage/cmdline.py,sha256=ebxHbxEyucIcGCgaV1lm_OsgX9h6RUDrCLfyGjSAmME,30741 +coverage/summary.py,sha256=So2Yn2s-k0QlsG1keX-H67Vp5-GS4u4T4F19w-WW1R4,5883 +coverage/execfile.py,sha256=sewNsJ4z9GNJVxscKCTz0e_BQsh6npoj1sHNZWlIf04,13444 +coverage/python.py,sha256=LsqDDXMc1qeVxeez058NGTnlSG3xQiSoSa6De04OgrM,7633 +coverage/__init__.py,sha256=8KDKMbvJG6vWupvHNzzHUIV906S6IPu6-vRdWnZg3fI,1283 +coverage/fullcoverage/encodings.py,sha256=DW3X3yoqrI-Waq3l3lgGdIxLFia4gsrjLR03cr_EWX8,2548 +coverage/htmlfiles/coverage_html.js,sha256=0RGp8VupFTT1fgNo4vL5ReNnq7JJLpUw2PZkm5vTXto,19445 +coverage/htmlfiles/favicon_32.png,sha256=vIEA-odDwRvSQ-syWfSwEnWGUWEv2b-Tv4tzTRfwJWE,1732 +coverage/htmlfiles/jquery.min.js,sha256=JCYrqv7xcJKSfD2v52SqpSoqNxuD7SJJzKfkFN-Z-sE,95785 +coverage/htmlfiles/style.css,sha256=jkvVPWTFwAB4Nkqr3jR8P0jpg81i_Hr2364wDYlGqxM,11692 +coverage/htmlfiles/keybd_closed.png,sha256=FNDmw-Lx9aptnMr-XiZ4gh2EGPs17nft-QKmXgilIe0,112 +coverage/htmlfiles/keybd_open.png,sha256=FNDmw-Lx9aptnMr-XiZ4gh2EGPs17nft-QKmXgilIe0,112 +coverage/htmlfiles/jquery.hotkeys.js,sha256=VVqbRGJlCvARPGMJXcSiRRIJwkpged3wG5fQ47gi42U,3065 +coverage/htmlfiles/style.scss,sha256=SE96hwXj-62zfyAodXbhJrWAPuiMk1nJ8diesKEY2Jg,16281 +coverage/htmlfiles/jquery.isonscreen.js,sha256=WEyXE6yTYNzAYfzViwksNu7AJAY5zZBI9q6-J9pF1Zw,1502 +coverage/htmlfiles/jquery.tablesorter.min.js,sha256=t4ifnz2eByQEUafncoSdJUwD2jUt68VY8CzNjAywo08,12795 +coverage/htmlfiles/jquery.ba-throttle-debounce.min.js,sha256=wXepUsOX1VYr5AyWGbIoAqlrvjQz9unPbtEyM7wFBnw,731 +coverage/htmlfiles/index.html,sha256=h-P_bm8PsAdVIbWd4JxZvapsVShntxV7b6VLwqrQfbM,4254 +coverage/htmlfiles/pyfile.html,sha256=kEAzajng44FAbiltAh13XoKAihtJXvlpIBtn71btdQw,4621 +coverage-5.5.dist-info/INSTALLER,sha256=sz0Tnp99msIbbLB51q7U9nA6AvRKcsOeUKhrHH0Ulg4,12 +coverage-5.5.dist-info/RECORD,, diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/WHEEL b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/WHEEL new file mode 100644 index 000000000..3f8f7734c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: false +Tag: cp38-cp38-manylinux2010_x86_64 + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/entry_points.txt 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/entry_points.txt new file mode 100644 index 000000000..58d31949d --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +coverage = coverage.cmdline:main +coverage-3.8 = coverage.cmdline:main +coverage3 = coverage.cmdline:main + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/top_level.txt b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/top_level.txt new file mode 100644 index 000000000..4ebc8aea5 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage-5.5.dist-info/top_level.txt @@ -0,0 +1 @@ +coverage diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/__init__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/__init__.py new file mode 100644 index 000000000..331b304b6 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/__init__.py @@ -0,0 +1,36 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""Code coverage measurement for Python. + +Ned Batchelder +https://nedbatchelder.com/code/coverage + +""" + +import sys + +from coverage.version import __version__, __url__, version_info + +from coverage.control import Coverage, process_startup +from coverage.data import CoverageData +from coverage.misc import CoverageException +from coverage.plugin import CoveragePlugin, FileTracer, FileReporter +from coverage.pytracer import PyTracer + +# Backward compatibility. +coverage = Coverage + +# On Windows, we encode and decode deep enough that something goes wrong and +# the encodings.utf_8 module is loaded and then unloaded, I don't know why. +# Adding a reference here prevents it from being unloaded. Yuk. +import encodings.utf_8 # pylint: disable=wrong-import-position, wrong-import-order + +# Because of the "from coverage.control import fooey" lines at the top of the +# file, there's an entry for coverage.coverage in sys.modules, mapped to None. +# This makes some inspection tools (like pydoc) unable to find the class +# coverage.coverage. So remove that entry. 
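coverage/__init__.py re-exports the public names (``Coverage``, ``CoverageData``, ``CoveragePlugin``, ``process_startup``) and keeps the lowercase ``coverage = Coverage`` alias for backward compatibility. A minimal sketch of driving that API directly, using only methods the command-line code later in this diff also calls (``start``, ``stop``, ``save``, ``report``); the measured statement is a made-up placeholder::

    import coverage

    cov = coverage.Coverage(branch=True)      # same option the CLI passes through
    cov.start()

    # Hypothetical code under measurement.
    total = sum(n * n for n in range(10))

    cov.stop()
    cov.save()                                # writes the .coverage data file
    percent = cov.report(show_missing=True)   # prints a report, returns the total
    print("total coverage: %.1f%%" % percent)
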
+try: + del sys.modules['coverage.coverage'] +except KeyError: + pass diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/__main__.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/__main__.py new file mode 100644 index 000000000..79aa4e2b3 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/__main__.py @@ -0,0 +1,8 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""Coverage.py's main entry point.""" + +import sys +from coverage.cmdline import main +sys.exit(main()) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/annotate.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/annotate.py new file mode 100644 index 000000000..999ab6e55 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/annotate.py @@ -0,0 +1,108 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""Source file annotation for coverage.py.""" + +import io +import os +import re + +from coverage.files import flat_rootname +from coverage.misc import ensure_dir, isolate_module +from coverage.report import get_analysis_to_report + +os = isolate_module(os) + + +class AnnotateReporter(object): + """Generate annotated source files showing line coverage. + + This reporter creates annotated copies of the measured source files. Each + .py file is copied as a .py,cover file, with a left-hand margin annotating + each line:: + + > def h(x): + - if 0: #pragma: no cover + - pass + > if x == 1: + ! a = 1 + > else: + > a = 2 + + > h(2) + + Executed lines use '>', lines not executed use '!', lines excluded from + consideration use '-'. + + """ + + def __init__(self, coverage): + self.coverage = coverage + self.config = self.coverage.config + self.directory = None + + blank_re = re.compile(r"\s*(#|$)") + else_re = re.compile(r"\s*else\s*:\s*(#|$)") + + def report(self, morfs, directory=None): + """Run the report. + + See `coverage.report()` for arguments. + + """ + self.directory = directory + self.coverage.get_data() + for fr, analysis in get_analysis_to_report(self.coverage, morfs): + self.annotate_file(fr, analysis) + + def annotate_file(self, fr, analysis): + """Annotate a single file. + + `fr` is the FileReporter for the file to annotate. + + """ + statements = sorted(analysis.statements) + missing = sorted(analysis.missing) + excluded = sorted(analysis.excluded) + + if self.directory: + ensure_dir(self.directory) + dest_file = os.path.join(self.directory, flat_rootname(fr.relative_filename())) + if dest_file.endswith("_py"): + dest_file = dest_file[:-3] + ".py" + dest_file += ",cover" + else: + dest_file = fr.filename + ",cover" + + with io.open(dest_file, 'w', encoding='utf8') as dest: + i = 0 + j = 0 + covered = True + source = fr.source() + for lineno, line in enumerate(source.splitlines(True), start=1): + while i < len(statements) and statements[i] < lineno: + i += 1 + while j < len(missing) and missing[j] < lineno: + j += 1 + if i < len(statements) and statements[i] == lineno: + covered = j >= len(missing) or missing[j] > lineno + if self.blank_re.match(line): + dest.write(u' ') + elif self.else_re.match(line): + # Special logic for lines containing only 'else:'. + if i >= len(statements) and j >= len(missing): + dest.write(u'! 
') + elif i >= len(statements) or j >= len(missing): + dest.write(u'> ') + elif statements[i] == missing[j]: + dest.write(u'! ') + else: + dest.write(u'> ') + elif lineno in excluded: + dest.write(u'- ') + elif covered: + dest.write(u'> ') + else: + dest.write(u'! ') + + dest.write(line) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/backward.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/backward.py new file mode 100644 index 000000000..ac781ab96 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/backward.py @@ -0,0 +1,267 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""Add things to old Pythons so I can pretend they are newer.""" + +# This file's purpose is to provide modules to be imported from here. +# pylint: disable=unused-import + +import os +import sys + +from datetime import datetime + +from coverage import env + + +# Pythons 2 and 3 differ on where to get StringIO. +try: + from cStringIO import StringIO +except ImportError: + from io import StringIO + +# In py3, ConfigParser was renamed to the more-standard configparser. +# But there's a py3 backport that installs "configparser" in py2, and I don't +# want it because it has annoying deprecation warnings. So try the real py2 +# import first. +try: + import ConfigParser as configparser +except ImportError: + import configparser + +# What's a string called? +try: + string_class = basestring +except NameError: + string_class = str + +# What's a Unicode string called? +try: + unicode_class = unicode +except NameError: + unicode_class = str + +# range or xrange? +try: + range = xrange # pylint: disable=redefined-builtin +except NameError: + range = range + +try: + from itertools import zip_longest +except ImportError: + from itertools import izip_longest as zip_longest + +# Where do we get the thread id from? +try: + from thread import get_ident as get_thread_id +except ImportError: + from threading import get_ident as get_thread_id + +try: + os.PathLike +except AttributeError: + # This is Python 2 and 3 + path_types = (bytes, string_class, unicode_class) +else: + # 3.6+ + path_types = (bytes, str, os.PathLike) + +# shlex.quote is new, but there's an undocumented implementation in "pipes", +# who knew!? +try: + from shlex import quote as shlex_quote +except ImportError: + # Useful function, available under a different (undocumented) name + # in Python versions earlier than 3.3. + from pipes import quote as shlex_quote + +try: + import reprlib +except ImportError: # pragma: not covered + # We need this on Python 2, but in testing environments, a backport is + # installed, so this import isn't used. + import repr as reprlib + +# A function to iterate listlessly over a dict's items, and one to get the +# items as a list. +try: + {}.iteritems +except AttributeError: + # Python 3 + def iitems(d): + """Produce the items from dict `d`.""" + return d.items() + + def litems(d): + """Return a list of items from dict `d`.""" + return list(d.items()) +else: + # Python 2 + def iitems(d): + """Produce the items from dict `d`.""" + return d.iteritems() + + def litems(d): + """Return a list of items from dict `d`.""" + return d.items() + +# Getting the `next` function from an iterator is different in 2 and 3. 
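backward.py gathers these try/except shims so the rest of coverage.py can use one spelling on both Python 2 and 3. A small sketch of how the dict helpers and ``string_class`` defined above are meant to be used; the ``settings`` dict is purely illustrative::

    from coverage.backward import iitems, litems, string_class

    settings = {"branch": True, "timid": False}

    # iitems() is a lazy iterator on both majors; litems() is always a list,
    # which matters when the dict may change while you loop.
    for name, value in iitems(settings):
        assert isinstance(name, string_class)

    snapshot = litems(settings)
    print(sorted(snapshot))
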
+try: + iter([]).next +except AttributeError: + def iternext(seq): + """Get the `next` function for iterating over `seq`.""" + return iter(seq).__next__ +else: + def iternext(seq): + """Get the `next` function for iterating over `seq`.""" + return iter(seq).next + +# Python 3.x is picky about bytes and strings, so provide methods to +# get them right, and make them no-ops in 2.x +if env.PY3: + def to_bytes(s): + """Convert string `s` to bytes.""" + return s.encode('utf8') + + def to_string(b): + """Convert bytes `b` to string.""" + return b.decode('utf8') + + def binary_bytes(byte_values): + """Produce a byte string with the ints from `byte_values`.""" + return bytes(byte_values) + + def byte_to_int(byte): + """Turn a byte indexed from a bytes object into an int.""" + return byte + + def bytes_to_ints(bytes_value): + """Turn a bytes object into a sequence of ints.""" + # In Python 3, iterating bytes gives ints. + return bytes_value + +else: + def to_bytes(s): + """Convert string `s` to bytes (no-op in 2.x).""" + return s + + def to_string(b): + """Convert bytes `b` to string.""" + return b + + def binary_bytes(byte_values): + """Produce a byte string with the ints from `byte_values`.""" + return "".join(chr(b) for b in byte_values) + + def byte_to_int(byte): + """Turn a byte indexed from a bytes object into an int.""" + return ord(byte) + + def bytes_to_ints(bytes_value): + """Turn a bytes object into a sequence of ints.""" + for byte in bytes_value: + yield ord(byte) + + +try: + # In Python 2.x, the builtins were in __builtin__ + BUILTINS = sys.modules['__builtin__'] +except KeyError: + # In Python 3.x, they're in builtins + BUILTINS = sys.modules['builtins'] + + +# imp was deprecated in Python 3.3 +try: + import importlib + import importlib.util + imp = None +except ImportError: + importlib = None + +# We only want to use importlib if it has everything we need. +try: + importlib_util_find_spec = importlib.util.find_spec +except Exception: + import imp + importlib_util_find_spec = None + +# What is the .pyc magic number for this version of Python? +try: + PYC_MAGIC_NUMBER = importlib.util.MAGIC_NUMBER +except AttributeError: + PYC_MAGIC_NUMBER = imp.get_magic() + + +def code_object(fn): + """Get the code object from a function.""" + try: + return fn.func_code + except AttributeError: + return fn.__code__ + + +try: + from types import SimpleNamespace +except ImportError: + # The code from https://docs.python.org/3/library/types.html#types.SimpleNamespace + class SimpleNamespace: + """Python implementation of SimpleNamespace, for Python 2.""" + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + def __repr__(self): + keys = sorted(self.__dict__) + items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys) + return "{}({})".format(type(self).__name__, ", ".join(items)) + + +def format_local_datetime(dt): + """Return a string with local timezone representing the date. + If python version is lower than 3.6, the time zone is not included. + """ + try: + return dt.astimezone().strftime('%Y-%m-%d %H:%M %z') + except (TypeError, ValueError): + # Datetime.astimezone in Python 3.5 can not handle naive datetime + return dt.strftime('%Y-%m-%d %H:%M') + + +def invalidate_import_caches(): + """Invalidate any import caches that may or may not exist.""" + if importlib and hasattr(importlib, "invalidate_caches"): + importlib.invalidate_caches() + + +def import_local_file(modname, modfile=None): + """Import a local file as a module. 
+ + Opens a file in the current directory named `modname`.py, imports it + as `modname`, and returns the module object. `modfile` is the file to + import if it isn't in the current directory. + + """ + try: + import importlib.util as importlib_util + except ImportError: + importlib_util = None + + if modfile is None: + modfile = modname + '.py' + if importlib_util: + spec = importlib_util.spec_from_file_location(modname, modfile) + mod = importlib_util.module_from_spec(spec) + sys.modules[modname] = mod + spec.loader.exec_module(mod) + else: + for suff in imp.get_suffixes(): # pragma: part covered + if suff[0] == '.py': + break + + with open(modfile, 'r') as f: + # pylint: disable=undefined-loop-variable + mod = imp.load_module(modname, f, modfile, suff) + + return mod diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/bytecode.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/bytecode.py new file mode 100644 index 000000000..ceb18cf37 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/bytecode.py @@ -0,0 +1,19 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""Bytecode manipulation for coverage.py""" + +import types + + +def code_objects(code): + """Iterate over all the code objects in `code`.""" + stack = [code] + while stack: + # We're going to return the code object on the stack, but first + # push its children for later returning. + code = stack.pop() + for c in code.co_consts: + if isinstance(c, types.CodeType): + stack.append(c) + yield code diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/cmdline.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/cmdline.py new file mode 100644 index 000000000..0be0cca19 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/cmdline.py @@ -0,0 +1,910 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""Command-line support for coverage.py.""" + +from __future__ import print_function + +import glob +import optparse +import os.path +import shlex +import sys +import textwrap +import traceback + +import coverage +from coverage import Coverage +from coverage import env +from coverage.collector import CTracer +from coverage.data import line_counts +from coverage.debug import info_formatter, info_header, short_stack +from coverage.execfile import PyRunner +from coverage.misc import BaseCoverageException, ExceptionDuringRun, NoSource, output_encoding +from coverage.results import should_fail_under + + +class Opts(object): + """A namespace class for individual options we'll build parsers from.""" + + append = optparse.make_option( + '-a', '--append', action='store_true', + help="Append coverage data to .coverage, otherwise it starts clean each time.", + ) + keep = optparse.make_option( + '', '--keep', action='store_true', + help="Keep original coverage files, otherwise they are deleted.", + ) + branch = optparse.make_option( + '', '--branch', action='store_true', + help="Measure branch coverage in addition to statement coverage.", + ) + CONCURRENCY_CHOICES = [ + "thread", "gevent", "greenlet", "eventlet", "multiprocessing", + ] + concurrency = optparse.make_option( + '', '--concurrency', action='store', metavar="LIB", + choices=CONCURRENCY_CHOICES, + help=( + "Properly measure code using a concurrency library. 
" + "Valid values are: %s." + ) % ", ".join(CONCURRENCY_CHOICES), + ) + context = optparse.make_option( + '', '--context', action='store', metavar="LABEL", + help="The context label to record for this coverage run.", + ) + debug = optparse.make_option( + '', '--debug', action='store', metavar="OPTS", + help="Debug options, separated by commas. [env: COVERAGE_DEBUG]", + ) + directory = optparse.make_option( + '-d', '--directory', action='store', metavar="DIR", + help="Write the output files to DIR.", + ) + fail_under = optparse.make_option( + '', '--fail-under', action='store', metavar="MIN", type="float", + help="Exit with a status of 2 if the total coverage is less than MIN.", + ) + help = optparse.make_option( + '-h', '--help', action='store_true', + help="Get help on this command.", + ) + ignore_errors = optparse.make_option( + '-i', '--ignore-errors', action='store_true', + help="Ignore errors while reading source files.", + ) + include = optparse.make_option( + '', '--include', action='store', + metavar="PAT1,PAT2,...", + help=( + "Include only files whose paths match one of these patterns. " + "Accepts shell-style wildcards, which must be quoted." + ), + ) + pylib = optparse.make_option( + '-L', '--pylib', action='store_true', + help=( + "Measure coverage even inside the Python installed library, " + "which isn't done by default." + ), + ) + sort = optparse.make_option( + '--sort', action='store', metavar='COLUMN', + help="Sort the report by the named column: name, stmts, miss, branch, brpart, or cover. " + "Default is name." + ) + show_missing = optparse.make_option( + '-m', '--show-missing', action='store_true', + help="Show line numbers of statements in each module that weren't executed.", + ) + skip_covered = optparse.make_option( + '--skip-covered', action='store_true', + help="Skip files with 100% coverage.", + ) + no_skip_covered = optparse.make_option( + '--no-skip-covered', action='store_false', dest='skip_covered', + help="Disable --skip-covered.", + ) + skip_empty = optparse.make_option( + '--skip-empty', action='store_true', + help="Skip files with no code.", + ) + show_contexts = optparse.make_option( + '--show-contexts', action='store_true', + help="Show contexts for covered lines.", + ) + omit = optparse.make_option( + '', '--omit', action='store', + metavar="PAT1,PAT2,...", + help=( + "Omit files whose paths match one of these patterns. " + "Accepts shell-style wildcards, which must be quoted." + ), + ) + contexts = optparse.make_option( + '', '--contexts', action='store', + metavar="REGEX1,REGEX2,...", + help=( + "Only display data from lines covered in the given contexts. " + "Accepts Python regexes, which must be quoted." + ), + ) + output_xml = optparse.make_option( + '-o', '', action='store', dest="outfile", + metavar="OUTFILE", + help="Write the XML report to this file. Defaults to 'coverage.xml'", + ) + output_json = optparse.make_option( + '-o', '', action='store', dest="outfile", + metavar="OUTFILE", + help="Write the JSON report to this file. Defaults to 'coverage.json'", + ) + json_pretty_print = optparse.make_option( + '', '--pretty-print', action='store_true', + help="Format the JSON for human readers.", + ) + parallel_mode = optparse.make_option( + '-p', '--parallel-mode', action='store_true', + help=( + "Append the machine name, process id and random number to the " + ".coverage data file name to simplify collecting data from " + "many processes." 
+ ), + ) + module = optparse.make_option( + '-m', '--module', action='store_true', + help=( + " is an importable Python module, not a script path, " + "to be run as 'python -m' would run it." + ), + ) + precision = optparse.make_option( + '', '--precision', action='store', metavar='N', type=int, + help=( + "Number of digits after the decimal point to display for " + "reported coverage percentages." + ), + ) + rcfile = optparse.make_option( + '', '--rcfile', action='store', + help=( + "Specify configuration file. " + "By default '.coveragerc', 'setup.cfg', 'tox.ini', and " + "'pyproject.toml' are tried. [env: COVERAGE_RCFILE]" + ), + ) + source = optparse.make_option( + '', '--source', action='store', metavar="SRC1,SRC2,...", + help="A list of packages or directories of code to be measured.", + ) + timid = optparse.make_option( + '', '--timid', action='store_true', + help=( + "Use a simpler but slower trace method. Try this if you get " + "seemingly impossible results!" + ), + ) + title = optparse.make_option( + '', '--title', action='store', metavar="TITLE", + help="A text string to use as the title on the HTML.", + ) + version = optparse.make_option( + '', '--version', action='store_true', + help="Display version information and exit.", + ) + + +class CoverageOptionParser(optparse.OptionParser, object): + """Base OptionParser for coverage.py. + + Problems don't exit the program. + Defaults are initialized for all options. + + """ + + def __init__(self, *args, **kwargs): + super(CoverageOptionParser, self).__init__( + add_help_option=False, *args, **kwargs + ) + self.set_defaults( + action=None, + append=None, + branch=None, + concurrency=None, + context=None, + debug=None, + directory=None, + fail_under=None, + help=None, + ignore_errors=None, + include=None, + keep=None, + module=None, + omit=None, + contexts=None, + parallel_mode=None, + precision=None, + pylib=None, + rcfile=True, + show_missing=None, + skip_covered=None, + skip_empty=None, + show_contexts=None, + sort=None, + source=None, + timid=None, + title=None, + version=None, + ) + + self.disable_interspersed_args() + + class OptionParserError(Exception): + """Used to stop the optparse error handler ending the process.""" + pass + + def parse_args_ok(self, args=None, options=None): + """Call optparse.parse_args, but return a triple: + + (ok, options, args) + + """ + try: + options, args = super(CoverageOptionParser, self).parse_args(args, options) + except self.OptionParserError: + return False, None, None + return True, options, args + + def error(self, msg): + """Override optparse.error so sys.exit doesn't get called.""" + show_help(msg) + raise self.OptionParserError + + +class GlobalOptionParser(CoverageOptionParser): + """Command-line parser for coverage.py global option arguments.""" + + def __init__(self): + super(GlobalOptionParser, self).__init__() + + self.add_options([ + Opts.help, + Opts.version, + ]) + + +class CmdOptionParser(CoverageOptionParser): + """Parse one of the new-style commands for coverage.py.""" + + def __init__(self, action, options, defaults=None, usage=None, description=None): + """Create an OptionParser for a coverage.py command. + + `action` is the slug to put into `options.action`. + `options` is a list of Option's for the command. + `defaults` is a dict of default value for options. + `usage` is the usage string to display in help. + `description` is the description of the command, for the help text. 
+ + """ + if usage: + usage = "%prog " + usage + super(CmdOptionParser, self).__init__( + usage=usage, + description=description, + ) + self.set_defaults(action=action, **(defaults or {})) + self.add_options(options) + self.cmd = action + + def __eq__(self, other): + # A convenience equality, so that I can put strings in unit test + # results, and they will compare equal to objects. + return (other == "" % self.cmd) + + __hash__ = None # This object doesn't need to be hashed. + + def get_prog_name(self): + """Override of an undocumented function in optparse.OptionParser.""" + program_name = super(CmdOptionParser, self).get_prog_name() + + # Include the sub-command for this parser as part of the command. + return "{command} {subcommand}".format(command=program_name, subcommand=self.cmd) + + +GLOBAL_ARGS = [ + Opts.debug, + Opts.help, + Opts.rcfile, + ] + +CMDS = { + 'annotate': CmdOptionParser( + "annotate", + [ + Opts.directory, + Opts.ignore_errors, + Opts.include, + Opts.omit, + ] + GLOBAL_ARGS, + usage="[options] [modules]", + description=( + "Make annotated copies of the given files, marking statements that are executed " + "with > and statements that are missed with !." + ), + ), + + 'combine': CmdOptionParser( + "combine", + [ + Opts.append, + Opts.keep, + ] + GLOBAL_ARGS, + usage="[options] ... ", + description=( + "Combine data from multiple coverage files collected " + "with 'run -p'. The combined results are written to a single " + "file representing the union of the data. The positional " + "arguments are data files or directories containing data files. " + "If no paths are provided, data files in the default data file's " + "directory are combined." + ), + ), + + 'debug': CmdOptionParser( + "debug", GLOBAL_ARGS, + usage="", + description=( + "Display information about the internals of coverage.py, " + "for diagnosing problems. " + "Topics are: " + "'data' to show a summary of the collected data; " + "'sys' to show installation information; " + "'config' to show the configuration; " + "'premain' to show what is calling coverage." + ), + ), + + 'erase': CmdOptionParser( + "erase", GLOBAL_ARGS, + description="Erase previously collected coverage data.", + ), + + 'help': CmdOptionParser( + "help", GLOBAL_ARGS, + usage="[command]", + description="Describe how to use coverage.py", + ), + + 'html': CmdOptionParser( + "html", + [ + Opts.contexts, + Opts.directory, + Opts.fail_under, + Opts.ignore_errors, + Opts.include, + Opts.omit, + Opts.precision, + Opts.show_contexts, + Opts.skip_covered, + Opts.no_skip_covered, + Opts.skip_empty, + Opts.title, + ] + GLOBAL_ARGS, + usage="[options] [modules]", + description=( + "Create an HTML report of the coverage of the files. " + "Each file gets its own page, with the source decorated to show " + "executed, excluded, and missed lines." + ), + ), + + 'json': CmdOptionParser( + "json", + [ + Opts.contexts, + Opts.fail_under, + Opts.ignore_errors, + Opts.include, + Opts.omit, + Opts.output_json, + Opts.json_pretty_print, + Opts.show_contexts, + ] + GLOBAL_ARGS, + usage="[options] [modules]", + description="Generate a JSON report of coverage results." + ), + + 'report': CmdOptionParser( + "report", + [ + Opts.contexts, + Opts.fail_under, + Opts.ignore_errors, + Opts.include, + Opts.omit, + Opts.precision, + Opts.sort, + Opts.show_missing, + Opts.skip_covered, + Opts.no_skip_covered, + Opts.skip_empty, + ] + GLOBAL_ARGS, + usage="[options] [modules]", + description="Report coverage statistics on modules." 
+ ), + + 'run': CmdOptionParser( + "run", + [ + Opts.append, + Opts.branch, + Opts.concurrency, + Opts.context, + Opts.include, + Opts.module, + Opts.omit, + Opts.pylib, + Opts.parallel_mode, + Opts.source, + Opts.timid, + ] + GLOBAL_ARGS, + usage="[options] [program options]", + description="Run a Python program, measuring code execution." + ), + + 'xml': CmdOptionParser( + "xml", + [ + Opts.fail_under, + Opts.ignore_errors, + Opts.include, + Opts.omit, + Opts.output_xml, + Opts.skip_empty, + ] + GLOBAL_ARGS, + usage="[options] [modules]", + description="Generate an XML report of coverage results." + ), +} + + +def show_help(error=None, topic=None, parser=None): + """Display an error message, or the named topic.""" + assert error or topic or parser + + program_path = sys.argv[0] + if program_path.endswith(os.path.sep + '__main__.py'): + # The path is the main module of a package; get that path instead. + program_path = os.path.dirname(program_path) + program_name = os.path.basename(program_path) + if env.WINDOWS: + # entry_points={'console_scripts':...} on Windows makes files + # called coverage.exe, coverage3.exe, and coverage-3.5.exe. These + # invoke coverage-script.py, coverage3-script.py, and + # coverage-3.5-script.py. argv[0] is the .py file, but we want to + # get back to the original form. + auto_suffix = "-script.py" + if program_name.endswith(auto_suffix): + program_name = program_name[:-len(auto_suffix)] + + help_params = dict(coverage.__dict__) + help_params['program_name'] = program_name + if CTracer is not None: + help_params['extension_modifier'] = 'with C extension' + else: + help_params['extension_modifier'] = 'without C extension' + + if error: + print(error, file=sys.stderr) + print("Use '%s help' for help." % (program_name,), file=sys.stderr) + elif parser: + print(parser.format_help().strip()) + print() + else: + help_msg = textwrap.dedent(HELP_TOPICS.get(topic, '')).strip() + if help_msg: + print(help_msg.format(**help_params)) + else: + print("Don't know topic %r" % topic) + print("Full documentation is at {__url__}".format(**help_params)) + + +OK, ERR, FAIL_UNDER = 0, 1, 2 + + +class CoverageScript(object): + """The command-line interface to coverage.py.""" + + def __init__(self): + self.global_option = False + self.coverage = None + + def command_line(self, argv): + """The bulk of the command line interface to coverage.py. + + `argv` is the argument list to process. + + Returns 0 if all is well, 1 if something went wrong. + + """ + # Collect the command-line options. + if not argv: + show_help(topic='minimum_help') + return OK + + # The command syntax we parse depends on the first argument. Global + # switch syntax always starts with an option. + self.global_option = argv[0].startswith('-') + if self.global_option: + parser = GlobalOptionParser() + else: + parser = CMDS.get(argv[0]) + if not parser: + show_help("Unknown command: '%s'" % argv[0]) + return ERR + argv = argv[1:] + + ok, options, args = parser.parse_args_ok(argv) + if not ok: + return ERR + + # Handle help and version. + if self.do_help(options, args, parser): + return OK + + # Listify the list options. + source = unshell_list(options.source) + omit = unshell_list(options.omit) + include = unshell_list(options.include) + debug = unshell_list(options.debug) + contexts = unshell_list(options.contexts) + + # Do something. 
+ self.coverage = Coverage( + data_suffix=options.parallel_mode, + cover_pylib=options.pylib, + timid=options.timid, + branch=options.branch, + config_file=options.rcfile, + source=source, + omit=omit, + include=include, + debug=debug, + concurrency=options.concurrency, + check_preimported=True, + context=options.context, + ) + + if options.action == "debug": + return self.do_debug(args) + + elif options.action == "erase": + self.coverage.erase() + return OK + + elif options.action == "run": + return self.do_run(options, args) + + elif options.action == "combine": + if options.append: + self.coverage.load() + data_dirs = args or None + self.coverage.combine(data_dirs, strict=True, keep=bool(options.keep)) + self.coverage.save() + return OK + + # Remaining actions are reporting, with some common options. + report_args = dict( + morfs=unglob_args(args), + ignore_errors=options.ignore_errors, + omit=omit, + include=include, + contexts=contexts, + ) + + # We need to be able to import from the current directory, because + # plugins may try to, for example, to read Django settings. + sys.path.insert(0, '') + + self.coverage.load() + + total = None + if options.action == "report": + total = self.coverage.report( + show_missing=options.show_missing, + skip_covered=options.skip_covered, + skip_empty=options.skip_empty, + precision=options.precision, + sort=options.sort, + **report_args + ) + elif options.action == "annotate": + self.coverage.annotate(directory=options.directory, **report_args) + elif options.action == "html": + total = self.coverage.html_report( + directory=options.directory, + title=options.title, + skip_covered=options.skip_covered, + skip_empty=options.skip_empty, + show_contexts=options.show_contexts, + precision=options.precision, + **report_args + ) + elif options.action == "xml": + outfile = options.outfile + total = self.coverage.xml_report( + outfile=outfile, skip_empty=options.skip_empty, + **report_args + ) + elif options.action == "json": + outfile = options.outfile + total = self.coverage.json_report( + outfile=outfile, + pretty_print=options.pretty_print, + show_contexts=options.show_contexts, + **report_args + ) + + if total is not None: + # Apply the command line fail-under options, and then use the config + # value, so we can get fail_under from the config file. + if options.fail_under is not None: + self.coverage.set_option("report:fail_under", options.fail_under) + + fail_under = self.coverage.get_option("report:fail_under") + precision = self.coverage.get_option("report:precision") + if should_fail_under(total, fail_under, precision): + msg = "total of {total:.{p}f} is less than fail-under={fail_under:.{p}f}".format( + total=total, fail_under=fail_under, p=precision, + ) + print("Coverage failure:", msg) + return FAIL_UNDER + + return OK + + def do_help(self, options, args, parser): + """Deal with help requests. + + Return True if it handled the request, False if not. + + """ + # Handle help. + if options.help: + if self.global_option: + show_help(topic='help') + else: + show_help(parser=parser) + return True + + if options.action == "help": + if args: + for a in args: + parser = CMDS.get(a) + if parser: + show_help(parser=parser) + else: + show_help(topic=a) + else: + show_help(topic='help') + return True + + # Handle version. + if options.version: + show_help(topic='version') + return True + + return False + + def do_run(self, options, args): + """Implementation of 'coverage run'.""" + + if not args: + if options.module: + # Specified -m with nothing else. 
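The reporting branch above funnels each report's total through ``should_fail_under`` and maps the outcome onto the ``OK``/``ERR``/``FAIL_UNDER`` statuses. A minimal sketch of exercising that same entry point programmatically; it assumes a ``.coverage`` data file from an earlier ``coverage run`` already exists::

    from coverage.cmdline import CoverageScript, ERR, FAIL_UNDER

    # Same code path the "coverage" console script takes.
    status = CoverageScript().command_line(
        ["report", "--show-missing", "--fail-under", "90"]
    )
    if status == FAIL_UNDER:
        print("total coverage fell below --fail-under")
    elif status == ERR:
        print("something went wrong; see the message printed above")
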
+ show_help("No module specified for -m") + return ERR + command_line = self.coverage.get_option("run:command_line") + if command_line is not None: + args = shlex.split(command_line) + if args and args[0] == "-m": + options.module = True + args = args[1:] + if not args: + show_help("Nothing to do.") + return ERR + + if options.append and self.coverage.get_option("run:parallel"): + show_help("Can't append to data files in parallel mode.") + return ERR + + if options.concurrency == "multiprocessing": + # Can't set other run-affecting command line options with + # multiprocessing. + for opt_name in ['branch', 'include', 'omit', 'pylib', 'source', 'timid']: + # As it happens, all of these options have no default, meaning + # they will be None if they have not been specified. + if getattr(options, opt_name) is not None: + show_help( + "Options affecting multiprocessing must only be specified " + "in a configuration file.\n" + "Remove --{} from the command line.".format(opt_name) + ) + return ERR + + runner = PyRunner(args, as_module=bool(options.module)) + runner.prepare() + + if options.append: + self.coverage.load() + + # Run the script. + self.coverage.start() + code_ran = True + try: + runner.run() + except NoSource: + code_ran = False + raise + finally: + self.coverage.stop() + if code_ran: + self.coverage.save() + + return OK + + def do_debug(self, args): + """Implementation of 'coverage debug'.""" + + if not args: + show_help("What information would you like: config, data, sys, premain?") + return ERR + + for info in args: + if info == 'sys': + sys_info = self.coverage.sys_info() + print(info_header("sys")) + for line in info_formatter(sys_info): + print(" %s" % line) + elif info == 'data': + self.coverage.load() + data = self.coverage.get_data() + print(info_header("data")) + print("path: %s" % data.data_filename()) + if data: + print("has_arcs: %r" % data.has_arcs()) + summary = line_counts(data, fullpath=True) + filenames = sorted(summary.keys()) + print("\n%d files:" % len(filenames)) + for f in filenames: + line = "%s: %d lines" % (f, summary[f]) + plugin = data.file_tracer(f) + if plugin: + line += " [%s]" % plugin + print(line) + else: + print("No data collected") + elif info == 'config': + print(info_header("config")) + config_info = self.coverage.config.__dict__.items() + for line in info_formatter(config_info): + print(" %s" % line) + elif info == "premain": + print(info_header("premain")) + print(short_stack()) + else: + show_help("Don't know what you mean by %r" % info) + return ERR + + return OK + + +def unshell_list(s): + """Turn a command-line argument into a list.""" + if not s: + return None + if env.WINDOWS: + # When running coverage.py as coverage.exe, some of the behavior + # of the shell is emulated: wildcards are expanded into a list of + # file names. So you have to single-quote patterns on the command + # line, but (not) helpfully, the single quotes are included in the + # argument, so we have to strip them off here. + s = s.strip("'") + return s.split(',') + + +def unglob_args(args): + """Interpret shell wildcards for platforms that need it.""" + if env.WINDOWS: + globbed = [] + for arg in args: + if '?' in arg or '*' in arg: + globbed.extend(glob.glob(arg)) + else: + globbed.append(arg) + args = globbed + return args + + +HELP_TOPICS = { + 'help': """\ + Coverage.py, version {__version__} {extension_modifier} + Measure, collect, and report on code coverage in Python programs. 
+ + usage: {program_name} [options] [args] + + Commands: + annotate Annotate source files with execution information. + combine Combine a number of data files. + debug Display information about the internals of coverage.py + erase Erase previously collected coverage data. + help Get help on using coverage.py. + html Create an HTML report. + json Create a JSON report of coverage results. + report Report coverage stats on modules. + run Run a Python program and measure code execution. + xml Create an XML report of coverage results. + + Use "{program_name} help " for detailed help on any command. + """, + + 'minimum_help': """\ + Code coverage for Python, version {__version__} {extension_modifier}. Use '{program_name} help' for help. + """, + + 'version': """\ + Coverage.py, version {__version__} {extension_modifier} + """, +} + + +def main(argv=None): + """The main entry point to coverage.py. + + This is installed as the script entry point. + + """ + if argv is None: + argv = sys.argv[1:] + try: + status = CoverageScript().command_line(argv) + except ExceptionDuringRun as err: + # An exception was caught while running the product code. The + # sys.exc_info() return tuple is packed into an ExceptionDuringRun + # exception. + traceback.print_exception(*err.args) # pylint: disable=no-value-for-parameter + status = ERR + except BaseCoverageException as err: + # A controlled error inside coverage.py: print the message to the user. + msg = err.args[0] + if env.PY2: + msg = msg.encode(output_encoding()) + print(msg) + status = ERR + except SystemExit as err: + # The user called `sys.exit()`. Exit with their argument, if any. + if err.args: + status = err.args[0] + else: + status = None + return status + +# Profiling using ox_profile. Install it from GitHub: +# pip install git+https://github.com/emin63/ox_profile.git +# +# $set_env.py: COVERAGE_PROFILE - Set to use ox_profile. +_profile = os.environ.get("COVERAGE_PROFILE", "") +if _profile: # pragma: debugging + from ox_profile.core.launchers import SimpleLauncher # pylint: disable=import-error + original_main = main + + def main(argv=None): # pylint: disable=function-redefined + """A wrapper around main that profiles.""" + profiler = SimpleLauncher.launch() + try: + return original_main(argv) + finally: + data, _ = profiler.query(re_filter='coverage', max_records=100) + print(profiler.show(query=data, limit=100, sep='', col='')) + profiler.cancel() diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/collector.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/collector.py new file mode 100644 index 000000000..a4f1790dd --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/collector.py @@ -0,0 +1,451 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""Raw data collector for coverage.py.""" + +import os +import sys + +from coverage import env +from coverage.backward import litems, range # pylint: disable=redefined-builtin +from coverage.debug import short_stack +from coverage.disposition import FileDisposition +from coverage.misc import CoverageException, isolate_module +from coverage.pytracer import PyTracer + +os = isolate_module(os) + + +try: + # Use the C extension code when we can, for speed. + from coverage.tracer import CTracer, CFileDisposition +except ImportError: + # Couldn't import the C extension, maybe it isn't built. 
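cmdline.py above decides between "with C extension" and "without C extension" by checking whether ``CTracer`` imported, and collector.py falls back to the pure-Python tracer the same way. A quick check of which tracer a given installation will use, relying only on names imported in this diff::

    from coverage.collector import CTracer
    from coverage.pytracer import PyTracer

    # Collector picks CTracer when it is importable (and timid mode is off),
    # otherwise the pure-Python PyTracer.
    trace_class = CTracer or PyTracer
    print("tracing with", trace_class.__name__)
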
+ if os.getenv('COVERAGE_TEST_TRACER') == 'c': + # During testing, we use the COVERAGE_TEST_TRACER environment variable + # to indicate that we've fiddled with the environment to test this + # fallback code. If we thought we had a C tracer, but couldn't import + # it, then exit quickly and clearly instead of dribbling confusing + # errors. I'm using sys.exit here instead of an exception because an + # exception here causes all sorts of other noise in unittest. + sys.stderr.write("*** COVERAGE_TEST_TRACER is 'c' but can't import CTracer!\n") + sys.exit(1) + CTracer = None + + +class Collector(object): + """Collects trace data. + + Creates a Tracer object for each thread, since they track stack + information. Each Tracer points to the same shared data, contributing + traced data points. + + When the Collector is started, it creates a Tracer for the current thread, + and installs a function to create Tracers for each new thread started. + When the Collector is stopped, all active Tracers are stopped. + + Threads started while the Collector is stopped will never have Tracers + associated with them. + + """ + + # The stack of active Collectors. Collectors are added here when started, + # and popped when stopped. Collectors on the stack are paused when not + # the top, and resumed when they become the top again. + _collectors = [] + + # The concurrency settings we support here. + SUPPORTED_CONCURRENCIES = {"greenlet", "eventlet", "gevent", "thread"} + + def __init__( + self, should_trace, check_include, should_start_context, file_mapper, + timid, branch, warn, concurrency, + ): + """Create a collector. + + `should_trace` is a function, taking a file name and a frame, and + returning a `coverage.FileDisposition object`. + + `check_include` is a function taking a file name and a frame. It returns + a boolean: True if the file should be traced, False if not. + + `should_start_context` is a function taking a frame, and returning a + string. If the frame should be the start of a new context, the string + is the new context. If the frame should not be the start of a new + context, return None. + + `file_mapper` is a function taking a filename, and returning a Unicode + filename. The result is the name that will be recorded in the data + file. + + If `timid` is true, then a slower simpler trace function will be + used. This is important for some environments where manipulation of + tracing functions make the faster more sophisticated trace function not + operate properly. + + If `branch` is true, then branches will be measured. This involves + collecting data on which statements followed each other (arcs). Use + `get_arc_data` to get the arc data. + + `warn` is a warning function, taking a single string message argument + and an optional slug argument which will be a string or None, to be + used if a warning needs to be issued. + + `concurrency` is a list of strings indicating the concurrency libraries + in use. Valid values are "greenlet", "eventlet", "gevent", or "thread" + (the default). Of these four values, only one can be supplied. Other + values are ignored. 
+ + """ + self.should_trace = should_trace + self.check_include = check_include + self.should_start_context = should_start_context + self.file_mapper = file_mapper + self.warn = warn + self.branch = branch + self.threading = None + self.covdata = None + + self.static_context = None + + self.origin = short_stack() + + self.concur_id_func = None + self.mapped_file_cache = {} + + # We can handle a few concurrency options here, but only one at a time. + these_concurrencies = self.SUPPORTED_CONCURRENCIES.intersection(concurrency) + if len(these_concurrencies) > 1: + raise CoverageException("Conflicting concurrency settings: %s" % concurrency) + self.concurrency = these_concurrencies.pop() if these_concurrencies else '' + + try: + if self.concurrency == "greenlet": + import greenlet + self.concur_id_func = greenlet.getcurrent + elif self.concurrency == "eventlet": + import eventlet.greenthread # pylint: disable=import-error,useless-suppression + self.concur_id_func = eventlet.greenthread.getcurrent + elif self.concurrency == "gevent": + import gevent # pylint: disable=import-error,useless-suppression + self.concur_id_func = gevent.getcurrent + elif self.concurrency == "thread" or not self.concurrency: + # It's important to import threading only if we need it. If + # it's imported early, and the program being measured uses + # gevent, then gevent's monkey-patching won't work properly. + import threading + self.threading = threading + else: + raise CoverageException("Don't understand concurrency=%s" % concurrency) + except ImportError: + raise CoverageException( + "Couldn't trace with concurrency=%s, the module isn't installed." % ( + self.concurrency, + ) + ) + + self.reset() + + if timid: + # Being timid: use the simple Python trace function. + self._trace_class = PyTracer + else: + # Being fast: use the C Tracer if it is available, else the Python + # trace function. + self._trace_class = CTracer or PyTracer + + if self._trace_class is CTracer: + self.file_disposition_class = CFileDisposition + self.supports_plugins = True + else: + self.file_disposition_class = FileDisposition + self.supports_plugins = False + + def __repr__(self): + return "" % (id(self), self.tracer_name()) + + def use_data(self, covdata, context): + """Use `covdata` for recording data.""" + self.covdata = covdata + self.static_context = context + self.covdata.set_context(self.static_context) + + def tracer_name(self): + """Return the class name of the tracer we're using.""" + return self._trace_class.__name__ + + def _clear_data(self): + """Clear out existing data, but stay ready for more collection.""" + # We used to used self.data.clear(), but that would remove filename + # keys and data values that were still in use higher up the stack + # when we are called as part of switch_context. + for d in self.data.values(): + d.clear() + + for tracer in self.tracers: + tracer.reset_activity() + + def reset(self): + """Clear collected data, and prepare to collect more.""" + # A dictionary mapping file names to dicts with line number keys (if not + # branch coverage), or mapping file names to dicts with line number + # pairs as keys (if branch coverage). + self.data = {} + + # A dictionary mapping file names to file tracer plugin names that will + # handle them. + self.file_tracers = {} + + self.disabled_plugins = set() + + # The .should_trace_cache attribute is a cache from file names to + # coverage.FileDisposition objects, or None. 
When a file is first + # considered for tracing, a FileDisposition is obtained from + # Coverage.should_trace. Its .trace attribute indicates whether the + # file should be traced or not. If it should be, a plugin with dynamic + # file names can decide not to trace it based on the dynamic file name + # being excluded by the inclusion rules, in which case the + # FileDisposition will be replaced by None in the cache. + if env.PYPY: + import __pypy__ # pylint: disable=import-error + # Alex Gaynor said: + # should_trace_cache is a strictly growing key: once a key is in + # it, it never changes. Further, the keys used to access it are + # generally constant, given sufficient context. That is to say, at + # any given point _trace() is called, pypy is able to know the key. + # This is because the key is determined by the physical source code + # line, and that's invariant with the call site. + # + # This property of a dict with immutable keys, combined with + # call-site-constant keys is a match for PyPy's module dict, + # which is optimized for such workloads. + # + # This gives a 20% benefit on the workload described at + # https://bitbucket.org/pypy/pypy/issue/1871/10x-slower-than-cpython-under-coverage + self.should_trace_cache = __pypy__.newdict("module") + else: + self.should_trace_cache = {} + + # Our active Tracers. + self.tracers = [] + + self._clear_data() + + def _start_tracer(self): + """Start a new Tracer object, and store it in self.tracers.""" + tracer = self._trace_class() + tracer.data = self.data + tracer.trace_arcs = self.branch + tracer.should_trace = self.should_trace + tracer.should_trace_cache = self.should_trace_cache + tracer.warn = self.warn + + if hasattr(tracer, 'concur_id_func'): + tracer.concur_id_func = self.concur_id_func + elif self.concur_id_func: + raise CoverageException( + "Can't support concurrency=%s with %s, only threads are supported" % ( + self.concurrency, self.tracer_name(), + ) + ) + + if hasattr(tracer, 'file_tracers'): + tracer.file_tracers = self.file_tracers + if hasattr(tracer, 'threading'): + tracer.threading = self.threading + if hasattr(tracer, 'check_include'): + tracer.check_include = self.check_include + if hasattr(tracer, 'should_start_context'): + tracer.should_start_context = self.should_start_context + tracer.switch_context = self.switch_context + if hasattr(tracer, 'disable_plugin'): + tracer.disable_plugin = self.disable_plugin + + fn = tracer.start() + self.tracers.append(tracer) + + return fn + + # The trace function has to be set individually on each thread before + # execution begins. Ironically, the only support the threading module has + # for running code before the thread main is the tracing function. So we + # install this as a trace function, and the first time it's called, it does + # the real trace installation. + + def _installation_trace(self, frame, event, arg): + """Called on new threads, installs the real tracer.""" + # Remove ourselves as the trace function. + sys.settrace(None) + # Install the real tracer. + fn = self._start_tracer() + # Invoke the real trace function with the current event, to be sure + # not to lose an event. + if fn: + fn = fn(frame, event, arg) + # Return the new trace function to continue tracing in this scope. + return fn + + def start(self): + """Start collecting trace information.""" + if self._collectors: + self._collectors[-1].pause() + + self.tracers = [] + + # Check to see whether we had a fullcoverage tracer installed. If so, + # get the stack frames it stashed away for us. 
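``_installation_trace`` above exists because the threading module only runs code at thread start through its trace hook: the stub is registered with ``threading.settrace``, and on its first call it swaps in the real tracer and forwards the current event so nothing is lost. A stripped-down sketch of that bootstrap pattern outside coverage.py; the ``real_trace`` body is a placeholder::

    import sys
    import threading

    def real_trace(frame, event, arg):
        # Placeholder for the actual per-thread tracer.
        return real_trace

    def installation_trace(frame, event, arg):
        sys.settrace(None)                     # remove the stub from this thread
        sys.settrace(real_trace)               # install the real tracer
        return real_trace(frame, event, arg)   # replay the event that got us here

    # New threads start tracing with the stub, which immediately upgrades
    # itself; the main thread can simply call sys.settrace(real_trace).
    threading.settrace(installation_trace)
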
+ traces0 = [] + fn0 = sys.gettrace() + if fn0: + tracer0 = getattr(fn0, '__self__', None) + if tracer0: + traces0 = getattr(tracer0, 'traces', []) + + try: + # Install the tracer on this thread. + fn = self._start_tracer() + except: + if self._collectors: + self._collectors[-1].resume() + raise + + # If _start_tracer succeeded, then we add ourselves to the global + # stack of collectors. + self._collectors.append(self) + + # Replay all the events from fullcoverage into the new trace function. + for args in traces0: + (frame, event, arg), lineno = args + try: + fn(frame, event, arg, lineno=lineno) + except TypeError: + raise Exception("fullcoverage must be run with the C trace function.") + + # Install our installation tracer in threading, to jump-start other + # threads. + if self.threading: + self.threading.settrace(self._installation_trace) + + def stop(self): + """Stop collecting trace information.""" + assert self._collectors + if self._collectors[-1] is not self: + print("self._collectors:") + for c in self._collectors: + print(" {!r}\n{}".format(c, c.origin)) + assert self._collectors[-1] is self, ( + "Expected current collector to be %r, but it's %r" % (self, self._collectors[-1]) + ) + + self.pause() + + # Remove this Collector from the stack, and resume the one underneath + # (if any). + self._collectors.pop() + if self._collectors: + self._collectors[-1].resume() + + def pause(self): + """Pause tracing, but be prepared to `resume`.""" + for tracer in self.tracers: + tracer.stop() + stats = tracer.get_stats() + if stats: + print("\nCoverage.py tracer stats:") + for k in sorted(stats.keys()): + print("%20s: %s" % (k, stats[k])) + if self.threading: + self.threading.settrace(None) + + def resume(self): + """Resume tracing after a `pause`.""" + for tracer in self.tracers: + tracer.start() + if self.threading: + self.threading.settrace(self._installation_trace) + else: + self._start_tracer() + + def _activity(self): + """Has any activity been traced? + + Returns a boolean, True if any trace function was invoked. + + """ + return any(tracer.activity() for tracer in self.tracers) + + def switch_context(self, new_context): + """Switch to a new dynamic context.""" + self.flush_data() + if self.static_context: + context = self.static_context + if new_context: + context += "|" + new_context + else: + context = new_context + self.covdata.set_context(context) + + def disable_plugin(self, disposition): + """Disable the plugin mentioned in `disposition`.""" + file_tracer = disposition.file_tracer + plugin = file_tracer._coverage_plugin + plugin_name = plugin._coverage_plugin_name + self.warn("Disabling plug-in {!r} due to previous exception".format(plugin_name)) + plugin._coverage_enabled = False + disposition.trace = False + + def cached_mapped_file(self, filename): + """A locally cached version of file names mapped through file_mapper.""" + key = (type(filename), filename) + try: + return self.mapped_file_cache[key] + except KeyError: + return self.mapped_file_cache.setdefault(key, self.file_mapper(filename)) + + def mapped_file_dict(self, d): + """Return a dict like d, but with keys modified by file_mapper.""" + # The call to litems() ensures that the GIL protects the dictionary + # iterator against concurrent modifications by tracers running + # in other threads. We try three times in case of concurrent + # access, hoping to get a clean copy. 
+ runtime_err = None + for _ in range(3): + try: + items = litems(d) + except RuntimeError as ex: + runtime_err = ex + else: + break + else: + raise runtime_err + + return dict((self.cached_mapped_file(k), v) for k, v in items if v) + + def plugin_was_disabled(self, plugin): + """Record that `plugin` was disabled during the run.""" + self.disabled_plugins.add(plugin._coverage_plugin_name) + + def flush_data(self): + """Save the collected data to our associated `CoverageData`. + + Data may have also been saved along the way. This forces the + last of the data to be saved. + + Returns True if there was data to save, False if not. + """ + if not self._activity(): + return False + + if self.branch: + self.covdata.add_arcs(self.mapped_file_dict(self.data)) + else: + self.covdata.add_lines(self.mapped_file_dict(self.data)) + + file_tracers = { + k: v for k, v in self.file_tracers.items() + if v not in self.disabled_plugins + } + self.covdata.add_file_tracers(self.mapped_file_dict(file_tracers)) + + self._clear_data() + return True diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/config.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/config.py new file mode 100644 index 000000000..7ef7e7ae7 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/config.py @@ -0,0 +1,570 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""Config file for coverage.py""" + +import collections +import copy +import os +import os.path +import re + +from coverage import env +from coverage.backward import configparser, iitems, string_class +from coverage.misc import contract, CoverageException, isolate_module +from coverage.misc import substitute_variables + +from coverage.tomlconfig import TomlConfigParser, TomlDecodeError + +os = isolate_module(os) + + +class HandyConfigParser(configparser.RawConfigParser): + """Our specialization of ConfigParser.""" + + def __init__(self, our_file): + """Create the HandyConfigParser. + + `our_file` is True if this config file is specifically for coverage, + False if we are examining another config file (tox.ini, setup.cfg) + for possible settings. 
+ """ + + configparser.RawConfigParser.__init__(self) + self.section_prefixes = ["coverage:"] + if our_file: + self.section_prefixes.append("") + + def read(self, filenames, encoding=None): + """Read a file name as UTF-8 configuration data.""" + kwargs = {} + if env.PYVERSION >= (3, 2): + kwargs['encoding'] = encoding or "utf-8" + return configparser.RawConfigParser.read(self, filenames, **kwargs) + + def has_option(self, section, option): + for section_prefix in self.section_prefixes: + real_section = section_prefix + section + has = configparser.RawConfigParser.has_option(self, real_section, option) + if has: + return has + return False + + def has_section(self, section): + for section_prefix in self.section_prefixes: + real_section = section_prefix + section + has = configparser.RawConfigParser.has_section(self, real_section) + if has: + return real_section + return False + + def options(self, section): + for section_prefix in self.section_prefixes: + real_section = section_prefix + section + if configparser.RawConfigParser.has_section(self, real_section): + return configparser.RawConfigParser.options(self, real_section) + raise configparser.NoSectionError(section) + + def get_section(self, section): + """Get the contents of a section, as a dictionary.""" + d = {} + for opt in self.options(section): + d[opt] = self.get(section, opt) + return d + + def get(self, section, option, *args, **kwargs): + """Get a value, replacing environment variables also. + + The arguments are the same as `RawConfigParser.get`, but in the found + value, ``$WORD`` or ``${WORD}`` are replaced by the value of the + environment variable ``WORD``. + + Returns the finished value. + + """ + for section_prefix in self.section_prefixes: + real_section = section_prefix + section + if configparser.RawConfigParser.has_option(self, real_section, option): + break + else: + raise configparser.NoOptionError(option, section) + + v = configparser.RawConfigParser.get(self, real_section, option, *args, **kwargs) + v = substitute_variables(v, os.environ) + return v + + def getlist(self, section, option): + """Read a list of strings. + + The value of `section` and `option` is treated as a comma- and newline- + separated list of strings. Each value is stripped of whitespace. + + Returns the list of strings. + + """ + value_list = self.get(section, option) + values = [] + for value_line in value_list.split('\n'): + for value in value_line.split(','): + value = value.strip() + if value: + values.append(value) + return values + + def getregexlist(self, section, option): + """Read a list of full-line regexes. + + The value of `section` and `option` is treated as a newline-separated + list of regexes. Each value is stripped of whitespace. + + Returns the list of strings. + + """ + line_list = self.get(section, option) + value_list = [] + for value in line_list.splitlines(): + value = value.strip() + try: + re.compile(value) + except re.error as e: + raise CoverageException( + "Invalid [%s].%s value %r: %s" % (section, option, value, e) + ) + if value: + value_list.append(value) + return value_list + + +# The default line exclusion regexes. +DEFAULT_EXCLUDE = [ + r'#\s*(pragma|PRAGMA)[:\s]?\s*(no|NO)\s*(cover|COVER)', +] + +# The default partial branch regexes, to be modified by the user. +DEFAULT_PARTIAL = [ + r'#\s*(pragma|PRAGMA)[:\s]?\s*(no|NO)\s*(branch|BRANCH)', +] + +# The default partial branch regexes, based on Python semantics. +# These are any Python branching constructs that can't actually execute all +# their branches. 
+DEFAULT_PARTIAL_ALWAYS = [ + 'while (True|1|False|0):', + 'if (True|1|False|0):', +] + + +class CoverageConfig(object): + """Coverage.py configuration. + + The attributes of this class are the various settings that control the + operation of coverage.py. + + """ + # pylint: disable=too-many-instance-attributes + + def __init__(self): + """Initialize the configuration attributes to their defaults.""" + # Metadata about the config. + # We tried to read these config files. + self.attempted_config_files = [] + # We did read these config files, but maybe didn't find any content for us. + self.config_files_read = [] + # The file that gave us our configuration. + self.config_file = None + self._config_contents = None + + # Defaults for [run] and [report] + self._include = None + self._omit = None + + # Defaults for [run] + self.branch = False + self.command_line = None + self.concurrency = None + self.context = None + self.cover_pylib = False + self.data_file = ".coverage" + self.debug = [] + self.disable_warnings = [] + self.dynamic_context = None + self.note = None + self.parallel = False + self.plugins = [] + self.relative_files = False + self.run_include = None + self.run_omit = None + self.source = None + self.source_pkgs = [] + self.timid = False + self._crash = None + + # Defaults for [report] + self.exclude_list = DEFAULT_EXCLUDE[:] + self.fail_under = 0.0 + self.ignore_errors = False + self.report_include = None + self.report_omit = None + self.partial_always_list = DEFAULT_PARTIAL_ALWAYS[:] + self.partial_list = DEFAULT_PARTIAL[:] + self.precision = 0 + self.report_contexts = None + self.show_missing = False + self.skip_covered = False + self.skip_empty = False + self.sort = None + + # Defaults for [html] + self.extra_css = None + self.html_dir = "htmlcov" + self.html_skip_covered = None + self.html_skip_empty = None + self.html_title = "Coverage report" + self.show_contexts = False + + # Defaults for [xml] + self.xml_output = "coverage.xml" + self.xml_package_depth = 99 + + # Defaults for [json] + self.json_output = "coverage.json" + self.json_pretty_print = False + self.json_show_contexts = False + + # Defaults for [paths] + self.paths = collections.OrderedDict() + + # Options for plugins + self.plugin_options = {} + + MUST_BE_LIST = [ + "debug", "concurrency", "plugins", + "report_omit", "report_include", + "run_omit", "run_include", + ] + + def from_args(self, **kwargs): + """Read config values from `kwargs`.""" + for k, v in iitems(kwargs): + if v is not None: + if k in self.MUST_BE_LIST and isinstance(v, string_class): + v = [v] + setattr(self, k, v) + + @contract(filename=str) + def from_file(self, filename, our_file): + """Read configuration from a .rc file. + + `filename` is a file name to read. + + `our_file` is True if this config file is specifically for coverage, + False if we are examining another config file (tox.ini, setup.cfg) + for possible settings. + + Returns True or False, whether the file could be read, and it had some + coverage.py settings in it. 
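+
+ For example, reading tox.ini with `our_file` False returns False when no
+ coverage settings are found in it, and the caller moves on to the next
+ candidate config file.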
+ + """ + _, ext = os.path.splitext(filename) + if ext == '.toml': + cp = TomlConfigParser(our_file) + else: + cp = HandyConfigParser(our_file) + + self.attempted_config_files.append(filename) + + try: + files_read = cp.read(filename) + except (configparser.Error, TomlDecodeError) as err: + raise CoverageException("Couldn't read config file %s: %s" % (filename, err)) + if not files_read: + return False + + self.config_files_read.extend(map(os.path.abspath, files_read)) + + any_set = False + try: + for option_spec in self.CONFIG_FILE_OPTIONS: + was_set = self._set_attr_from_config_option(cp, *option_spec) + if was_set: + any_set = True + except ValueError as err: + raise CoverageException("Couldn't read config file %s: %s" % (filename, err)) + + # Check that there are no unrecognized options. + all_options = collections.defaultdict(set) + for option_spec in self.CONFIG_FILE_OPTIONS: + section, option = option_spec[1].split(":") + all_options[section].add(option) + + for section, options in iitems(all_options): + real_section = cp.has_section(section) + if real_section: + for unknown in set(cp.options(section)) - options: + raise CoverageException( + "Unrecognized option '[%s] %s=' in config file %s" % ( + real_section, unknown, filename + ) + ) + + # [paths] is special + if cp.has_section('paths'): + for option in cp.options('paths'): + self.paths[option] = cp.getlist('paths', option) + any_set = True + + # plugins can have options + for plugin in self.plugins: + if cp.has_section(plugin): + self.plugin_options[plugin] = cp.get_section(plugin) + any_set = True + + # Was this file used as a config file? If it's specifically our file, + # then it was used. If we're piggybacking on someone else's file, + # then it was only used if we found some settings in it. + if our_file: + used = True + else: + used = any_set + + if used: + self.config_file = os.path.abspath(filename) + with open(filename, "rb") as f: + self._config_contents = f.read() + + return used + + def copy(self): + """Return a copy of the configuration.""" + return copy.deepcopy(self) + + CONFIG_FILE_OPTIONS = [ + # These are *args for _set_attr_from_config_option: + # (attr, where, type_="") + # + # attr is the attribute to set on the CoverageConfig object. + # where is the section:name to read from the configuration file. + # type_ is the optional type to apply, by using .getTYPE to read the + # configuration value from the file. 
+ + # [run] + ('branch', 'run:branch', 'boolean'), + ('command_line', 'run:command_line'), + ('concurrency', 'run:concurrency', 'list'), + ('context', 'run:context'), + ('cover_pylib', 'run:cover_pylib', 'boolean'), + ('data_file', 'run:data_file'), + ('debug', 'run:debug', 'list'), + ('disable_warnings', 'run:disable_warnings', 'list'), + ('dynamic_context', 'run:dynamic_context'), + ('note', 'run:note'), + ('parallel', 'run:parallel', 'boolean'), + ('plugins', 'run:plugins', 'list'), + ('relative_files', 'run:relative_files', 'boolean'), + ('run_include', 'run:include', 'list'), + ('run_omit', 'run:omit', 'list'), + ('source', 'run:source', 'list'), + ('source_pkgs', 'run:source_pkgs', 'list'), + ('timid', 'run:timid', 'boolean'), + ('_crash', 'run:_crash'), + + # [report] + ('exclude_list', 'report:exclude_lines', 'regexlist'), + ('fail_under', 'report:fail_under', 'float'), + ('ignore_errors', 'report:ignore_errors', 'boolean'), + ('partial_always_list', 'report:partial_branches_always', 'regexlist'), + ('partial_list', 'report:partial_branches', 'regexlist'), + ('precision', 'report:precision', 'int'), + ('report_contexts', 'report:contexts', 'list'), + ('report_include', 'report:include', 'list'), + ('report_omit', 'report:omit', 'list'), + ('show_missing', 'report:show_missing', 'boolean'), + ('skip_covered', 'report:skip_covered', 'boolean'), + ('skip_empty', 'report:skip_empty', 'boolean'), + ('sort', 'report:sort'), + + # [html] + ('extra_css', 'html:extra_css'), + ('html_dir', 'html:directory'), + ('html_skip_covered', 'html:skip_covered', 'boolean'), + ('html_skip_empty', 'html:skip_empty', 'boolean'), + ('html_title', 'html:title'), + ('show_contexts', 'html:show_contexts', 'boolean'), + + # [xml] + ('xml_output', 'xml:output'), + ('xml_package_depth', 'xml:package_depth', 'int'), + + # [json] + ('json_output', 'json:output'), + ('json_pretty_print', 'json:pretty_print', 'boolean'), + ('json_show_contexts', 'json:show_contexts', 'boolean'), + ] + + def _set_attr_from_config_option(self, cp, attr, where, type_=''): + """Set an attribute on self if it exists in the ConfigParser. + + Returns True if the attribute was set. + + """ + section, option = where.split(":") + if cp.has_option(section, option): + method = getattr(cp, 'get' + type_) + setattr(self, attr, method(section, option)) + return True + return False + + def get_plugin_options(self, plugin): + """Get a dictionary of options for the plugin named `plugin`.""" + return self.plugin_options.get(plugin, {}) + + def set_option(self, option_name, value): + """Set an option in the configuration. + + `option_name` is a colon-separated string indicating the section and + option name. For example, the ``branch`` option in the ``[run]`` + section of the config file would be indicated with `"run:branch"`. + + `value` is the new value for the option. + + """ + # Special-cased options. + if option_name == "paths": + self.paths = value + return + + # Check all the hard-coded options. + for option_spec in self.CONFIG_FILE_OPTIONS: + attr, where = option_spec[:2] + if where == option_name: + setattr(self, attr, value) + return + + # See if it's a plugin option. + plugin_name, _, key = option_name.partition(":") + if key and plugin_name in self.plugins: + self.plugin_options.setdefault(plugin_name, {})[key] = value + return + + # If we get here, we didn't find the option. + raise CoverageException("No such option: %r" % option_name) + + def get_option(self, option_name): + """Get an option from the configuration. 
+ + `option_name` is a colon-separated string indicating the section and + option name. For example, the ``branch`` option in the ``[run]`` + section of the config file would be indicated with `"run:branch"`. + + Returns the value of the option. + + """ + # Special-cased options. + if option_name == "paths": + return self.paths + + # Check all the hard-coded options. + for option_spec in self.CONFIG_FILE_OPTIONS: + attr, where = option_spec[:2] + if where == option_name: + return getattr(self, attr) + + # See if it's a plugin option. + plugin_name, _, key = option_name.partition(":") + if key and plugin_name in self.plugins: + return self.plugin_options.get(plugin_name, {}).get(key) + + # If we get here, we didn't find the option. + raise CoverageException("No such option: %r" % option_name) + + def post_process_file(self, path): + """Make final adjustments to a file path to make it usable.""" + return os.path.expanduser(path) + + def post_process(self): + """Make final adjustments to settings to make them usable.""" + self.data_file = self.post_process_file(self.data_file) + self.html_dir = self.post_process_file(self.html_dir) + self.xml_output = self.post_process_file(self.xml_output) + self.paths = collections.OrderedDict( + (k, [self.post_process_file(f) for f in v]) + for k, v in self.paths.items() + ) + + +def config_files_to_try(config_file): + """What config files should we try to read? + + Returns a list of tuples: + (filename, is_our_file, was_file_specified) + """ + + # Some API users were specifying ".coveragerc" to mean the same as + # True, so make it so. + if config_file == ".coveragerc": + config_file = True + specified_file = (config_file is not True) + if not specified_file: + # No file was specified. Check COVERAGE_RCFILE. + config_file = os.environ.get('COVERAGE_RCFILE') + if config_file: + specified_file = True + if not specified_file: + # Still no file specified. Default to .coveragerc + config_file = ".coveragerc" + files_to_try = [ + (config_file, True, specified_file), + ("setup.cfg", False, False), + ("tox.ini", False, False), + ("pyproject.toml", False, False), + ] + return files_to_try + + +def read_coverage_config(config_file, **kwargs): + """Read the coverage.py configuration. + + Arguments: + config_file: a boolean or string, see the `Coverage` class for the + tricky details. + all others: keyword arguments from the `Coverage` class, used for + setting values in the configuration. + + Returns: + config: + config is a CoverageConfig object read from the appropriate + configuration file. + + """ + # Build the configuration from a number of sources: + # 1) defaults: + config = CoverageConfig() + + # 2) from a file: + if config_file: + files_to_try = config_files_to_try(config_file) + + for fname, our_file, specified_file in files_to_try: + config_read = config.from_file(fname, our_file=our_file) + if config_read: + break + if specified_file: + raise CoverageException("Couldn't read '%s' as a config file" % fname) + + # $set_env.py: COVERAGE_DEBUG - Options for --debug. + # 3) from environment variables: + env_data_file = os.environ.get('COVERAGE_FILE') + if env_data_file: + config.data_file = env_data_file + debugs = os.environ.get('COVERAGE_DEBUG') + if debugs: + config.debug.extend(d.strip() for d in debugs.split(",")) + + # 4) from constructor arguments: + config.from_args(**kwargs) + + # Once all the config has been collected, there's a little post-processing + # to do. 
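+ # For example, a data_file like "~/.coverage" is expanded to the user's
+ # home directory, and the same expansion is applied to html_dir,
+ # xml_output, and every [paths] entry.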
+ config.post_process() + + return config diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/context.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/context.py new file mode 100644 index 000000000..ea13da21e --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/context.py @@ -0,0 +1,91 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""Determine contexts for coverage.py""" + + +def combine_context_switchers(context_switchers): + """Create a single context switcher from multiple switchers. + + `context_switchers` is a list of functions that take a frame as an + argument and return a string to use as the new context label. + + Returns a function that composites `context_switchers` functions, or None + if `context_switchers` is an empty list. + + When invoked, the combined switcher calls `context_switchers` one-by-one + until a string is returned. The combined switcher returns None if all + `context_switchers` return None. + """ + if not context_switchers: + return None + + if len(context_switchers) == 1: + return context_switchers[0] + + def should_start_context(frame): + """The combiner for multiple context switchers.""" + for switcher in context_switchers: + new_context = switcher(frame) + if new_context is not None: + return new_context + return None + + return should_start_context + + +def should_start_context_test_function(frame): + """Is this frame calling a test_* function?""" + co_name = frame.f_code.co_name + if co_name.startswith("test") or co_name == "runTest": + return qualname_from_frame(frame) + return None + + +def qualname_from_frame(frame): + """Get a qualified name for the code running in `frame`.""" + co = frame.f_code + fname = co.co_name + method = None + if co.co_argcount and co.co_varnames[0] == "self": + self = frame.f_locals["self"] + method = getattr(self, fname, None) + + if method is None: + func = frame.f_globals.get(fname) + if func is None: + return None + return func.__module__ + '.' + fname + + func = getattr(method, '__func__', None) + if func is None: + cls = self.__class__ + return cls.__module__ + '.' + cls.__name__ + "." + fname + + if hasattr(func, '__qualname__'): + qname = func.__module__ + '.' + func.__qualname__ + else: + for cls in getattr(self.__class__, '__mro__', ()): + f = cls.__dict__.get(fname, None) + if f is None: + continue + if f is func: + qname = cls.__module__ + '.' + cls.__name__ + "." + fname + break + else: + # Support for old-style classes. + def mro(bases): + for base in bases: + f = base.__dict__.get(fname, None) + if f is func: + return base.__module__ + '.' + base.__name__ + "." + fname + for base in bases: + qname = mro(base.__bases__) + if qname is not None: + return qname + return None + qname = mro([self.__class__]) + if qname is None: + qname = func.__module__ + '.' 
+ fname + + return qname diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/control.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/control.py new file mode 100644 index 000000000..1623b0932 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/control.py @@ -0,0 +1,1145 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""Core control stuff for coverage.py.""" + +import atexit +import collections +import contextlib +import os +import os.path +import platform +import sys +import time + +from coverage import env +from coverage.annotate import AnnotateReporter +from coverage.backward import string_class, iitems +from coverage.collector import Collector, CTracer +from coverage.config import read_coverage_config +from coverage.context import should_start_context_test_function, combine_context_switchers +from coverage.data import CoverageData, combine_parallel_data +from coverage.debug import DebugControl, short_stack, write_formatted_info +from coverage.disposition import disposition_debug_msg +from coverage.files import PathAliases, abs_file, relative_filename, set_relative_directory +from coverage.html import HtmlReporter +from coverage.inorout import InOrOut +from coverage.jsonreport import JsonReporter +from coverage.misc import CoverageException, bool_or_none, join_regex +from coverage.misc import DefaultValue, ensure_dir_for_file, isolate_module +from coverage.plugin import FileReporter +from coverage.plugin_support import Plugins +from coverage.python import PythonFileReporter +from coverage.report import render_report +from coverage.results import Analysis, Numbers +from coverage.summary import SummaryReporter +from coverage.xmlreport import XmlReporter + +try: + from coverage.multiproc import patch_multiprocessing +except ImportError: # pragma: only jython + # Jython has no multiprocessing module. + patch_multiprocessing = None + +os = isolate_module(os) + +@contextlib.contextmanager +def override_config(cov, **kwargs): + """Temporarily tweak the configuration of `cov`. + + The arguments are applied to `cov.config` with the `from_args` method. + At the end of the with-statement, the old configuration is restored. + """ + original_config = cov.config + cov.config = cov.config.copy() + try: + cov.config.from_args(**kwargs) + yield + finally: + cov.config = original_config + + +_DEFAULT_DATAFILE = DefaultValue("MISSING") + +class Coverage(object): + """Programmatic access to coverage.py. + + To use:: + + from coverage import Coverage + + cov = Coverage() + cov.start() + #.. call your code .. + cov.stop() + cov.html_report(directory='covhtml') + + Note: in keeping with Python custom, names starting with underscore are + not part of the public API. They might stop working at any point. Please + limit yourself to documented methods to avoid problems. + + """ + + # The stack of started Coverage instances. + _instances = [] + + @classmethod + def current(cls): + """Get the latest started `Coverage` instance, if any. + + Returns: a `Coverage` instance, or None. + + .. 
versionadded:: 5.0 + + """ + if cls._instances: + return cls._instances[-1] + else: + return None + + def __init__( + self, data_file=_DEFAULT_DATAFILE, data_suffix=None, cover_pylib=None, + auto_data=False, timid=None, branch=None, config_file=True, + source=None, source_pkgs=None, omit=None, include=None, debug=None, + concurrency=None, check_preimported=False, context=None, + ): # pylint: disable=too-many-arguments + """ + Many of these arguments duplicate and override values that can be + provided in a configuration file. Parameters that are missing here + will use values from the config file. + + `data_file` is the base name of the data file to use. The config value + defaults to ".coverage". None can be provided to prevent writing a data + file. `data_suffix` is appended (with a dot) to `data_file` to create + the final file name. If `data_suffix` is simply True, then a suffix is + created with the machine and process identity included. + + `cover_pylib` is a boolean determining whether Python code installed + with the Python interpreter is measured. This includes the Python + standard library and any packages installed with the interpreter. + + If `auto_data` is true, then any existing data file will be read when + coverage measurement starts, and data will be saved automatically when + measurement stops. + + If `timid` is true, then a slower and simpler trace function will be + used. This is important for some environments where manipulation of + tracing functions breaks the faster trace function. + + If `branch` is true, then branch coverage will be measured in addition + to the usual statement coverage. + + `config_file` determines what configuration file to read: + + * If it is ".coveragerc", it is interpreted as if it were True, + for backward compatibility. + + * If it is a string, it is the name of the file to read. If the + file can't be read, it is an error. + + * If it is True, then a few standard files names are tried + (".coveragerc", "setup.cfg", "tox.ini"). It is not an error for + these files to not be found. + + * If it is False, then no configuration file is read. + + `source` is a list of file paths or package names. Only code located + in the trees indicated by the file paths or package names will be + measured. + + `source_pkgs` is a list of package names. It works the same as + `source`, but can be used to name packages where the name can also be + interpreted as a file path. + + `include` and `omit` are lists of file name patterns. Files that match + `include` will be measured, files that match `omit` will not. Each + will also accept a single string argument. + + `debug` is a list of strings indicating what debugging information is + desired. + + `concurrency` is a string indicating the concurrency library being used + in the measured code. Without this, coverage.py will get incorrect + results if these libraries are in use. Valid strings are "greenlet", + "eventlet", "gevent", "multiprocessing", or "thread" (the default). + This can also be a list of these strings. + + If `check_preimported` is true, then when coverage is started, the + already-imported files will be checked to see if they should be + measured by coverage. Importing measured files before coverage is + started can mean that code is missed. + + `context` is a string to use as the :ref:`static context + ` label for collected data. + + .. versionadded:: 4.0 + The `concurrency` parameter. + + .. versionadded:: 4.2 + The `concurrency` parameter can now be a list of strings. + + .. 
versionadded:: 5.0 + The `check_preimported` and `context` parameters. + + .. versionadded:: 5.3 + The `source_pkgs` parameter. + + """ + # data_file=None means no disk file at all. data_file missing means + # use the value from the config file. + self._no_disk = data_file is None + if data_file is _DEFAULT_DATAFILE: + data_file = None + + # Build our configuration from a number of sources. + self.config = read_coverage_config( + config_file=config_file, + data_file=data_file, cover_pylib=cover_pylib, timid=timid, + branch=branch, parallel=bool_or_none(data_suffix), + source=source, source_pkgs=source_pkgs, run_omit=omit, run_include=include, debug=debug, + report_omit=omit, report_include=include, + concurrency=concurrency, context=context, + ) + + # This is injectable by tests. + self._debug_file = None + + self._auto_load = self._auto_save = auto_data + self._data_suffix_specified = data_suffix + + # Is it ok for no data to be collected? + self._warn_no_data = True + self._warn_unimported_source = True + self._warn_preimported_source = check_preimported + self._no_warn_slugs = None + + # A record of all the warnings that have been issued. + self._warnings = [] + + # Other instance attributes, set later. + self._data = self._collector = None + self._plugins = None + self._inorout = None + self._data_suffix = self._run_suffix = None + self._exclude_re = None + self._debug = None + self._file_mapper = None + + # State machine variables: + # Have we initialized everything? + self._inited = False + self._inited_for_start = False + # Have we started collecting and not stopped it? + self._started = False + # Should we write the debug output? + self._should_write_debug = True + + # If we have sub-process measurement happening automatically, then we + # want any explicit creation of a Coverage object to mean, this process + # is already coverage-aware, so don't auto-measure it. By now, the + # auto-creation of a Coverage object has already happened. But we can + # find it and tell it not to save its data. + if not env.METACOV: + _prevent_sub_process_measurement() + + def _init(self): + """Set all the initial state. + + This is called by the public methods to initialize state. This lets us + construct a :class:`Coverage` object, then tweak its state before this + function is called. + + """ + if self._inited: + return + + self._inited = True + + # Create and configure the debugging controller. COVERAGE_DEBUG_FILE + # is an environment variable, the name of a file to append debug logs + # to. + self._debug = DebugControl(self.config.debug, self._debug_file) + + if "multiprocessing" in (self.config.concurrency or ()): + # Multi-processing uses parallel for the subprocesses, so also use + # it for the main process. + self.config.parallel = True + + # _exclude_re is a dict that maps exclusion list names to compiled regexes. + self._exclude_re = {} + + set_relative_directory() + self._file_mapper = relative_filename if self.config.relative_files else abs_file + + # Load plugins + self._plugins = Plugins.load_plugins(self.config.plugins, self.config, self._debug) + + # Run configuring plugins. + for plugin in self._plugins.configurers: + # We need an object with set_option and get_option. Either self or + # self.config will do. Choosing randomly stops people from doing + # other things with those objects, against the public API. Yes, + # this is a bit childish. 
:) + plugin.configure([self, self.config][int(time.time()) % 2]) + + def _post_init(self): + """Stuff to do after everything is initialized.""" + if self._should_write_debug: + self._should_write_debug = False + self._write_startup_debug() + + # '[run] _crash' will raise an exception if the value is close by in + # the call stack, for testing error handling. + if self.config._crash and self.config._crash in short_stack(limit=4): + raise Exception("Crashing because called by {}".format(self.config._crash)) + + def _write_startup_debug(self): + """Write out debug info at startup if needed.""" + wrote_any = False + with self._debug.without_callers(): + if self._debug.should('config'): + config_info = sorted(self.config.__dict__.items()) + config_info = [(k, v) for k, v in config_info if not k.startswith('_')] + write_formatted_info(self._debug, "config", config_info) + wrote_any = True + + if self._debug.should('sys'): + write_formatted_info(self._debug, "sys", self.sys_info()) + for plugin in self._plugins: + header = "sys: " + plugin._coverage_plugin_name + info = plugin.sys_info() + write_formatted_info(self._debug, header, info) + wrote_any = True + + if wrote_any: + write_formatted_info(self._debug, "end", ()) + + def _should_trace(self, filename, frame): + """Decide whether to trace execution in `filename`. + + Calls `_should_trace_internal`, and returns the FileDisposition. + + """ + disp = self._inorout.should_trace(filename, frame) + if self._debug.should('trace'): + self._debug.write(disposition_debug_msg(disp)) + return disp + + def _check_include_omit_etc(self, filename, frame): + """Check a file name against the include/omit/etc, rules, verbosely. + + Returns a boolean: True if the file should be traced, False if not. + + """ + reason = self._inorout.check_include_omit_etc(filename, frame) + if self._debug.should('trace'): + if not reason: + msg = "Including %r" % (filename,) + else: + msg = "Not including %r: %s" % (filename, reason) + self._debug.write(msg) + + return not reason + + def _warn(self, msg, slug=None, once=False): + """Use `msg` as a warning. + + For warning suppression, use `slug` as the shorthand. + + If `once` is true, only show this warning once (determined by the + slug.) + + """ + if self._no_warn_slugs is None: + self._no_warn_slugs = list(self.config.disable_warnings) + + if slug in self._no_warn_slugs: + # Don't issue the warning + return + + self._warnings.append(msg) + if slug: + msg = "%s (%s)" % (msg, slug) + if self._debug.should('pid'): + msg = "[%d] %s" % (os.getpid(), msg) + sys.stderr.write("Coverage.py warning: %s\n" % msg) + + if once: + self._no_warn_slugs.append(slug) + + def get_option(self, option_name): + """Get an option from the configuration. + + `option_name` is a colon-separated string indicating the section and + option name. For example, the ``branch`` option in the ``[run]`` + section of the config file would be indicated with `"run:branch"`. + + Returns the value of the option. The type depends on the option + selected. + + As a special case, an `option_name` of ``"paths"`` will return an + OrderedDict with the entire ``[paths]`` section value. + + .. versionadded:: 4.0 + + """ + return self.config.get_option(option_name) + + def set_option(self, option_name, value): + """Set an option in the configuration. + + `option_name` is a colon-separated string indicating the section and + option name. For example, the ``branch`` option in the ``[run]`` + section of the config file would be indicated with ``"run:branch"``. 
+ + `value` is the new value for the option. This should be an + appropriate Python value. For example, use True for booleans, not the + string ``"True"``. + + As an example, calling:: + + cov.set_option("run:branch", True) + + has the same effect as this configuration file:: + + [run] + branch = True + + As a special case, an `option_name` of ``"paths"`` will replace the + entire ``[paths]`` section. The value should be an OrderedDict. + + .. versionadded:: 4.0 + + """ + self.config.set_option(option_name, value) + + def load(self): + """Load previously-collected coverage data from the data file.""" + self._init() + if self._collector: + self._collector.reset() + should_skip = self.config.parallel and not os.path.exists(self.config.data_file) + if not should_skip: + self._init_data(suffix=None) + self._post_init() + if not should_skip: + self._data.read() + + def _init_for_start(self): + """Initialization for start()""" + # Construct the collector. + concurrency = self.config.concurrency or () + if "multiprocessing" in concurrency: + if not patch_multiprocessing: + raise CoverageException( # pragma: only jython + "multiprocessing is not supported on this Python" + ) + patch_multiprocessing(rcfile=self.config.config_file) + + dycon = self.config.dynamic_context + if not dycon or dycon == "none": + context_switchers = [] + elif dycon == "test_function": + context_switchers = [should_start_context_test_function] + else: + raise CoverageException( + "Don't understand dynamic_context setting: {!r}".format(dycon) + ) + + context_switchers.extend( + plugin.dynamic_context for plugin in self._plugins.context_switchers + ) + + should_start_context = combine_context_switchers(context_switchers) + + self._collector = Collector( + should_trace=self._should_trace, + check_include=self._check_include_omit_etc, + should_start_context=should_start_context, + file_mapper=self._file_mapper, + timid=self.config.timid, + branch=self.config.branch, + warn=self._warn, + concurrency=concurrency, + ) + + suffix = self._data_suffix_specified + if suffix or self.config.parallel: + if not isinstance(suffix, string_class): + # if data_suffix=True, use .machinename.pid.random + suffix = True + else: + suffix = None + + self._init_data(suffix) + + self._collector.use_data(self._data, self.config.context) + + # Early warning if we aren't going to be able to support plugins. + if self._plugins.file_tracers and not self._collector.supports_plugins: + self._warn( + "Plugin file tracers (%s) aren't supported with %s" % ( + ", ".join( + plugin._coverage_plugin_name + for plugin in self._plugins.file_tracers + ), + self._collector.tracer_name(), + ) + ) + for plugin in self._plugins.file_tracers: + plugin._coverage_enabled = False + + # Create the file classifying substructure. + self._inorout = InOrOut( + warn=self._warn, + debug=(self._debug if self._debug.should('trace') else None), + ) + self._inorout.configure(self.config) + self._inorout.plugins = self._plugins + self._inorout.disp_class = self._collector.file_disposition_class + + # It's useful to write debug info after initing for start. + self._should_write_debug = True + + atexit.register(self._atexit) + + def _init_data(self, suffix): + """Create a data file if we don't have one yet.""" + if self._data is None: + # Create the data file. We do this at construction time so that the + # data file will be written into the directory where the process + # started rather than wherever the process eventually chdir'd to. 
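+ # For example, a parallel run with data_suffix=True produces files named
+ # roughly ".coverage.machinename.pid.random", which a later "coverage
+ # combine" merges into a single data file (illustrative pattern).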
+ ensure_dir_for_file(self.config.data_file) + self._data = CoverageData( + basename=self.config.data_file, + suffix=suffix, + warn=self._warn, + debug=self._debug, + no_disk=self._no_disk, + ) + + def start(self): + """Start measuring code coverage. + + Coverage measurement only occurs in functions called after + :meth:`start` is invoked. Statements in the same scope as + :meth:`start` won't be measured. + + Once you invoke :meth:`start`, you must also call :meth:`stop` + eventually, or your process might not shut down cleanly. + + """ + self._init() + if not self._inited_for_start: + self._inited_for_start = True + self._init_for_start() + self._post_init() + + # Issue warnings for possible problems. + self._inorout.warn_conflicting_settings() + + # See if we think some code that would eventually be measured has + # already been imported. + if self._warn_preimported_source: + self._inorout.warn_already_imported_files() + + if self._auto_load: + self.load() + + self._collector.start() + self._started = True + self._instances.append(self) + + def stop(self): + """Stop measuring code coverage.""" + if self._instances: + if self._instances[-1] is self: + self._instances.pop() + if self._started: + self._collector.stop() + self._started = False + + def _atexit(self): + """Clean up on process shutdown.""" + if self._debug.should("process"): + self._debug.write("atexit: pid: {}, instance: {!r}".format(os.getpid(), self)) + if self._started: + self.stop() + if self._auto_save: + self.save() + + def erase(self): + """Erase previously collected coverage data. + + This removes the in-memory data collected in this session as well as + discarding the data file. + + """ + self._init() + self._post_init() + if self._collector: + self._collector.reset() + self._init_data(suffix=None) + self._data.erase(parallel=self.config.parallel) + self._data = None + self._inited_for_start = False + + def switch_context(self, new_context): + """Switch to a new dynamic context. + + `new_context` is a string to use as the :ref:`dynamic context + ` label for collected data. If a :ref:`static + context ` is in use, the static and dynamic context + labels will be joined together with a pipe character. + + Coverage collection must be started already. + + .. versionadded:: 5.0 + + """ + if not self._started: # pragma: part started + raise CoverageException( + "Cannot switch context, coverage is not started" + ) + + if self._collector.should_start_context: + self._warn("Conflicting dynamic contexts", slug="dynamic-conflict", once=True) + + self._collector.switch_context(new_context) + + def clear_exclude(self, which='exclude'): + """Clear the exclude list.""" + self._init() + setattr(self.config, which + "_list", []) + self._exclude_regex_stale() + + def exclude(self, regex, which='exclude'): + """Exclude source lines from execution consideration. + + A number of lists of regular expressions are maintained. Each list + selects lines that are treated differently during reporting. + + `which` determines which list is modified. The "exclude" list selects + lines that are not considered executable at all. The "partial" list + indicates lines with branches that are not taken. + + `regex` is a regular expression. The regex is added to the specified + list. If any of the regexes in the list is found in a line, the line + is marked for special treatment during reporting. 
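+
+ For example, ``cov.exclude(r"if DEBUG:")`` adds a pattern to the
+ "exclude" list, so any line matching it is no longer considered
+ executable (illustrative pattern).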
+ + """ + self._init() + excl_list = getattr(self.config, which + "_list") + excl_list.append(regex) + self._exclude_regex_stale() + + def _exclude_regex_stale(self): + """Drop all the compiled exclusion regexes, a list was modified.""" + self._exclude_re.clear() + + def _exclude_regex(self, which): + """Return a compiled regex for the given exclusion list.""" + if which not in self._exclude_re: + excl_list = getattr(self.config, which + "_list") + self._exclude_re[which] = join_regex(excl_list) + return self._exclude_re[which] + + def get_exclude_list(self, which='exclude'): + """Return a list of excluded regex patterns. + + `which` indicates which list is desired. See :meth:`exclude` for the + lists that are available, and their meaning. + + """ + self._init() + return getattr(self.config, which + "_list") + + def save(self): + """Save the collected coverage data to the data file.""" + data = self.get_data() + data.write() + + def combine(self, data_paths=None, strict=False, keep=False): + """Combine together a number of similarly-named coverage data files. + + All coverage data files whose name starts with `data_file` (from the + coverage() constructor) will be read, and combined together into the + current measurements. + + `data_paths` is a list of files or directories from which data should + be combined. If no list is passed, then the data files from the + directory indicated by the current data file (probably the current + directory) will be combined. + + If `strict` is true, then it is an error to attempt to combine when + there are no data files to combine. + + If `keep` is true, then original input data files won't be deleted. + + .. versionadded:: 4.0 + The `data_paths` parameter. + + .. versionadded:: 4.3 + The `strict` parameter. + + .. versionadded: 5.5 + The `keep` parameter. + """ + self._init() + self._init_data(suffix=None) + self._post_init() + self.get_data() + + aliases = None + if self.config.paths: + aliases = PathAliases() + for paths in self.config.paths.values(): + result = paths[0] + for pattern in paths[1:]: + aliases.add(pattern, result) + + combine_parallel_data( + self._data, + aliases=aliases, + data_paths=data_paths, + strict=strict, + keep=keep, + ) + + def get_data(self): + """Get the collected data. + + Also warn about various problems collecting data. + + Returns a :class:`coverage.CoverageData`, the collected coverage data. + + .. versionadded:: 4.0 + + """ + self._init() + self._init_data(suffix=None) + self._post_init() + + for plugin in self._plugins: + if not plugin._coverage_enabled: + self._collector.plugin_was_disabled(plugin) + + if self._collector and self._collector.flush_data(): + self._post_save_work() + + return self._data + + def _post_save_work(self): + """After saving data, look for warnings, post-work, etc. + + Warn about things that should have happened but didn't. + Look for unexecuted files. + + """ + # If there are still entries in the source_pkgs_unmatched list, + # then we never encountered those packages. + if self._warn_unimported_source: + self._inorout.warn_unimported_source() + + # Find out if we got any data. + if not self._data and self._warn_no_data: + self._warn("No data was collected.", slug="no-data-collected") + + # Touch all the files that could have executed, so that we can + # mark completely unexecuted files as 0% covered. 
+ if self._data is not None: + file_paths = collections.defaultdict(list) + for file_path, plugin_name in self._inorout.find_possibly_unexecuted_files(): + file_path = self._file_mapper(file_path) + file_paths[plugin_name].append(file_path) + for plugin_name, paths in file_paths.items(): + self._data.touch_files(paths, plugin_name) + + if self.config.note: + self._warn("The '[run] note' setting is no longer supported.") + + # Backward compatibility with version 1. + def analysis(self, morf): + """Like `analysis2` but doesn't return excluded line numbers.""" + f, s, _, m, mf = self.analysis2(morf) + return f, s, m, mf + + def analysis2(self, morf): + """Analyze a module. + + `morf` is a module or a file name. It will be analyzed to determine + its coverage statistics. The return value is a 5-tuple: + + * The file name for the module. + * A list of line numbers of executable statements. + * A list of line numbers of excluded statements. + * A list of line numbers of statements not run (missing from + execution). + * A readable formatted string of the missing line numbers. + + The analysis uses the source file itself and the current measured + coverage data. + + """ + analysis = self._analyze(morf) + return ( + analysis.filename, + sorted(analysis.statements), + sorted(analysis.excluded), + sorted(analysis.missing), + analysis.missing_formatted(), + ) + + def _analyze(self, it): + """Analyze a single morf or code unit. + + Returns an `Analysis` object. + + """ + # All reporting comes through here, so do reporting initialization. + self._init() + Numbers.set_precision(self.config.precision) + self._post_init() + + data = self.get_data() + if not isinstance(it, FileReporter): + it = self._get_file_reporter(it) + + return Analysis(data, it, self._file_mapper) + + def _get_file_reporter(self, morf): + """Get a FileReporter for a module or file name.""" + plugin = None + file_reporter = "python" + + if isinstance(morf, string_class): + mapped_morf = self._file_mapper(morf) + plugin_name = self._data.file_tracer(mapped_morf) + if plugin_name: + plugin = self._plugins.get(plugin_name) + + if plugin: + file_reporter = plugin.file_reporter(mapped_morf) + if file_reporter is None: + raise CoverageException( + "Plugin %r did not provide a file reporter for %r." % ( + plugin._coverage_plugin_name, morf + ) + ) + + if file_reporter == "python": + file_reporter = PythonFileReporter(morf, self) + + return file_reporter + + def _get_file_reporters(self, morfs=None): + """Get a list of FileReporters for a list of modules or file names. + + For each module or file name in `morfs`, find a FileReporter. Return + the list of FileReporters. + + If `morfs` is a single module or file name, this returns a list of one + FileReporter. If `morfs` is empty or None, then the list of all files + measured is used to find the FileReporters. + + """ + if not morfs: + morfs = self._data.measured_files() + + # Be sure we have a collection. + if not isinstance(morfs, (list, tuple, set)): + morfs = [morfs] + + file_reporters = [self._get_file_reporter(morf) for morf in morfs] + return file_reporters + + def report( + self, morfs=None, show_missing=None, ignore_errors=None, + file=None, omit=None, include=None, skip_covered=None, + contexts=None, skip_empty=None, precision=None, sort=None + ): + """Write a textual summary report to `file`. + + Each module in `morfs` is listed, with counts of statements, executed + statements, missing statements, and a list of lines missed. 
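+
+ For example, ``total = cov.report(show_missing=True, skip_covered=True)``
+ writes the table to `file` (standard output when no file is given) and
+ returns the total percentage covered as a float.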
+ + If `show_missing` is true, then details of which lines or branches are + missing will be included in the report. If `ignore_errors` is true, + then a failure while reporting a single file will not stop the entire + report. + + `file` is a file-like object, suitable for writing. + + `include` is a list of file name patterns. Files that match will be + included in the report. Files matching `omit` will not be included in + the report. + + If `skip_covered` is true, don't report on files with 100% coverage. + + If `skip_empty` is true, don't report on empty files (those that have + no statements). + + `contexts` is a list of regular expressions. Only data from + :ref:`dynamic contexts ` that match one of those + expressions (using :func:`re.search `) will be + included in the report. + + `precision` is the number of digits to display after the decimal + point for percentages. + + All of the arguments default to the settings read from the + :ref:`configuration file `. + + Returns a float, the total percentage covered. + + .. versionadded:: 4.0 + The `skip_covered` parameter. + + .. versionadded:: 5.0 + The `contexts` and `skip_empty` parameters. + + .. versionadded:: 5.2 + The `precision` parameter. + + """ + with override_config( + self, + ignore_errors=ignore_errors, report_omit=omit, report_include=include, + show_missing=show_missing, skip_covered=skip_covered, + report_contexts=contexts, skip_empty=skip_empty, precision=precision, + sort=sort + ): + reporter = SummaryReporter(self) + return reporter.report(morfs, outfile=file) + + def annotate( + self, morfs=None, directory=None, ignore_errors=None, + omit=None, include=None, contexts=None, + ): + """Annotate a list of modules. + + Each module in `morfs` is annotated. The source is written to a new + file, named with a ",cover" suffix, with each line prefixed with a + marker to indicate the coverage of the line. Covered lines have ">", + excluded lines have "-", and missing lines have "!". + + See :meth:`report` for other arguments. + + """ + with override_config(self, + ignore_errors=ignore_errors, report_omit=omit, + report_include=include, report_contexts=contexts, + ): + reporter = AnnotateReporter(self) + reporter.report(morfs, directory=directory) + + def html_report( + self, morfs=None, directory=None, ignore_errors=None, + omit=None, include=None, extra_css=None, title=None, + skip_covered=None, show_contexts=None, contexts=None, + skip_empty=None, precision=None, + ): + """Generate an HTML report. + + The HTML is written to `directory`. The file "index.html" is the + overview starting point, with links to more detailed pages for + individual modules. + + `extra_css` is a path to a file of other CSS to apply on the page. + It will be copied into the HTML directory. + + `title` is a text string (not HTML) to use as the title of the HTML + report. + + See :meth:`report` for other arguments. + + Returns a float, the total percentage covered. + + .. note:: + The HTML report files are generated incrementally based on the + source files and coverage results. If you modify the report files, + the changes will not be considered. You should be careful about + changing the files in the report folder. 
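+
+ For example, ``cov.html_report(directory="htmlcov", title="My project")``
+ writes the report files into the htmlcov directory and returns the total
+ percentage covered.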
+ + """ + with override_config(self, + ignore_errors=ignore_errors, report_omit=omit, report_include=include, + html_dir=directory, extra_css=extra_css, html_title=title, + html_skip_covered=skip_covered, show_contexts=show_contexts, report_contexts=contexts, + html_skip_empty=skip_empty, precision=precision, + ): + reporter = HtmlReporter(self) + return reporter.report(morfs) + + def xml_report( + self, morfs=None, outfile=None, ignore_errors=None, + omit=None, include=None, contexts=None, skip_empty=None, + ): + """Generate an XML report of coverage results. + + The report is compatible with Cobertura reports. + + Each module in `morfs` is included in the report. `outfile` is the + path to write the file to, "-" will write to stdout. + + See :meth:`report` for other arguments. + + Returns a float, the total percentage covered. + + """ + with override_config(self, + ignore_errors=ignore_errors, report_omit=omit, report_include=include, + xml_output=outfile, report_contexts=contexts, skip_empty=skip_empty, + ): + return render_report(self.config.xml_output, XmlReporter(self), morfs) + + def json_report( + self, morfs=None, outfile=None, ignore_errors=None, + omit=None, include=None, contexts=None, pretty_print=None, + show_contexts=None + ): + """Generate a JSON report of coverage results. + + Each module in `morfs` is included in the report. `outfile` is the + path to write the file to, "-" will write to stdout. + + See :meth:`report` for other arguments. + + Returns a float, the total percentage covered. + + .. versionadded:: 5.0 + + """ + with override_config(self, + ignore_errors=ignore_errors, report_omit=omit, report_include=include, + json_output=outfile, report_contexts=contexts, json_pretty_print=pretty_print, + json_show_contexts=show_contexts + ): + return render_report(self.config.json_output, JsonReporter(self), morfs) + + def sys_info(self): + """Return a list of (key, value) pairs showing internal information.""" + + import coverage as covmod + + self._init() + self._post_init() + + def plugin_info(plugins): + """Make an entry for the sys_info from a list of plug-ins.""" + entries = [] + for plugin in plugins: + entry = plugin._coverage_plugin_name + if not plugin._coverage_enabled: + entry += " (disabled)" + entries.append(entry) + return entries + + info = [ + ('version', covmod.__version__), + ('coverage', covmod.__file__), + ('tracer', self._collector.tracer_name() if self._collector else "-none-"), + ('CTracer', 'available' if CTracer else "unavailable"), + ('plugins.file_tracers', plugin_info(self._plugins.file_tracers)), + ('plugins.configurers', plugin_info(self._plugins.configurers)), + ('plugins.context_switchers', plugin_info(self._plugins.context_switchers)), + ('configs_attempted', self.config.attempted_config_files), + ('configs_read', self.config.config_files_read), + ('config_file', self.config.config_file), + ('config_contents', + repr(self.config._config_contents) + if self.config._config_contents + else '-none-' + ), + ('data_file', self._data.data_filename() if self._data is not None else "-none-"), + ('python', sys.version.replace('\n', '')), + ('platform', platform.platform()), + ('implementation', platform.python_implementation()), + ('executable', sys.executable), + ('def_encoding', sys.getdefaultencoding()), + ('fs_encoding', sys.getfilesystemencoding()), + ('pid', os.getpid()), + ('cwd', os.getcwd()), + ('path', sys.path), + ('environment', sorted( + ("%s = %s" % (k, v)) + for k, v in iitems(os.environ) + if any(slug in k for slug in ("COV", "PY")) 
+ )), + ('command_line', " ".join(getattr(sys, 'argv', ['-none-']))), + ] + + if self._inorout: + info.extend(self._inorout.sys_info()) + + info.extend(CoverageData.sys_info()) + + return info + + +# Mega debugging... +# $set_env.py: COVERAGE_DEBUG_CALLS - Lots and lots of output about calls to Coverage. +if int(os.environ.get("COVERAGE_DEBUG_CALLS", 0)): # pragma: debugging + from coverage.debug import decorate_methods, show_calls + + Coverage = decorate_methods(show_calls(show_args=True), butnot=['get_data'])(Coverage) + + +def process_startup(): + """Call this at Python start-up to perhaps measure coverage. + + If the environment variable COVERAGE_PROCESS_START is defined, coverage + measurement is started. The value of the variable is the config file + to use. + + There are two ways to configure your Python installation to invoke this + function when Python starts: + + #. Create or append to sitecustomize.py to add these lines:: + + import coverage + coverage.process_startup() + + #. Create a .pth file in your Python installation containing:: + + import coverage; coverage.process_startup() + + Returns the :class:`Coverage` instance that was started, or None if it was + not started by this call. + + """ + cps = os.environ.get("COVERAGE_PROCESS_START") + if not cps: + # No request for coverage, nothing to do. + return None + + # This function can be called more than once in a process. This happens + # because some virtualenv configurations make the same directory visible + # twice in sys.path. This means that the .pth file will be found twice, + # and executed twice, executing this function twice. We set a global + # flag (an attribute on this function) to indicate that coverage.py has + # already been started, so we can avoid doing it twice. + # + # https://github.com/nedbat/coveragepy/issues/340 has more details. + + if hasattr(process_startup, "coverage"): + # We've annotated this function before, so we must have already + # started coverage.py in this process. Nothing to do. + return None + + cov = Coverage(config_file=cps) + process_startup.coverage = cov + cov._warn_no_data = False + cov._warn_unimported_source = False + cov._warn_preimported_source = False + cov._auto_save = True + cov.start() + + return cov + + +def _prevent_sub_process_measurement(): + """Stop any subprocess auto-measurement from writing data.""" + auto_created_coverage = getattr(process_startup, "coverage", None) + if auto_created_coverage is not None: + auto_created_coverage._auto_save = False diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/data.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/data.py new file mode 100644 index 000000000..5dd1dfe3f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/data.py @@ -0,0 +1,125 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""Coverage data for coverage.py. + +This file had the 4.x JSON data support, which is now gone. This file still +has storage-agnostic helpers, and is kept to avoid changing too many imports. +CoverageData is now defined in sqldata.py, and imported here to keep the +imports working. + +""" + +import glob +import os.path + +from coverage.misc import CoverageException, file_be_gone +from coverage.sqldata import CoverageData + + +def line_counts(data, fullpath=False): + """Return a dict summarizing the line coverage data. 
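+
+ For example, the result might look like ``{"mod.py": 12, "util.py": 3}``,
+ meaning 12 and 3 executed lines respectively (illustrative values).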
+ + Keys are based on the file names, and values are the number of executed + lines. If `fullpath` is true, then the keys are the full pathnames of + the files, otherwise they are the basenames of the files. + + Returns a dict mapping file names to counts of lines. + + """ + summ = {} + if fullpath: + filename_fn = lambda f: f + else: + filename_fn = os.path.basename + for filename in data.measured_files(): + summ[filename_fn(filename)] = len(data.lines(filename)) + return summ + + +def add_data_to_hash(data, filename, hasher): + """Contribute `filename`'s data to the `hasher`. + + `hasher` is a `coverage.misc.Hasher` instance to be updated with + the file's data. It should only get the results data, not the run + data. + + """ + if data.has_arcs(): + hasher.update(sorted(data.arcs(filename) or [])) + else: + hasher.update(sorted(data.lines(filename) or [])) + hasher.update(data.file_tracer(filename)) + + +def combine_parallel_data(data, aliases=None, data_paths=None, strict=False, keep=False): + """Combine a number of data files together. + + Treat `data.filename` as a file prefix, and combine the data from all + of the data files starting with that prefix plus a dot. + + If `aliases` is provided, it's a `PathAliases` object that is used to + re-map paths to match the local machine's. + + If `data_paths` is provided, it is a list of directories or files to + combine. Directories are searched for files that start with + `data.filename` plus dot as a prefix, and those files are combined. + + If `data_paths` is not provided, then the directory portion of + `data.filename` is used as the directory to search for data files. + + Unless `keep` is True every data file found and combined is then deleted from disk. If a file + cannot be read, a warning will be issued, and the file will not be + deleted. + + If `strict` is true, and no files are found to combine, an error is + raised. + + """ + # Because of the os.path.abspath in the constructor, data_dir will + # never be an empty string. + data_dir, local = os.path.split(data.base_filename()) + localdot = local + '.*' + + data_paths = data_paths or [data_dir] + files_to_combine = [] + for p in data_paths: + if os.path.isfile(p): + files_to_combine.append(os.path.abspath(p)) + elif os.path.isdir(p): + pattern = os.path.join(os.path.abspath(p), localdot) + files_to_combine.extend(glob.glob(pattern)) + else: + raise CoverageException("Couldn't combine from non-existent path '%s'" % (p,)) + + if strict and not files_to_combine: + raise CoverageException("No data to combine") + + files_combined = 0 + for f in files_to_combine: + if f == data.data_filename(): + # Sometimes we are combining into a file which is one of the + # parallel files. Skip that file. + if data._debug.should('dataio'): + data._debug.write("Skipping combining ourself: %r" % (f,)) + continue + if data._debug.should('dataio'): + data._debug.write("Combining data file %r" % (f,)) + try: + new_data = CoverageData(f, debug=data._debug) + new_data.read() + except CoverageException as exc: + if data._warn: + # The CoverageException has the file name in it, so just + # use the message as the warning. 
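+ # For example, a truncated or otherwise unreadable data file is reported
+ # with a warning and skipped; it is not counted in files_combined and is
+ # not deleted.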
+ data._warn(str(exc)) + else: + data.update(new_data, aliases=aliases) + files_combined += 1 + if not keep: + if data._debug.should('dataio'): + data._debug.write("Deleting combined data file %r" % (f,)) + file_be_gone(f) + + if strict and not files_combined: + raise CoverageException("No usable data files") diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/debug.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/debug.py new file mode 100644 index 000000000..194f16f50 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/debug.py @@ -0,0 +1,406 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""Control of and utilities for debugging.""" + +import contextlib +import functools +import inspect +import itertools +import os +import pprint +import sys +try: + import _thread +except ImportError: + import thread as _thread + +from coverage.backward import reprlib, StringIO +from coverage.misc import isolate_module + +os = isolate_module(os) + + +# When debugging, it can be helpful to force some options, especially when +# debugging the configuration mechanisms you usually use to control debugging! +# This is a list of forced debugging options. +FORCED_DEBUG = [] +FORCED_DEBUG_FILE = None + + +class DebugControl(object): + """Control and output for debugging.""" + + show_repr_attr = False # For SimpleReprMixin + + def __init__(self, options, output): + """Configure the options and output file for debugging.""" + self.options = list(options) + FORCED_DEBUG + self.suppress_callers = False + + filters = [] + if self.should('pid'): + filters.append(add_pid_and_tid) + self.output = DebugOutputFile.get_one( + output, + show_process=self.should('process'), + filters=filters, + ) + self.raw_output = self.output.outfile + + def __repr__(self): + return "" % (self.options, self.raw_output) + + def should(self, option): + """Decide whether to output debug information in category `option`.""" + if option == "callers" and self.suppress_callers: + return False + return (option in self.options) + + @contextlib.contextmanager + def without_callers(self): + """A context manager to prevent call stacks from being logged.""" + old = self.suppress_callers + self.suppress_callers = True + try: + yield + finally: + self.suppress_callers = old + + def write(self, msg): + """Write a line of debug output. + + `msg` is the line to write. A newline will be appended. + + """ + self.output.write(msg+"\n") + if self.should('self'): + caller_self = inspect.stack()[1][0].f_locals.get('self') + if caller_self is not None: + self.output.write("self: {!r}\n".format(caller_self)) + if self.should('callers'): + dump_stack_frames(out=self.output, skip=1) + self.output.flush() + + +class DebugControlString(DebugControl): + """A `DebugControl` that writes to a StringIO, for testing.""" + def __init__(self, options): + super(DebugControlString, self).__init__(options, StringIO()) + + def get_output(self): + """Get the output text from the `DebugControl`.""" + return self.raw_output.getvalue() + + +class NoDebugging(object): + """A replacement for DebugControl that will never try to do anything.""" + def should(self, option): # pylint: disable=unused-argument + """Should we write debug messages? 
Never.""" + return False + + +def info_header(label): + """Make a nice header string.""" + return "--{:-<60s}".format(" "+label+" ") + + +def info_formatter(info): + """Produce a sequence of formatted lines from info. + + `info` is a sequence of pairs (label, data). The produced lines are + nicely formatted, ready to print. + + """ + info = list(info) + if not info: + return + label_len = 30 + assert all(len(l) < label_len for l, _ in info) + for label, data in info: + if data == []: + data = "-none-" + if isinstance(data, (list, set, tuple)): + prefix = "%*s:" % (label_len, label) + for e in data: + yield "%*s %s" % (label_len+1, prefix, e) + prefix = "" + else: + yield "%*s: %s" % (label_len, label, data) + + +def write_formatted_info(writer, header, info): + """Write a sequence of (label,data) pairs nicely.""" + writer.write(info_header(header)) + for line in info_formatter(info): + writer.write(" %s" % line) + + +def short_stack(limit=None, skip=0): + """Return a string summarizing the call stack. + + The string is multi-line, with one line per stack frame. Each line shows + the function name, the file name, and the line number: + + ... + start_import_stop : /Users/ned/coverage/trunk/tests/coveragetest.py @95 + import_local_file : /Users/ned/coverage/trunk/tests/coveragetest.py @81 + import_local_file : /Users/ned/coverage/trunk/coverage/backward.py @159 + ... + + `limit` is the number of frames to include, defaulting to all of them. + + `skip` is the number of frames to skip, so that debugging functions can + call this and not be included in the result. + + """ + stack = inspect.stack()[limit:skip:-1] + return "\n".join("%30s : %s:%d" % (t[3], t[1], t[2]) for t in stack) + + +def dump_stack_frames(limit=None, out=None, skip=0): + """Print a summary of the stack to stdout, or someplace else.""" + out = out or sys.stdout + out.write(short_stack(limit=limit, skip=skip+1)) + out.write("\n") + + +def clipped_repr(text, numchars=50): + """`repr(text)`, but limited to `numchars`.""" + r = reprlib.Repr() + r.maxstring = numchars + return r.repr(text) + + +def short_id(id64): + """Given a 64-bit id, make a shorter 16-bit one.""" + id16 = 0 + for offset in range(0, 64, 16): + id16 ^= id64 >> offset + return id16 & 0xFFFF + + +def add_pid_and_tid(text): + """A filter to add pid and tid to debug messages.""" + # Thread ids are useful, but too long. Make a shorter one. 
+ tid = "{:04x}".format(short_id(_thread.get_ident())) + text = "{:5d}.{}: {}".format(os.getpid(), tid, text) + return text + + +class SimpleReprMixin(object): + """A mixin implementing a simple __repr__.""" + simple_repr_ignore = ['simple_repr_ignore', '$coverage.object_id'] + + def __repr__(self): + show_attrs = ( + (k, v) for k, v in self.__dict__.items() + if getattr(v, "show_repr_attr", True) + and not callable(v) + and k not in self.simple_repr_ignore + ) + return "<{klass} @0x{id:x} {attrs}>".format( + klass=self.__class__.__name__, + id=id(self), + attrs=" ".join("{}={!r}".format(k, v) for k, v in show_attrs), + ) + + +def simplify(v): # pragma: debugging + """Turn things which are nearly dict/list/etc into dict/list/etc.""" + if isinstance(v, dict): + return {k:simplify(vv) for k, vv in v.items()} + elif isinstance(v, (list, tuple)): + return type(v)(simplify(vv) for vv in v) + elif hasattr(v, "__dict__"): + return simplify({'.'+k: v for k, v in v.__dict__.items()}) + else: + return v + + +def pp(v): # pragma: debugging + """Debug helper to pretty-print data, including SimpleNamespace objects.""" + # Might not be needed in 3.9+ + pprint.pprint(simplify(v)) + + +def filter_text(text, filters): + """Run `text` through a series of filters. + + `filters` is a list of functions. Each takes a string and returns a + string. Each is run in turn. + + Returns: the final string that results after all of the filters have + run. + + """ + clean_text = text.rstrip() + ending = text[len(clean_text):] + text = clean_text + for fn in filters: + lines = [] + for line in text.splitlines(): + lines.extend(fn(line).splitlines()) + text = "\n".join(lines) + return text + ending + + +class CwdTracker(object): # pragma: debugging + """A class to add cwd info to debug messages.""" + def __init__(self): + self.cwd = None + + def filter(self, text): + """Add a cwd message for each new cwd.""" + cwd = os.getcwd() + if cwd != self.cwd: + text = "cwd is now {!r}\n".format(cwd) + text + self.cwd = cwd + return text + + +class DebugOutputFile(object): # pragma: debugging + """A file-like object that includes pid and cwd information.""" + def __init__(self, outfile, show_process, filters): + self.outfile = outfile + self.show_process = show_process + self.filters = list(filters) + + if self.show_process: + self.filters.insert(0, CwdTracker().filter) + self.write("New process: executable: %r\n" % (sys.executable,)) + self.write("New process: cmd: %r\n" % (getattr(sys, 'argv', None),)) + if hasattr(os, 'getppid'): + self.write("New process: pid: %r, parent pid: %r\n" % (os.getpid(), os.getppid())) + + SYS_MOD_NAME = '$coverage.debug.DebugOutputFile.the_one' + + @classmethod + def get_one(cls, fileobj=None, show_process=True, filters=(), interim=False): + """Get a DebugOutputFile. + + If `fileobj` is provided, then a new DebugOutputFile is made with it. + + If `fileobj` isn't provided, then a file is chosen + (COVERAGE_DEBUG_FILE, or stderr), and a process-wide singleton + DebugOutputFile is made. + + `show_process` controls whether the debug file adds process-level + information, and filters is a list of other message filters to apply. + + `filters` are the text filters to apply to the stream to annotate with + pids, etc. + + If `interim` is true, then a future `get_one` can replace this one. + + """ + if fileobj is not None: + # Make DebugOutputFile around the fileobj passed. 
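
A hedged sketch of exercising this debug plumbing in isolation, using the DebugControlString test helper defined earlier; the message text is made up, and the option names are ones checked by should() in this module.

    from coverage.debug import DebugControlString

    debug = DebugControlString(options=["dataio", "pid"])
    if debug.should("dataio"):
        debug.write("combining data file '.coverage.worker1'")   # hypothetical message
    # Because "pid" is enabled, add_pid_and_tid prefixes each line with "<pid>.<tid>: ".
    print(debug.get_output())
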
+ return cls(fileobj, show_process, filters) + + # Because of the way igor.py deletes and re-imports modules, + # this class can be defined more than once. But we really want + # a process-wide singleton. So stash it in sys.modules instead of + # on a class attribute. Yes, this is aggressively gross. + the_one, is_interim = sys.modules.get(cls.SYS_MOD_NAME, (None, True)) + if the_one is None or is_interim: + if fileobj is None: + debug_file_name = os.environ.get("COVERAGE_DEBUG_FILE", FORCED_DEBUG_FILE) + if debug_file_name: + fileobj = open(debug_file_name, "a") + else: + fileobj = sys.stderr + the_one = cls(fileobj, show_process, filters) + sys.modules[cls.SYS_MOD_NAME] = (the_one, interim) + return the_one + + def write(self, text): + """Just like file.write, but filter through all our filters.""" + self.outfile.write(filter_text(text, self.filters)) + self.outfile.flush() + + def flush(self): + """Flush our file.""" + self.outfile.flush() + + +def log(msg, stack=False): # pragma: debugging + """Write a log message as forcefully as possible.""" + out = DebugOutputFile.get_one(interim=True) + out.write(msg+"\n") + if stack: + dump_stack_frames(out=out, skip=1) + + +def decorate_methods(decorator, butnot=(), private=False): # pragma: debugging + """A class decorator to apply a decorator to methods.""" + def _decorator(cls): + for name, meth in inspect.getmembers(cls, inspect.isroutine): + if name not in cls.__dict__: + continue + if name != "__init__": + if not private and name.startswith("_"): + continue + if name in butnot: + continue + setattr(cls, name, decorator(meth)) + return cls + return _decorator + + +def break_in_pudb(func): # pragma: debugging + """A function decorator to stop in the debugger for each call.""" + @functools.wraps(func) + def _wrapper(*args, **kwargs): + import pudb + sys.stdout = sys.__stdout__ + pudb.set_trace() + return func(*args, **kwargs) + return _wrapper + + +OBJ_IDS = itertools.count() +CALLS = itertools.count() +OBJ_ID_ATTR = "$coverage.object_id" + +def show_calls(show_args=True, show_stack=False, show_return=False): # pragma: debugging + """A method decorator to debug-log each call to the function.""" + def _decorator(func): + @functools.wraps(func) + def _wrapper(self, *args, **kwargs): + oid = getattr(self, OBJ_ID_ATTR, None) + if oid is None: + oid = "{:08d} {:04d}".format(os.getpid(), next(OBJ_IDS)) + setattr(self, OBJ_ID_ATTR, oid) + extra = "" + if show_args: + eargs = ", ".join(map(repr, args)) + ekwargs = ", ".join("{}={!r}".format(*item) for item in kwargs.items()) + extra += "(" + extra += eargs + if eargs and ekwargs: + extra += ", " + extra += ekwargs + extra += ")" + if show_stack: + extra += " @ " + extra += "; ".join(_clean_stack_line(l) for l in short_stack().splitlines()) + callid = next(CALLS) + msg = "{} {:04d} {}{}\n".format(oid, callid, func.__name__, extra) + DebugOutputFile.get_one(interim=True).write(msg) + ret = func(self, *args, **kwargs) + if show_return: + msg = "{} {:04d} {} return {!r}\n".format(oid, callid, func.__name__, ret) + DebugOutputFile.get_one(interim=True).write(msg) + return ret + return _wrapper + return _decorator + + +def _clean_stack_line(s): # pragma: debugging + """Simplify some paths in a stack trace, for compactness.""" + s = s.strip() + s = s.replace(os.path.dirname(__file__) + '/', '') + s = s.replace(os.path.dirname(os.__file__) + '/', '') + s = s.replace(sys.prefix + '/', '') + return s diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/disposition.py 
b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/disposition.py new file mode 100644 index 000000000..9b9a997d8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/disposition.py @@ -0,0 +1,37 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""Simple value objects for tracking what to do with files.""" + + +class FileDisposition(object): + """A simple value type for recording what to do with a file.""" + pass + + +# FileDisposition "methods": FileDisposition is a pure value object, so it can +# be implemented in either C or Python. Acting on them is done with these +# functions. + +def disposition_init(cls, original_filename): + """Construct and initialize a new FileDisposition object.""" + disp = cls() + disp.original_filename = original_filename + disp.canonical_filename = original_filename + disp.source_filename = None + disp.trace = False + disp.reason = "" + disp.file_tracer = None + disp.has_dynamic_filename = False + return disp + + +def disposition_debug_msg(disp): + """Make a nice debug message of what the FileDisposition is doing.""" + if disp.trace: + msg = "Tracing %r" % (disp.original_filename,) + if disp.file_tracer: + msg += ": will be traced by %r" % disp.file_tracer + else: + msg = "Not tracing %r: %s" % (disp.original_filename, disp.reason) + return msg diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/env.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/env.py new file mode 100644 index 000000000..ea78a5be8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/env.py @@ -0,0 +1,130 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""Determine facts about the environment.""" + +import os +import platform +import sys + +# Operating systems. +WINDOWS = sys.platform == "win32" +LINUX = sys.platform.startswith("linux") + +# Python implementations. +CPYTHON = (platform.python_implementation() == "CPython") +PYPY = (platform.python_implementation() == "PyPy") +JYTHON = (platform.python_implementation() == "Jython") +IRONPYTHON = (platform.python_implementation() == "IronPython") + +# Python versions. We amend version_info with one more value, a zero if an +# official version, or 1 if built from source beyond an official version. +PYVERSION = sys.version_info + (int(platform.python_version()[-1] == "+"),) +PY2 = PYVERSION < (3, 0) +PY3 = PYVERSION >= (3, 0) + +if PYPY: + PYPYVERSION = sys.pypy_version_info + +PYPY2 = PYPY and PY2 +PYPY3 = PYPY and PY3 + +# Python behavior. +class PYBEHAVIOR(object): + """Flags indicating this Python's behavior.""" + + pep626 = CPYTHON and (PYVERSION > (3, 10, 0, 'alpha', 4)) + + # Is "if __debug__" optimized away? + if PYPY3: + optimize_if_debug = True + elif PYPY2: + optimize_if_debug = False + else: + optimize_if_debug = not pep626 + + # Is "if not __debug__" optimized away? + optimize_if_not_debug = (not PYPY) and (PYVERSION >= (3, 7, 0, 'alpha', 4)) + if pep626: + optimize_if_not_debug = False + if PYPY3: + optimize_if_not_debug = True + + # Is "if not __debug__" optimized away even better? + optimize_if_not_debug2 = (not PYPY) and (PYVERSION >= (3, 8, 0, 'beta', 1)) + if pep626: + optimize_if_not_debug2 = False + + # Do we have yield-from? 
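
A hedged sketch of the disposition helpers added just above: how a trace/no-trace decision for one file is recorded and rendered for debug output. The file paths and reason text are hypothetical.

    from coverage.disposition import FileDisposition, disposition_init, disposition_debug_msg

    disp = disposition_init(FileDisposition, "/src/proj/mod.py")     # hypothetical path
    disp.trace = True                       # the tracer decided to measure this file
    print(disposition_debug_msg(disp))      # -> "Tracing '/src/proj/mod.py'"

    disp2 = disposition_init(FileDisposition, "/usr/lib/python3.8/os.py")
    disp2.reason = "falls outside the source trees"                   # hypothetical reason
    print(disposition_debug_msg(disp2))     # -> "Not tracing '/usr/lib/python3.8/os.py': ..."
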
+ yield_from = (PYVERSION >= (3, 3)) + + # Do we have PEP 420 namespace packages? + namespaces_pep420 = (PYVERSION >= (3, 3)) + + # Do .pyc files have the source file size recorded in them? + size_in_pyc = (PYVERSION >= (3, 3)) + + # Do we have async and await syntax? + async_syntax = (PYVERSION >= (3, 5)) + + # PEP 448 defined additional unpacking generalizations + unpackings_pep448 = (PYVERSION >= (3, 5)) + + # Can co_lnotab have negative deltas? + negative_lnotab = (PYVERSION >= (3, 6)) and not (PYPY and PYPYVERSION < (7, 2)) + + # Do .pyc files conform to PEP 552? Hash-based pyc's. + hashed_pyc_pep552 = (PYVERSION >= (3, 7, 0, 'alpha', 4)) + + # Python 3.7.0b3 changed the behavior of the sys.path[0] entry for -m. It + # used to be an empty string (meaning the current directory). It changed + # to be the actual path to the current directory, so that os.chdir wouldn't + # affect the outcome. + actual_syspath0_dash_m = CPYTHON and (PYVERSION >= (3, 7, 0, 'beta', 3)) + + # 3.7 changed how functions with only docstrings are numbered. + docstring_only_function = (not PYPY) and ((3, 7, 0, 'beta', 5) <= PYVERSION <= (3, 10)) + + # When a break/continue/return statement in a try block jumps to a finally + # block, does the finally block do the break/continue/return (pre-3.8), or + # does the finally jump back to the break/continue/return (3.8) to do the + # work? + finally_jumps_back = ((3, 8) <= PYVERSION < (3, 10)) + + # When a function is decorated, does the trace function get called for the + # @-line and also the def-line (new behavior in 3.8)? Or just the @-line + # (old behavior)? + trace_decorated_def = (PYVERSION >= (3, 8)) + + # Are while-true loops optimized into absolute jumps with no loop setup? + nix_while_true = (PYVERSION >= (3, 8)) + + # Python 3.9a1 made sys.argv[0] and other reported files absolute paths. + report_absolute_files = (PYVERSION >= (3, 9)) + + # Lines after break/continue/return/raise are no longer compiled into the + # bytecode. They used to be marked as missing, now they aren't executable. + omit_after_jump = pep626 + + # PyPy has always omitted statements after return. + omit_after_return = omit_after_jump or PYPY + + # Modules used to have firstlineno equal to the line number of the first + # real line of code. Now they always start at 1. + module_firstline_1 = pep626 + + # Are "if 0:" lines (and similar) kept in the compiled code? + keep_constant_test = pep626 + +# Coverage.py specifics. + +# Are we using the C-implemented trace function? +C_TRACER = os.getenv('COVERAGE_TEST_TRACER', 'c') == 'c' + +# Are we coverage-measuring ourselves? +METACOV = os.getenv('COVERAGE_COVERAGE', '') != '' + +# Are we running our test suite? +# Even when running tests, you can use COVERAGE_TESTING=0 to disable the +# test-specific behavior like contracts. 
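
A hedged sketch of how these environment facts read on a CPython 3.8 interpreter like the one this tree targets; the values in the comments are what the definitions above evaluate to there.

    from coverage import env

    print(env.PYVERSION)                         # e.g. (3, 8, 20, 'final', 0, 0)
    print(env.PYBEHAVIOR.trace_decorated_def)    # True on 3.8+: both the @-line and the def-line are traced
    print(env.PYBEHAVIOR.finally_jumps_back)     # True on 3.8 and 3.9 only
    print(env.C_TRACER)                          # False when COVERAGE_TEST_TRACER selects the pure-Python tracer
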
+TESTING = os.getenv('COVERAGE_TESTING', '') == 'True' diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/execfile.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/execfile.py new file mode 100644 index 000000000..29409d517 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/execfile.py @@ -0,0 +1,362 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""Execute files of Python code.""" + +import inspect +import marshal +import os +import struct +import sys +import types + +from coverage import env +from coverage.backward import BUILTINS +from coverage.backward import PYC_MAGIC_NUMBER, imp, importlib_util_find_spec +from coverage.files import canonical_filename, python_reported_file +from coverage.misc import CoverageException, ExceptionDuringRun, NoCode, NoSource, isolate_module +from coverage.phystokens import compile_unicode +from coverage.python import get_python_source + +os = isolate_module(os) + + +class DummyLoader(object): + """A shim for the pep302 __loader__, emulating pkgutil.ImpLoader. + + Currently only implements the .fullname attribute + """ + def __init__(self, fullname, *_args): + self.fullname = fullname + + +if importlib_util_find_spec: + def find_module(modulename): + """Find the module named `modulename`. + + Returns the file path of the module, the name of the enclosing + package, and the spec. + """ + try: + spec = importlib_util_find_spec(modulename) + except ImportError as err: + raise NoSource(str(err)) + if not spec: + raise NoSource("No module named %r" % (modulename,)) + pathname = spec.origin + packagename = spec.name + if spec.submodule_search_locations: + mod_main = modulename + ".__main__" + spec = importlib_util_find_spec(mod_main) + if not spec: + raise NoSource( + "No module named %s; " + "%r is a package and cannot be directly executed" + % (mod_main, modulename) + ) + pathname = spec.origin + packagename = spec.name + packagename = packagename.rpartition(".")[0] + return pathname, packagename, spec +else: + def find_module(modulename): + """Find the module named `modulename`. + + Returns the file path of the module, the name of the enclosing + package, and None (where a spec would have been). + """ + openfile = None + glo, loc = globals(), locals() + try: + # Search for the module - inside its parent package, if any - using + # standard import mechanics. + if '.' in modulename: + packagename, name = modulename.rsplit('.', 1) + package = __import__(packagename, glo, loc, ['__path__']) + searchpath = package.__path__ + else: + packagename, name = None, modulename + searchpath = None # "top-level search" in imp.find_module() + openfile, pathname, _ = imp.find_module(name, searchpath) + + # Complain if this is a magic non-file module. + if openfile is None and pathname is None: + raise NoSource( + "module does not live in a file: %r" % modulename + ) + + # If `modulename` is actually a package, not a mere module, then we + # pretend to be Python 2.7 and try running its __main__.py script. 
+ if openfile is None: + packagename = modulename + name = '__main__' + package = __import__(packagename, glo, loc, ['__path__']) + searchpath = package.__path__ + openfile, pathname, _ = imp.find_module(name, searchpath) + except ImportError as err: + raise NoSource(str(err)) + finally: + if openfile: + openfile.close() + + return pathname, packagename, None + + +class PyRunner(object): + """Multi-stage execution of Python code. + + This is meant to emulate real Python execution as closely as possible. + + """ + def __init__(self, args, as_module=False): + self.args = args + self.as_module = as_module + + self.arg0 = args[0] + self.package = self.modulename = self.pathname = self.loader = self.spec = None + + def prepare(self): + """Set sys.path properly. + + This needs to happen before any importing, and without importing anything. + """ + if self.as_module: + if env.PYBEHAVIOR.actual_syspath0_dash_m: + path0 = os.getcwd() + else: + path0 = "" + elif os.path.isdir(self.arg0): + # Running a directory means running the __main__.py file in that + # directory. + path0 = self.arg0 + else: + path0 = os.path.abspath(os.path.dirname(self.arg0)) + + if os.path.isdir(sys.path[0]): + # sys.path fakery. If we are being run as a command, then sys.path[0] + # is the directory of the "coverage" script. If this is so, replace + # sys.path[0] with the directory of the file we're running, or the + # current directory when running modules. If it isn't so, then we + # don't know what's going on, and just leave it alone. + top_file = inspect.stack()[-1][0].f_code.co_filename + sys_path_0_abs = os.path.abspath(sys.path[0]) + top_file_dir_abs = os.path.abspath(os.path.dirname(top_file)) + sys_path_0_abs = canonical_filename(sys_path_0_abs) + top_file_dir_abs = canonical_filename(top_file_dir_abs) + if sys_path_0_abs != top_file_dir_abs: + path0 = None + + else: + # sys.path[0] is a file. Is the next entry the directory containing + # that file? + if sys.path[1] == os.path.dirname(sys.path[0]): + # Can it be right to always remove that? + del sys.path[1] + + if path0 is not None: + sys.path[0] = python_reported_file(path0) + + def _prepare2(self): + """Do more preparation to run Python code. + + Includes finding the module to run and adjusting sys.argv[0]. + This method is allowed to import code. + + """ + if self.as_module: + self.modulename = self.arg0 + pathname, self.package, self.spec = find_module(self.modulename) + if self.spec is not None: + self.modulename = self.spec.name + self.loader = DummyLoader(self.modulename) + self.pathname = os.path.abspath(pathname) + self.args[0] = self.arg0 = self.pathname + elif os.path.isdir(self.arg0): + # Running a directory means running the __main__.py file in that + # directory. + for ext in [".py", ".pyc", ".pyo"]: + try_filename = os.path.join(self.arg0, "__main__" + ext) + if os.path.exists(try_filename): + self.arg0 = try_filename + break + else: + raise NoSource("Can't find '__main__' module in '%s'" % self.arg0) + + if env.PY2: + self.arg0 = os.path.abspath(self.arg0) + + # Make a spec. I don't know if this is the right way to do it. 
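
A hedged usage sketch of PyRunner, mirroring what the run_python_file and run_python_module helpers further down do; the script and module names are hypothetical.

    from coverage.execfile import PyRunner

    # Run a script, roughly as "coverage run myscript.py --fast" would.
    runner = PyRunner(["myscript.py", "--fast"], as_module=False)
    runner.prepare()          # adjust sys.path before anything is imported
    runner.run()              # compile and exec the file as __main__

    # Run a module, roughly as "coverage run -m mypkg.tool" would.
    mod_runner = PyRunner(["mypkg.tool"], as_module=True)
    mod_runner.prepare()
    mod_runner.run()
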
+ try: + import importlib.machinery + except ImportError: + pass + else: + try_filename = python_reported_file(try_filename) + self.spec = importlib.machinery.ModuleSpec("__main__", None, origin=try_filename) + self.spec.has_location = True + self.package = "" + self.loader = DummyLoader("__main__") + else: + if env.PY3: + self.loader = DummyLoader("__main__") + + self.arg0 = python_reported_file(self.arg0) + + def run(self): + """Run the Python code!""" + + self._prepare2() + + # Create a module to serve as __main__ + main_mod = types.ModuleType('__main__') + + from_pyc = self.arg0.endswith((".pyc", ".pyo")) + main_mod.__file__ = self.arg0 + if from_pyc: + main_mod.__file__ = main_mod.__file__[:-1] + if self.package is not None: + main_mod.__package__ = self.package + main_mod.__loader__ = self.loader + if self.spec is not None: + main_mod.__spec__ = self.spec + + main_mod.__builtins__ = BUILTINS + + sys.modules['__main__'] = main_mod + + # Set sys.argv properly. + sys.argv = self.args + + try: + # Make a code object somehow. + if from_pyc: + code = make_code_from_pyc(self.arg0) + else: + code = make_code_from_py(self.arg0) + except CoverageException: + raise + except Exception as exc: + msg = "Couldn't run '{filename}' as Python code: {exc.__class__.__name__}: {exc}" + raise CoverageException(msg.format(filename=self.arg0, exc=exc)) + + # Execute the code object. + # Return to the original directory in case the test code exits in + # a non-existent directory. + cwd = os.getcwd() + try: + exec(code, main_mod.__dict__) + except SystemExit: # pylint: disable=try-except-raise + # The user called sys.exit(). Just pass it along to the upper + # layers, where it will be handled. + raise + except Exception: + # Something went wrong while executing the user code. + # Get the exc_info, and pack them into an exception that we can + # throw up to the outer loop. We peel one layer off the traceback + # so that the coverage.py code doesn't appear in the final printed + # traceback. + typ, err, tb = sys.exc_info() + + # PyPy3 weirdness. If I don't access __context__, then somehow it + # is non-None when the exception is reported at the upper layer, + # and a nested exception is shown to the user. This getattr fixes + # it somehow? https://bitbucket.org/pypy/pypy/issue/1903 + getattr(err, '__context__', None) + + # Call the excepthook. + try: + if hasattr(err, "__traceback__"): + err.__traceback__ = err.__traceback__.tb_next + sys.excepthook(typ, err, tb.tb_next) + except SystemExit: # pylint: disable=try-except-raise + raise + except Exception: + # Getting the output right in the case of excepthook + # shenanigans is kind of involved. + sys.stderr.write("Error in sys.excepthook:\n") + typ2, err2, tb2 = sys.exc_info() + err2.__suppress_context__ = True + if hasattr(err2, "__traceback__"): + err2.__traceback__ = err2.__traceback__.tb_next + sys.__excepthook__(typ2, err2, tb2.tb_next) + sys.stderr.write("\nOriginal exception was:\n") + raise ExceptionDuringRun(typ, err, tb.tb_next) + else: + sys.exit(1) + finally: + os.chdir(cwd) + + +def run_python_module(args): + """Run a Python module, as though with ``python -m name args...``. + + `args` is the argument array to present as sys.argv, including the first + element naming the module being executed. + + This is a helper for tests, to encapsulate how to use PyRunner. + + """ + runner = PyRunner(args, as_module=True) + runner.prepare() + runner.run() + + +def run_python_file(args): + """Run a Python file as if it were the main program on the command line. 
+ + `args` is the argument array to present as sys.argv, including the first + element naming the file being executed. `package` is the name of the + enclosing package, if any. + + This is a helper for tests, to encapsulate how to use PyRunner. + + """ + runner = PyRunner(args, as_module=False) + runner.prepare() + runner.run() + + +def make_code_from_py(filename): + """Get source from `filename` and make a code object of it.""" + # Open the source file. + try: + source = get_python_source(filename) + except (IOError, NoSource): + raise NoSource("No file to run: '%s'" % filename) + + code = compile_unicode(source, filename, "exec") + return code + + +def make_code_from_pyc(filename): + """Get a code object from a .pyc file.""" + try: + fpyc = open(filename, "rb") + except IOError: + raise NoCode("No file to run: '%s'" % filename) + + with fpyc: + # First four bytes are a version-specific magic number. It has to + # match or we won't run the file. + magic = fpyc.read(4) + if magic != PYC_MAGIC_NUMBER: + raise NoCode("Bad magic number in .pyc file: {} != {}".format(magic, PYC_MAGIC_NUMBER)) + + date_based = True + if env.PYBEHAVIOR.hashed_pyc_pep552: + flags = struct.unpack(' MAX_FLAT: + h = hashlib.sha1(name.encode('UTF-8')).hexdigest() + name = name[-(MAX_FLAT-len(h)-1):] + '_' + h + return name + + +if env.WINDOWS: + + _ACTUAL_PATH_CACHE = {} + _ACTUAL_PATH_LIST_CACHE = {} + + def actual_path(path): + """Get the actual path of `path`, including the correct case.""" + if env.PY2 and isinstance(path, unicode_class): + path = path.encode(sys.getfilesystemencoding()) + if path in _ACTUAL_PATH_CACHE: + return _ACTUAL_PATH_CACHE[path] + + head, tail = os.path.split(path) + if not tail: + # This means head is the drive spec: normalize it. + actpath = head.upper() + elif not head: + actpath = tail + else: + head = actual_path(head) + if head in _ACTUAL_PATH_LIST_CACHE: + files = _ACTUAL_PATH_LIST_CACHE[head] + else: + try: + files = os.listdir(head) + except Exception: + # This will raise OSError, or this bizarre TypeError: + # https://bugs.python.org/issue1776160 + files = [] + _ACTUAL_PATH_LIST_CACHE[head] = files + normtail = os.path.normcase(tail) + for f in files: + if os.path.normcase(f) == normtail: + tail = f + break + actpath = os.path.join(head, tail) + _ACTUAL_PATH_CACHE[path] = actpath + return actpath + +else: + def actual_path(filename): + """The actual path for non-Windows platforms.""" + return filename + + +if env.PY2: + @contract(returns='unicode') + def unicode_filename(filename): + """Return a Unicode version of `filename`.""" + if isinstance(filename, str): + encoding = sys.getfilesystemencoding() or sys.getdefaultencoding() + filename = filename.decode(encoding, "replace") + return filename +else: + @contract(filename='unicode', returns='unicode') + def unicode_filename(filename): + """Return a Unicode version of `filename`.""" + return filename + + +@contract(returns='unicode') +def abs_file(path): + """Return the absolute normalized form of `path`.""" + try: + path = os.path.realpath(path) + except UnicodeError: + pass + path = os.path.abspath(path) + path = actual_path(path) + path = unicode_filename(path) + return path + + +def python_reported_file(filename): + """Return the string as Python would describe this file name.""" + if env.PYBEHAVIOR.report_absolute_files: + filename = os.path.abspath(filename) + return filename + + +RELATIVE_DIR = None +CANONICAL_FILENAME_CACHE = None +set_relative_directory() + + +def isabs_anywhere(filename): + """Is `filename` an absolute 
path on any OS?""" + return ntpath.isabs(filename) or posixpath.isabs(filename) + + +def prep_patterns(patterns): + """Prepare the file patterns for use in a `FnmatchMatcher`. + + If a pattern starts with a wildcard, it is used as a pattern + as-is. If it does not start with a wildcard, then it is made + absolute with the current directory. + + If `patterns` is None, an empty list is returned. + + """ + prepped = [] + for p in patterns or []: + if p.startswith(("*", "?")): + prepped.append(p) + else: + prepped.append(abs_file(p)) + return prepped + + +class TreeMatcher(object): + """A matcher for files in a tree. + + Construct with a list of paths, either files or directories. Paths match + with the `match` method if they are one of the files, or if they are + somewhere in a subtree rooted at one of the directories. + + """ + def __init__(self, paths): + self.paths = list(paths) + + def __repr__(self): + return "" % self.paths + + def info(self): + """A list of strings for displaying when dumping state.""" + return self.paths + + def match(self, fpath): + """Does `fpath` indicate a file in one of our trees?""" + for p in self.paths: + if fpath.startswith(p): + if fpath == p: + # This is the same file! + return True + if fpath[len(p)] == os.sep: + # This is a file in the directory + return True + return False + + +class ModuleMatcher(object): + """A matcher for modules in a tree.""" + def __init__(self, module_names): + self.modules = list(module_names) + + def __repr__(self): + return "" % (self.modules) + + def info(self): + """A list of strings for displaying when dumping state.""" + return self.modules + + def match(self, module_name): + """Does `module_name` indicate a module in one of our packages?""" + if not module_name: + return False + + for m in self.modules: + if module_name.startswith(m): + if module_name == m: + return True + if module_name[len(m)] == '.': + # This is a module in the package + return True + + return False + + +class FnmatchMatcher(object): + """A matcher for files by file name pattern.""" + def __init__(self, pats): + self.pats = list(pats) + self.re = fnmatches_to_regex(self.pats, case_insensitive=env.WINDOWS) + + def __repr__(self): + return "" % self.pats + + def info(self): + """A list of strings for displaying when dumping state.""" + return self.pats + + def match(self, fpath): + """Does `fpath` match one of our file name patterns?""" + return self.re.match(fpath) is not None + + +def sep(s): + """Find the path separator used in this string, or os.sep if none.""" + sep_match = re.search(r"[\\/]", s) + if sep_match: + the_sep = sep_match.group(0) + else: + the_sep = os.sep + return the_sep + + +def fnmatches_to_regex(patterns, case_insensitive=False, partial=False): + """Convert fnmatch patterns to a compiled regex that matches any of them. + + Slashes are always converted to match either slash or backslash, for + Windows support, even when running elsewhere. + + If `partial` is true, then the pattern will match if the target string + starts with the pattern. Otherwise, it must match the entire string. + + Returns: a compiled regex object. Use the .match method to compare target + strings. + + """ + regexes = (fnmatch.translate(pattern) for pattern in patterns) + # Python3.7 fnmatch translates "/" as "/". Before that, it translates as "\/", + # so we have to deal with maybe a backslash. 
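
Hedged examples of the three matcher classes above, using hypothetical POSIX-style paths (TreeMatcher compares against os.sep, so exact results are platform-dependent).

    from coverage.files import TreeMatcher, ModuleMatcher, FnmatchMatcher

    TreeMatcher(["/src/proj"]).match("/src/proj/pkg/mod.py")          # True: inside the tree
    TreeMatcher(["/src/proj"]).match("/src/project/mod.py")           # False: a prefix, not a subtree
    ModuleMatcher(["pkg"]).match("pkg.sub.mod")                        # True: submodule of pkg
    ModuleMatcher(["pkg"]).match("pkgother")                           # False: different top-level name
    FnmatchMatcher(["*/tests/*"]).match("/src/proj/tests/test_x.py")  # True: glob matches the full path
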
+ regexes = (re.sub(r"\\?/", r"[\\\\/]", regex) for regex in regexes) + + if partial: + # fnmatch always adds a \Z to match the whole string, which we don't + # want, so we remove the \Z. While removing it, we only replace \Z if + # followed by paren (introducing flags), or at end, to keep from + # destroying a literal \Z in the pattern. + regexes = (re.sub(r'\\Z(\(\?|$)', r'\1', regex) for regex in regexes) + + flags = 0 + if case_insensitive: + flags |= re.IGNORECASE + compiled = re.compile(join_regex(regexes), flags=flags) + + return compiled + + +class PathAliases(object): + """A collection of aliases for paths. + + When combining data files from remote machines, often the paths to source + code are different, for example, due to OS differences, or because of + serialized checkouts on continuous integration machines. + + A `PathAliases` object tracks a list of pattern/result pairs, and can + map a path through those aliases to produce a unified path. + + """ + def __init__(self): + self.aliases = [] + + def pprint(self): # pragma: debugging + """Dump the important parts of the PathAliases, for debugging.""" + for regex, result in self.aliases: + print("{!r} --> {!r}".format(regex.pattern, result)) + + def add(self, pattern, result): + """Add the `pattern`/`result` pair to the list of aliases. + + `pattern` is an `fnmatch`-style pattern. `result` is a simple + string. When mapping paths, if a path starts with a match against + `pattern`, then that match is replaced with `result`. This models + isomorphic source trees being rooted at different places on two + different machines. + + `pattern` can't end with a wildcard component, since that would + match an entire tree, and not just its root. + + """ + pattern_sep = sep(pattern) + + if len(pattern) > 1: + pattern = pattern.rstrip(r"\/") + + # The pattern can't end with a wildcard component. + if pattern.endswith("*"): + raise CoverageException("Pattern must not end with wildcards.") + + # The pattern is meant to match a filepath. Let's make it absolute + # unless it already is, or is meant to match any prefix. + if not pattern.startswith('*') and not isabs_anywhere(pattern + + pattern_sep): + pattern = abs_file(pattern) + if not pattern.endswith(pattern_sep): + pattern += pattern_sep + + # Make a regex from the pattern. + regex = fnmatches_to_regex([pattern], case_insensitive=True, partial=True) + + # Normalize the result: it must end with a path separator. + result_sep = sep(result) + result = result.rstrip(r"\/") + result_sep + self.aliases.append((regex, result)) + + def map(self, path): + """Map `path` through the aliases. + + `path` is checked against all of the patterns. The first pattern to + match is used to replace the root of the path with the result root. + Only one pattern is ever used. If no patterns match, `path` is + returned unchanged. + + The separator style in the result is made to match that of the result + in the alias. + + Returns the mapped path. If a mapping has happened, this is a + canonical path. If no mapping has happened, it is the original value + of `path` unchanged. + + """ + for regex, result in self.aliases: + m = regex.match(path) + if m: + new = path.replace(m.group(0), result) + new = new.replace(sep(path), sep(result)) + new = canonical_filename(new) + return new + return path + + +def find_python_files(dirname): + """Yield all of the importable Python files in `dirname`, recursively. 
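
A hedged sketch of PathAliases as used when combining data recorded on another machine; the CI checkout path is hypothetical.

    from coverage.files import PathAliases

    aliases = PathAliases()
    aliases.add("/ci/worker/build/src", "./src")        # remote root -> local root
    aliases.map("/ci/worker/build/src/pkg/mod.py")      # -> canonical local path ending in src/pkg/mod.py
    aliases.map("/somewhere/else/mod.py")                # unchanged: no pattern matched
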
+ + To be importable, the files have to be in a directory with a __init__.py, + except for `dirname` itself, which isn't required to have one. The + assumption is that `dirname` was specified directly, so the user knows + best, but sub-directories are checked for a __init__.py to be sure we only + find the importable files. + + """ + for i, (dirpath, dirnames, filenames) in enumerate(os.walk(dirname)): + if i > 0 and '__init__.py' not in filenames: + # If a directory doesn't have __init__.py, then it isn't + # importable and neither are its files + del dirnames[:] + continue + for filename in filenames: + # We're only interested in files that look like reasonable Python + # files: Must end with .py or .pyw, and must not have certain funny + # characters that probably mean they are editor junk. + if re.match(r"^[^.#~!$@%^&*()+=,]+\.pyw?$", filename): + yield os.path.join(dirpath, filename) diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/fullcoverage/encodings.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/fullcoverage/encodings.py new file mode 100644 index 000000000..aeb416e40 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/fullcoverage/encodings.py @@ -0,0 +1,60 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""Imposter encodings module that installs a coverage-style tracer. + +This is NOT the encodings module; it is an imposter that sets up tracing +instrumentation and then replaces itself with the real encodings module. + +If the directory that holds this file is placed first in the PYTHONPATH when +using "coverage" to run Python's tests, then this file will become the very +first module imported by the internals of Python 3. It installs a +coverage.py-compatible trace function that can watch Standard Library modules +execute from the very earliest stages of Python's own boot process. This fixes +a problem with coverage.py - that it starts too late to trace the coverage of +many of the most fundamental modules in the Standard Library. + +""" + +import sys + +class FullCoverageTracer(object): + def __init__(self): + # `traces` is a list of trace events. Frames are tricky: the same + # frame object is used for a whole scope, with new line numbers + # written into it. So in one scope, all the frame objects are the + # same object, and will eventually all will point to the last line + # executed. So we keep the line numbers alongside the frames. + # The list looks like: + # + # traces = [ + # ((frame, event, arg), lineno), ... + # ] + # + self.traces = [] + + def fullcoverage_trace(self, *args): + frame, event, arg = args + self.traces.append((args, frame.f_lineno)) + return self.fullcoverage_trace + +sys.settrace(FullCoverageTracer().fullcoverage_trace) + +# In coverage/files.py is actual_filename(), which uses glob.glob. I don't +# understand why, but that use of glob borks everything if fullcoverage is in +# effect. So here we make an ugly hail-mary pass to switch off glob.glob over +# there. This means when using fullcoverage, Windows path names will not be +# their actual case. + +#sys.fullcoverage = True + +# Finally, remove our own directory from sys.path; remove ourselves from +# sys.modules; and re-import "encodings", which will be the real package +# this time. 
Note that the delete from sys.modules dictionary has to +# happen last, since all of the symbols in this module will become None +# at that exact moment, including "sys". + +parentdir = max(filter(__file__.startswith, sys.path), key=len) +sys.path.remove(parentdir) +del sys.modules['encodings'] +import encodings diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/html.py b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/html.py new file mode 100644 index 000000000..0dfee7ca8 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/html.py @@ -0,0 +1,520 @@ +# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +"""HTML reporting for coverage.py.""" + +import datetime +import json +import os +import re +import shutil + +import coverage +from coverage import env +from coverage.backward import iitems, SimpleNamespace, format_local_datetime +from coverage.data import add_data_to_hash +from coverage.files import flat_rootname +from coverage.misc import CoverageException, ensure_dir, file_be_gone, Hasher, isolate_module +from coverage.report import get_analysis_to_report +from coverage.results import Numbers +from coverage.templite import Templite + +os = isolate_module(os) + + +# Static files are looked for in a list of places. +STATIC_PATH = [ + # The place Debian puts system Javascript libraries. + "/usr/share/javascript", + + # Our htmlfiles directory. + os.path.join(os.path.dirname(__file__), "htmlfiles"), +] + + +def data_filename(fname, pkgdir=""): + """Return the path to a data file of ours. + + The file is searched for on `STATIC_PATH`, and the first place it's found, + is returned. + + Each directory in `STATIC_PATH` is searched as-is, and also, if `pkgdir` + is provided, at that sub-directory. 
+ + """ + tried = [] + for static_dir in STATIC_PATH: + static_filename = os.path.join(static_dir, fname) + if os.path.exists(static_filename): + return static_filename + else: + tried.append(static_filename) + if pkgdir: + static_filename = os.path.join(static_dir, pkgdir, fname) + if os.path.exists(static_filename): + return static_filename + else: + tried.append(static_filename) + raise CoverageException( + "Couldn't find static file %r from %r, tried: %r" % (fname, os.getcwd(), tried) + ) + + +def read_data(fname): + """Return the contents of a data file of ours.""" + with open(data_filename(fname)) as data_file: + return data_file.read() + + +def write_html(fname, html): + """Write `html` to `fname`, properly encoded.""" + html = re.sub(r"(\A\s+)|(\s+$)", "", html, flags=re.MULTILINE) + "\n" + with open(fname, "wb") as fout: + fout.write(html.encode('ascii', 'xmlcharrefreplace')) + + +class HtmlDataGeneration(object): + """Generate structured data to be turned into HTML reports.""" + + EMPTY = "(empty)" + + def __init__(self, cov): + self.coverage = cov + self.config = self.coverage.config + data = self.coverage.get_data() + self.has_arcs = data.has_arcs() + if self.config.show_contexts: + if data.measured_contexts() == {""}: + self.coverage._warn("No contexts were measured") + data.set_query_contexts(self.config.report_contexts) + + def data_for_file(self, fr, analysis): + """Produce the data needed for one file's report.""" + if self.has_arcs: + missing_branch_arcs = analysis.missing_branch_arcs() + arcs_executed = analysis.arcs_executed() + + if self.config.show_contexts: + contexts_by_lineno = analysis.data.contexts_by_lineno(analysis.filename) + + lines = [] + + for lineno, tokens in enumerate(fr.source_token_lines(), start=1): + # Figure out how to mark this line. + category = None + short_annotations = [] + long_annotations = [] + + if lineno in analysis.excluded: + category = 'exc' + elif lineno in analysis.missing: + category = 'mis' + elif self.has_arcs and lineno in missing_branch_arcs: + category = 'par' + for b in missing_branch_arcs[lineno]: + if b < 0: + short_annotations.append("exit") + else: + short_annotations.append(b) + long_annotations.append(fr.missing_arc_description(lineno, b, arcs_executed)) + elif lineno in analysis.statements: + category = 'run' + + contexts = contexts_label = None + context_list = None + if category and self.config.show_contexts: + contexts = sorted(c or self.EMPTY for c in contexts_by_lineno[lineno]) + if contexts == [self.EMPTY]: + contexts_label = self.EMPTY + else: + contexts_label = "{} ctx".format(len(contexts)) + context_list = contexts + + lines.append(SimpleNamespace( + tokens=tokens, + number=lineno, + category=category, + statement=(lineno in analysis.statements), + contexts=contexts, + contexts_label=contexts_label, + context_list=context_list, + short_annotations=short_annotations, + long_annotations=long_annotations, + )) + + file_data = SimpleNamespace( + relative_filename=fr.relative_filename(), + nums=analysis.numbers, + lines=lines, + ) + + return file_data + + +class HtmlReporter(object): + """HTML reporting.""" + + # These files will be copied from the htmlfiles directory to the output + # directory. 
+ STATIC_FILES = [ + ("style.css", ""), + ("jquery.min.js", "jquery"), + ("jquery.ba-throttle-debounce.min.js", "jquery-throttle-debounce"), + ("jquery.hotkeys.js", "jquery-hotkeys"), + ("jquery.isonscreen.js", "jquery-isonscreen"), + ("jquery.tablesorter.min.js", "jquery-tablesorter"), + ("coverage_html.js", ""), + ("keybd_closed.png", ""), + ("keybd_open.png", ""), + ("favicon_32.png", ""), + ] + + def __init__(self, cov): + self.coverage = cov + self.config = self.coverage.config + self.directory = self.config.html_dir + + self.skip_covered = self.config.html_skip_covered + if self.skip_covered is None: + self.skip_covered = self.config.skip_covered + self.skip_empty = self.config.html_skip_empty + if self.skip_empty is None: + self.skip_empty= self.config.skip_empty + + title = self.config.html_title + if env.PY2: + title = title.decode("utf8") + + if self.config.extra_css: + self.extra_css = os.path.basename(self.config.extra_css) + else: + self.extra_css = None + + self.data = self.coverage.get_data() + self.has_arcs = self.data.has_arcs() + + self.file_summaries = [] + self.all_files_nums = [] + self.incr = IncrementalChecker(self.directory) + self.datagen = HtmlDataGeneration(self.coverage) + self.totals = Numbers() + + self.template_globals = { + # Functions available in the templates. + 'escape': escape, + 'pair': pair, + 'len': len, + + # Constants for this report. + '__url__': coverage.__url__, + '__version__': coverage.__version__, + 'title': title, + 'time_stamp': format_local_datetime(datetime.datetime.now()), + 'extra_css': self.extra_css, + 'has_arcs': self.has_arcs, + 'show_contexts': self.config.show_contexts, + + # Constants for all reports. + # These css classes determine which lines are highlighted by default. + 'category': { + 'exc': 'exc show_exc', + 'mis': 'mis show_mis', + 'par': 'par run show_par', + 'run': 'run', + } + } + self.pyfile_html_source = read_data("pyfile.html") + self.source_tmpl = Templite(self.pyfile_html_source, self.template_globals) + + def report(self, morfs): + """Generate an HTML report for `morfs`. + + `morfs` is a list of modules or file names. + + """ + # Read the status data and check that this run used the same + # global data as the last run. + self.incr.read() + self.incr.check_global_data(self.config, self.pyfile_html_source) + + # Process all the files. + for fr, analysis in get_analysis_to_report(self.coverage, morfs): + self.html_file(fr, analysis) + + if not self.all_files_nums: + raise CoverageException("No data to report.") + + self.totals = sum(self.all_files_nums) + + # Write the index file. + self.index_file() + + self.make_local_static_report_files() + return self.totals.n_statements and self.totals.pc_covered + + def make_local_static_report_files(self): + """Make local instances of static files for HTML report.""" + # The files we provide must always be copied. + for static, pkgdir in self.STATIC_FILES: + shutil.copyfile( + data_filename(static, pkgdir), + os.path.join(self.directory, static) + ) + + # The user may have extra CSS they want copied. + if self.extra_css: + shutil.copyfile( + self.config.extra_css, + os.path.join(self.directory, self.extra_css) + ) + + def html_file(self, fr, analysis): + """Generate an HTML file for one source file.""" + rootname = flat_rootname(fr.relative_filename()) + html_filename = rootname + ".html" + ensure_dir(self.directory) + html_path = os.path.join(self.directory, html_filename) + + # Get the numbers for this file. 
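
For orientation, a hedged sketch of the public API that drives this HTML reporting; the measured module name is hypothetical.

    import coverage

    cov = coverage.Coverage()
    cov.start()
    import mymodule            # hypothetical code under measurement
    cov.stop()
    cov.save()
    cov.html_report(directory="htmlcov")   # writes index.html plus one page per measured file
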
+ nums = analysis.numbers + self.all_files_nums.append(nums) + + if self.skip_covered: + # Don't report on 100% files. + no_missing_lines = (nums.n_missing == 0) + no_missing_branches = (nums.n_partial_branches == 0) + if no_missing_lines and no_missing_branches: + # If there's an existing file, remove it. + file_be_gone(html_path) + return + + if self.skip_empty: + # Don't report on empty files. + if nums.n_statements == 0: + file_be_gone(html_path) + return + + # Find out if the file on disk is already correct. + if self.incr.can_skip_file(self.data, fr, rootname): + self.file_summaries.append(self.incr.index_info(rootname)) + return + + # Write the HTML page for this file. + file_data = self.datagen.data_for_file(fr, analysis) + for ldata in file_data.lines: + # Build the HTML for the line. + html = [] + for tok_type, tok_text in ldata.tokens: + if tok_type == "ws": + html.append(escape(tok_text)) + else: + tok_html = escape(tok_text) or ' ' + html.append( + u'{}'.format(tok_type, tok_html) + ) + ldata.html = ''.join(html) + + if ldata.short_annotations: + # 202F is NARROW NO-BREAK SPACE. + # 219B is RIGHTWARDS ARROW WITH STROKE. + ldata.annotate = u",   ".join( + u"{} ↛ {}".format(ldata.number, d) + for d in ldata.short_annotations + ) + else: + ldata.annotate = None + + if ldata.long_annotations: + longs = ldata.long_annotations + if len(longs) == 1: + ldata.annotate_long = longs[0] + else: + ldata.annotate_long = u"{:d} missed branches: {}".format( + len(longs), + u", ".join( + u"{:d}) {}".format(num, ann_long) + for num, ann_long in enumerate(longs, start=1) + ), + ) + else: + ldata.annotate_long = None + + css_classes = [] + if ldata.category: + css_classes.append(self.template_globals['category'][ldata.category]) + ldata.css_class = ' '.join(css_classes) or "pln" + + html = self.source_tmpl.render(file_data.__dict__) + write_html(html_path, html) + + # Save this file's information for the index file. + index_info = { + 'nums': nums, + 'html_filename': html_filename, + 'relative_filename': fr.relative_filename(), + } + self.file_summaries.append(index_info) + self.incr.set_index_info(rootname, index_info) + + def index_file(self): + """Write the index.html file for this report.""" + index_tmpl = Templite(read_data("index.html"), self.template_globals) + + html = index_tmpl.render({ + 'files': self.file_summaries, + 'totals': self.totals, + }) + + write_html(os.path.join(self.directory, "index.html"), html) + + # Write the latest hashes for next time. + self.incr.write() + + +class IncrementalChecker(object): + """Logic and data to support incremental reporting.""" + + STATUS_FILE = "status.json" + STATUS_FORMAT = 2 + + # pylint: disable=wrong-spelling-in-comment,useless-suppression + # The data looks like: + # + # { + # "format": 2, + # "globals": "540ee119c15d52a68a53fe6f0897346d", + # "version": "4.0a1", + # "files": { + # "cogapp___init__": { + # "hash": "e45581a5b48f879f301c0f30bf77a50c", + # "index": { + # "html_filename": "cogapp___init__.html", + # "relative_filename": "cogapp/__init__", + # "nums": [ 1, 14, 0, 0, 0, 0, 0 ] + # } + # }, + # ... + # "cogapp_whiteutils": { + # "hash": "8504bb427fc488c4176809ded0277d51", + # "index": { + # "html_filename": "cogapp_whiteutils.html", + # "relative_filename": "cogapp/whiteutils", + # "nums": [ 1, 59, 0, 1, 28, 2, 2 ] + # } + # } + # } + # } + + def __init__(self, directory): + self.directory = directory + self.reset() + + def reset(self): + """Initialize to empty. 
Causes all files to be reported.""" + self.globals = '' + self.files = {} + + def read(self): + """Read the information we stored last time.""" + usable = False + try: + status_file = os.path.join(self.directory, self.STATUS_FILE) + with open(status_file) as fstatus: + status = json.load(fstatus) + except (IOError, ValueError): + usable = False + else: + usable = True + if status['format'] != self.STATUS_FORMAT: + usable = False + elif status['version'] != coverage.__version__: + usable = False + + if usable: + self.files = {} + for filename, fileinfo in iitems(status['files']): + fileinfo['index']['nums'] = Numbers(*fileinfo['index']['nums']) + self.files[filename] = fileinfo + self.globals = status['globals'] + else: + self.reset() + + def write(self): + """Write the current status.""" + status_file = os.path.join(self.directory, self.STATUS_FILE) + files = {} + for filename, fileinfo in iitems(self.files): + fileinfo['index']['nums'] = fileinfo['index']['nums'].init_args() + files[filename] = fileinfo + + status = { + 'format': self.STATUS_FORMAT, + 'version': coverage.__version__, + 'globals': self.globals, + 'files': files, + } + with open(status_file, "w") as fout: + json.dump(status, fout, separators=(',', ':')) + + def check_global_data(self, *data): + """Check the global data that can affect incremental reporting.""" + m = Hasher() + for d in data: + m.update(d) + these_globals = m.hexdigest() + if self.globals != these_globals: + self.reset() + self.globals = these_globals + + def can_skip_file(self, data, fr, rootname): + """Can we skip reporting this file? + + `data` is a CoverageData object, `fr` is a `FileReporter`, and + `rootname` is the name being used for the file. + """ + m = Hasher() + m.update(fr.source().encode('utf-8')) + add_data_to_hash(data, fr.filename, m) + this_hash = m.hexdigest() + + that_hash = self.file_hash(rootname) + + if this_hash == that_hash: + # Nothing has changed to require the file to be reported again. + return True + else: + self.set_file_hash(rootname, this_hash) + return False + + def file_hash(self, fname): + """Get the hash of `fname`'s contents.""" + return self.files.get(fname, {}).get('hash', '') + + def set_file_hash(self, fname, val): + """Set the hash of `fname`'s contents.""" + self.files.setdefault(fname, {})['hash'] = val + + def index_info(self, fname): + """Get the information for index.html for `fname`.""" + return self.files.get(fname, {}).get('index', {}) + + def set_index_info(self, fname, info): + """Set the information for index.html for `fname`.""" + self.files.setdefault(fname, {})['index'] = info + + +# Helpers for templates and generating HTML + +def escape(t): + """HTML-escape the text in `t`. + + This is only suitable for HTML text, not attributes. + + """ + # Convert HTML special chars into HTML entities. 
+ return t.replace("&", "&").replace("<", "<") + + +def pair(ratio): + """Format a pair of numbers so JavaScript can read them in an attribute.""" + return "%s %s" % ratio diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/coverage_html.js b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/coverage_html.js new file mode 100644 index 000000000..27b49b36f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/coverage_html.js @@ -0,0 +1,616 @@ +// Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +// For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +// Coverage.py HTML report browser code. +/*jslint browser: true, sloppy: true, vars: true, plusplus: true, maxerr: 50, indent: 4 */ +/*global coverage: true, document, window, $ */ + +coverage = {}; + +// Find all the elements with shortkey_* class, and use them to assign a shortcut key. +coverage.assign_shortkeys = function () { + $("*[class*='shortkey_']").each(function (i, e) { + $.each($(e).attr("class").split(" "), function (i, c) { + if (/^shortkey_/.test(c)) { + $(document).bind('keydown', c.substr(9), function () { + $(e).click(); + }); + } + }); + }); +}; + +// Create the events for the help panel. +coverage.wire_up_help_panel = function () { + $("#keyboard_icon").click(function () { + // Show the help panel, and position it so the keyboard icon in the + // panel is in the same place as the keyboard icon in the header. + $(".help_panel").show(); + var koff = $("#keyboard_icon").offset(); + var poff = $("#panel_icon").position(); + $(".help_panel").offset({ + top: koff.top-poff.top, + left: koff.left-poff.left + }); + }); + $("#panel_icon").click(function () { + $(".help_panel").hide(); + }); +}; + +// Create the events for the filter box. +coverage.wire_up_filter = function () { + // Cache elements. + var table = $("table.index"); + var table_rows = table.find("tbody tr"); + var table_row_names = table_rows.find("td.name a"); + var no_rows = $("#no_rows"); + + // Create a duplicate table footer that we can modify with dynamic summed values. + var table_footer = $("table.index tfoot tr"); + var table_dynamic_footer = table_footer.clone(); + table_dynamic_footer.attr('class', 'total_dynamic hidden'); + table_footer.after(table_dynamic_footer); + + // Observe filter keyevents. + $("#filter").on("keyup change", $.debounce(150, function (event) { + var filter_value = $(this).val(); + + if (filter_value === "") { + // Filter box is empty, remove all filtering. + table_rows.removeClass("hidden"); + + // Show standard footer, hide dynamic footer. + table_footer.removeClass("hidden"); + table_dynamic_footer.addClass("hidden"); + + // Hide placeholder, show table. + if (no_rows.length > 0) { + no_rows.hide(); + } + table.show(); + + } + else { + // Filter table items by value. + var hidden = 0; + var shown = 0; + + // Hide / show elements. + $.each(table_row_names, function () { + var element = $(this).parents("tr"); + + if ($(this).text().indexOf(filter_value) === -1) { + // hide + element.addClass("hidden"); + hidden++; + } + else { + // show + element.removeClass("hidden"); + shown++; + } + }); + + // Show placeholder if no rows will be displayed. + if (no_rows.length > 0) { + if (shown === 0) { + // Show placeholder, hide table. + no_rows.show(); + table.hide(); + } + else { + // Hide placeholder, show table. 
+ no_rows.hide(); + table.show(); + } + } + + // Manage dynamic header: + if (hidden > 0) { + // Calculate new dynamic sum values based on visible rows. + for (var column = 2; column < 20; column++) { + // Calculate summed value. + var cells = table_rows.find('td:nth-child(' + column + ')'); + if (!cells.length) { + // No more columns...! + break; + } + + var sum = 0, numer = 0, denom = 0; + $.each(cells.filter(':visible'), function () { + var ratio = $(this).data("ratio"); + if (ratio) { + var splitted = ratio.split(" "); + numer += parseInt(splitted[0], 10); + denom += parseInt(splitted[1], 10); + } + else { + sum += parseInt(this.innerHTML, 10); + } + }); + + // Get footer cell element. + var footer_cell = table_dynamic_footer.find('td:nth-child(' + column + ')'); + + // Set value into dynamic footer cell element. + if (cells[0].innerHTML.indexOf('%') > -1) { + // Percentage columns use the numerator and denominator, + // and adapt to the number of decimal places. + var match = /\.([0-9]+)/.exec(cells[0].innerHTML); + var places = 0; + if (match) { + places = match[1].length; + } + var pct = numer * 100 / denom; + footer_cell.text(pct.toFixed(places) + '%'); + } + else { + footer_cell.text(sum); + } + } + + // Hide standard footer, show dynamic footer. + table_footer.addClass("hidden"); + table_dynamic_footer.removeClass("hidden"); + } + else { + // Show standard footer, hide dynamic footer. + table_footer.removeClass("hidden"); + table_dynamic_footer.addClass("hidden"); + } + } + })); + + // Trigger change event on setup, to force filter on page refresh + // (filter value may still be present). + $("#filter").trigger("change"); +}; + +// Loaded on index.html +coverage.index_ready = function ($) { + // Look for a localStorage item containing previous sort settings: + var sort_list = []; + var storage_name = "COVERAGE_INDEX_SORT"; + var stored_list = undefined; + try { + stored_list = localStorage.getItem(storage_name); + } catch(err) {} + + if (stored_list) { + sort_list = JSON.parse('[[' + stored_list + ']]'); + } + + // Create a new widget which exists only to save and restore + // the sort order: + $.tablesorter.addWidget({ + id: "persistentSort", + + // Format is called by the widget before displaying: + format: function (table) { + if (table.config.sortList.length === 0 && sort_list.length > 0) { + // This table hasn't been sorted before - we'll use + // our stored settings: + $(table).trigger('sorton', [sort_list]); + } + else { + // This is not the first load - something has + // already defined sorting so we'll just update + // our stored value to match: + sort_list = table.config.sortList; + } + } + }); + + // Configure our tablesorter to handle the variable number of + // columns produced depending on report options: + var headers = []; + var col_count = $("table.index > thead > tr > th").length; + + headers[0] = { sorter: 'text' }; + for (i = 1; i < col_count-1; i++) { + headers[i] = { sorter: 'digit' }; + } + headers[col_count-1] = { sorter: 'percent' }; + + // Enable the table sorter: + $("table.index").tablesorter({ + widgets: ['persistentSort'], + headers: headers + }); + + coverage.assign_shortkeys(); + coverage.wire_up_help_panel(); + coverage.wire_up_filter(); + + // Watch for page unload events so we can save the final sort settings: + $(window).on("unload", function () { + try { + localStorage.setItem(storage_name, sort_list.toString()) + } catch(err) {} + }); +}; + +// -- pyfile stuff -- + +coverage.LINE_FILTERS_STORAGE = "COVERAGE_LINE_FILTERS"; + 
+coverage.pyfile_ready = function ($) { + // If we're directed to a particular line number, highlight the line. + var frag = location.hash; + if (frag.length > 2 && frag[1] === 't') { + $(frag).addClass('highlight'); + coverage.set_sel(parseInt(frag.substr(2), 10)); + } + else { + coverage.set_sel(0); + } + + $(document) + .bind('keydown', 'j', coverage.to_next_chunk_nicely) + .bind('keydown', 'k', coverage.to_prev_chunk_nicely) + .bind('keydown', '0', coverage.to_top) + .bind('keydown', '1', coverage.to_first_chunk) + ; + + $(".button_toggle_run").click(function (evt) {coverage.toggle_lines(evt.target, "run");}); + $(".button_toggle_exc").click(function (evt) {coverage.toggle_lines(evt.target, "exc");}); + $(".button_toggle_mis").click(function (evt) {coverage.toggle_lines(evt.target, "mis");}); + $(".button_toggle_par").click(function (evt) {coverage.toggle_lines(evt.target, "par");}); + + coverage.filters = undefined; + try { + coverage.filters = localStorage.getItem(coverage.LINE_FILTERS_STORAGE); + } catch(err) {} + + if (coverage.filters) { + coverage.filters = JSON.parse(coverage.filters); + } + else { + coverage.filters = {run: false, exc: true, mis: true, par: true}; + } + + for (cls in coverage.filters) { + coverage.set_line_visibilty(cls, coverage.filters[cls]); + } + + coverage.assign_shortkeys(); + coverage.wire_up_help_panel(); + + coverage.init_scroll_markers(); + + // Rebuild scroll markers when the window height changes. + $(window).resize(coverage.build_scroll_markers); +}; + +coverage.toggle_lines = function (btn, cls) { + var onoff = !$(btn).hasClass("show_" + cls); + coverage.set_line_visibilty(cls, onoff); + coverage.build_scroll_markers(); + coverage.filters[cls] = onoff; + try { + localStorage.setItem(coverage.LINE_FILTERS_STORAGE, JSON.stringify(coverage.filters)); + } catch(err) {} +}; + +coverage.set_line_visibilty = function (cls, onoff) { + var show = "show_" + cls; + var btn = $(".button_toggle_" + cls); + if (onoff) { + $("#source ." + cls).addClass(show); + btn.addClass(show); + } + else { + $("#source ." + cls).removeClass(show); + btn.removeClass(show); + } +}; + +// Return the nth line div. +coverage.line_elt = function (n) { + return $("#t" + n); +}; + +// Return the nth line number div. +coverage.num_elt = function (n) { + return $("#n" + n); +}; + +// Set the selection. b and e are line numbers. +coverage.set_sel = function (b, e) { + // The first line selected. + coverage.sel_begin = b; + // The next line not selected. + coverage.sel_end = (e === undefined) ? b+1 : e; +}; + +coverage.to_top = function () { + coverage.set_sel(0, 1); + coverage.scroll_window(0); +}; + +coverage.to_first_chunk = function () { + coverage.set_sel(0, 1); + coverage.to_next_chunk(); +}; + +// Return a string indicating what kind of chunk this line belongs to, +// or null if not a chunk. +coverage.chunk_indicator = function (line_elt) { + var klass = line_elt.attr('class'); + if (klass) { + var m = klass.match(/\bshow_\w+\b/); + if (m) { + return m[0]; + } + } + return null; +}; + +coverage.to_next_chunk = function () { + var c = coverage; + + // Find the start of the next colored chunk. + var probe = c.sel_end; + var chunk_indicator, probe_line; + while (true) { + probe_line = c.line_elt(probe); + if (probe_line.length === 0) { + return; + } + chunk_indicator = c.chunk_indicator(probe_line); + if (chunk_indicator) { + break; + } + probe++; + } + + // There's a next chunk, `probe` points to it. + var begin = probe; + + // Find the end of this chunk. 
+ var next_indicator = chunk_indicator; + while (next_indicator === chunk_indicator) { + probe++; + probe_line = c.line_elt(probe); + next_indicator = c.chunk_indicator(probe_line); + } + c.set_sel(begin, probe); + c.show_selection(); +}; + +coverage.to_prev_chunk = function () { + var c = coverage; + + // Find the end of the prev colored chunk. + var probe = c.sel_begin-1; + var probe_line = c.line_elt(probe); + if (probe_line.length === 0) { + return; + } + var chunk_indicator = c.chunk_indicator(probe_line); + while (probe > 0 && !chunk_indicator) { + probe--; + probe_line = c.line_elt(probe); + if (probe_line.length === 0) { + return; + } + chunk_indicator = c.chunk_indicator(probe_line); + } + + // There's a prev chunk, `probe` points to its last line. + var end = probe+1; + + // Find the beginning of this chunk. + var prev_indicator = chunk_indicator; + while (prev_indicator === chunk_indicator) { + probe--; + probe_line = c.line_elt(probe); + prev_indicator = c.chunk_indicator(probe_line); + } + c.set_sel(probe+1, end); + c.show_selection(); +}; + +// Return the line number of the line nearest pixel position pos +coverage.line_at_pos = function (pos) { + var l1 = coverage.line_elt(1), + l2 = coverage.line_elt(2), + result; + if (l1.length && l2.length) { + var l1_top = l1.offset().top, + line_height = l2.offset().top - l1_top, + nlines = (pos - l1_top) / line_height; + if (nlines < 1) { + result = 1; + } + else { + result = Math.ceil(nlines); + } + } + else { + result = 1; + } + return result; +}; + +// Returns 0, 1, or 2: how many of the two ends of the selection are on +// the screen right now? +coverage.selection_ends_on_screen = function () { + if (coverage.sel_begin === 0) { + return 0; + } + + var top = coverage.line_elt(coverage.sel_begin); + var next = coverage.line_elt(coverage.sel_end-1); + + return ( + (top.isOnScreen() ? 1 : 0) + + (next.isOnScreen() ? 1 : 0) + ); +}; + +coverage.to_next_chunk_nicely = function () { + coverage.finish_scrolling(); + if (coverage.selection_ends_on_screen() === 0) { + // The selection is entirely off the screen: select the top line on + // the screen. + var win = $(window); + coverage.select_line_or_chunk(coverage.line_at_pos(win.scrollTop())); + } + coverage.to_next_chunk(); +}; + +coverage.to_prev_chunk_nicely = function () { + coverage.finish_scrolling(); + if (coverage.selection_ends_on_screen() === 0) { + var win = $(window); + coverage.select_line_or_chunk(coverage.line_at_pos(win.scrollTop() + win.height())); + } + coverage.to_prev_chunk(); +}; + +// Select line number lineno, or if it is in a colored chunk, select the +// entire chunk +coverage.select_line_or_chunk = function (lineno) { + var c = coverage; + var probe_line = c.line_elt(lineno); + if (probe_line.length === 0) { + return; + } + var the_indicator = c.chunk_indicator(probe_line); + if (the_indicator) { + // The line is in a highlighted chunk. + // Search backward for the first line. + var probe = lineno; + var indicator = the_indicator; + while (probe > 0 && indicator === the_indicator) { + probe--; + probe_line = c.line_elt(probe); + if (probe_line.length === 0) { + break; + } + indicator = c.chunk_indicator(probe_line); + } + var begin = probe + 1; + + // Search forward for the last line. 
+ probe = lineno; + indicator = the_indicator; + while (indicator === the_indicator) { + probe++; + probe_line = c.line_elt(probe); + indicator = c.chunk_indicator(probe_line); + } + + coverage.set_sel(begin, probe); + } + else { + coverage.set_sel(lineno); + } +}; + +coverage.show_selection = function () { + var c = coverage; + + // Highlight the lines in the chunk + $(".linenos .highlight").removeClass("highlight"); + for (var probe = c.sel_begin; probe > 0 && probe < c.sel_end; probe++) { + c.num_elt(probe).addClass("highlight"); + } + + c.scroll_to_selection(); +}; + +coverage.scroll_to_selection = function () { + // Scroll the page if the chunk isn't fully visible. + if (coverage.selection_ends_on_screen() < 2) { + // Need to move the page. The html,body trick makes it scroll in all + // browsers, got it from http://stackoverflow.com/questions/3042651 + var top = coverage.line_elt(coverage.sel_begin); + var top_pos = parseInt(top.offset().top, 10); + coverage.scroll_window(top_pos - 30); + } +}; + +coverage.scroll_window = function (to_pos) { + $("html,body").animate({scrollTop: to_pos}, 200); +}; + +coverage.finish_scrolling = function () { + $("html,body").stop(true, true); +}; + +coverage.init_scroll_markers = function () { + var c = coverage; + // Init some variables + c.lines_len = $('#source p').length; + c.body_h = $('body').height(); + c.header_h = $('div#header').height(); + + // Build html + c.build_scroll_markers(); +}; + +coverage.build_scroll_markers = function () { + var c = coverage, + min_line_height = 3, + max_line_height = 10, + visible_window_h = $(window).height(); + + c.lines_to_mark = $('#source').find('p.show_run, p.show_mis, p.show_exc, p.show_exc, p.show_par'); + $('#scroll_marker').remove(); + // Don't build markers if the window has no scroll bar. + if (c.body_h <= visible_window_h) { + return; + } + + $("body").append("
<div id='scroll_marker'>&nbsp;</div>
    "); + var scroll_marker = $('#scroll_marker'), + marker_scale = scroll_marker.height() / c.body_h, + line_height = scroll_marker.height() / c.lines_len; + + // Line height must be between the extremes. + if (line_height > min_line_height) { + if (line_height > max_line_height) { + line_height = max_line_height; + } + } + else { + line_height = min_line_height; + } + + var previous_line = -99, + last_mark, + last_top, + offsets = {}; + + // Calculate line offsets outside loop to prevent relayouts + c.lines_to_mark.each(function() { + offsets[this.id] = $(this).offset().top; + }); + c.lines_to_mark.each(function () { + var id_name = $(this).attr('id'), + line_top = Math.round(offsets[id_name] * marker_scale), + line_number = parseInt(id_name.substring(1, id_name.length)); + + if (line_number === previous_line + 1) { + // If this solid missed block just make previous mark higher. + last_mark.css({ + 'height': line_top + line_height - last_top + }); + } + else { + // Add colored line in scroll_marker block. + scroll_marker.append('
    '); + last_mark = $('#m' + line_number); + last_mark.css({ + 'height': line_height, + 'top': line_top + }); + last_top = line_top; + } + + previous_line = line_number; + }); +}; diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/favicon_32.png b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/favicon_32.png new file mode 100644 index 000000000..8649f0475 Binary files /dev/null and b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/favicon_32.png differ diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/index.html b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/index.html new file mode 100644 index 000000000..983db0612 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/index.html @@ -0,0 +1,119 @@ +{# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 #} +{# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt #} + + + + + + {{ title|escape }} + + + {% if extra_css %} + + {% endif %} + + + + + + + + + + + +
    + Hide keyboard shortcuts +

    Hot-keys on this page

    +
    +

    + n + s + m + x + {% if has_arcs %} + b + p + {% endif %} + c   change column sorting +

    +
    +
    + +
    + + + {# The title="" attr doesn"t work in Safari. #} + + + + + + {% if has_arcs %} + + + {% endif %} + + + + {# HTML syntax requires thead, tfoot, tbody #} + + + + + + + {% if has_arcs %} + + + {% endif %} + + + + + {% for file in files %} + + + + + + {% if has_arcs %} + + + {% endif %} + + + {% endfor %} + +
    Module  statements  missing  excluded  branches  partial  coverage
    Total  {{totals.n_statements}}  {{totals.n_missing}}  {{totals.n_excluded}}  {{totals.n_branches}}  {{totals.n_partial_branches}}  {{totals.pc_covered_str}}%
    {{file.relative_filename}}  {{file.nums.n_statements}}  {{file.nums.n_missing}}  {{file.nums.n_excluded}}  {{file.nums.n_branches}}  {{file.nums.n_partial_branches}}  {{file.nums.pc_covered_str}}%
    + +

    + No items found using the specified filter. +

    +
    + + + + + diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/jquery.ba-throttle-debounce.min.js b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/jquery.ba-throttle-debounce.min.js new file mode 100644 index 000000000..648fe5d3c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/jquery.ba-throttle-debounce.min.js @@ -0,0 +1,9 @@ +/* + * jQuery throttle / debounce - v1.1 - 3/7/2010 + * http://benalman.com/projects/jquery-throttle-debounce-plugin/ + * + * Copyright (c) 2010 "Cowboy" Ben Alman + * Dual licensed under the MIT and GPL licenses. + * http://benalman.com/about/license/ + */ +(function(b,c){var $=b.jQuery||b.Cowboy||(b.Cowboy={}),a;$.throttle=a=function(e,f,j,i){var h,d=0;if(typeof f!=="boolean"){i=j;j=f;f=c}function g(){var o=this,m=+new Date()-d,n=arguments;function l(){d=+new Date();j.apply(o,n)}function k(){h=c}if(i&&!h){l()}h&&clearTimeout(h);if(i===c&&m>e){l()}else{if(f!==true){h=setTimeout(i?k:l,i===c?e-m:e)}}}if($.guid){g.guid=j.guid=j.guid||$.guid++}return g};$.debounce=function(d,e,f){return f===c?a(d,e,false):a(d,f,e!==false)}})(this); diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/jquery.hotkeys.js b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/jquery.hotkeys.js new file mode 100644 index 000000000..09b21e03c --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/jquery.hotkeys.js @@ -0,0 +1,99 @@ +/* + * jQuery Hotkeys Plugin + * Copyright 2010, John Resig + * Dual licensed under the MIT or GPL Version 2 licenses. + * + * Based upon the plugin by Tzury Bar Yochay: + * http://github.com/tzuryby/hotkeys + * + * Original idea by: + * Binny V A, http://www.openjs.com/scripts/events/keyboard_shortcuts/ +*/ + +(function(jQuery){ + + jQuery.hotkeys = { + version: "0.8", + + specialKeys: { + 8: "backspace", 9: "tab", 13: "return", 16: "shift", 17: "ctrl", 18: "alt", 19: "pause", + 20: "capslock", 27: "esc", 32: "space", 33: "pageup", 34: "pagedown", 35: "end", 36: "home", + 37: "left", 38: "up", 39: "right", 40: "down", 45: "insert", 46: "del", + 96: "0", 97: "1", 98: "2", 99: "3", 100: "4", 101: "5", 102: "6", 103: "7", + 104: "8", 105: "9", 106: "*", 107: "+", 109: "-", 110: ".", 111 : "/", + 112: "f1", 113: "f2", 114: "f3", 115: "f4", 116: "f5", 117: "f6", 118: "f7", 119: "f8", + 120: "f9", 121: "f10", 122: "f11", 123: "f12", 144: "numlock", 145: "scroll", 191: "/", 224: "meta" + }, + + shiftNums: { + "`": "~", "1": "!", "2": "@", "3": "#", "4": "$", "5": "%", "6": "^", "7": "&", + "8": "*", "9": "(", "0": ")", "-": "_", "=": "+", ";": ": ", "'": "\"", ",": "<", + ".": ">", "/": "?", "\\": "|" + } + }; + + function keyHandler( handleObj ) { + // Only care when a possible input has been specified + if ( typeof handleObj.data !== "string" ) { + return; + } + + var origHandler = handleObj.handler, + keys = handleObj.data.toLowerCase().split(" "); + + handleObj.handler = function( event ) { + // Don't fire in text-accepting inputs that we didn't directly bind to + if ( this !== event.target && (/textarea|select/i.test( event.target.nodeName ) || + event.target.type === "text") ) { + return; + } + + // Keypress represents characters, not special keys + var special = event.type !== "keypress" && jQuery.hotkeys.specialKeys[ event.which ], + character = String.fromCharCode( event.which ).toLowerCase(), + key, modif = "", possible = {}; + + // check combinations (alt|ctrl|shift+anything) + if ( 
event.altKey && special !== "alt" ) { + modif += "alt+"; + } + + if ( event.ctrlKey && special !== "ctrl" ) { + modif += "ctrl+"; + } + + // TODO: Need to make sure this works consistently across platforms + if ( event.metaKey && !event.ctrlKey && special !== "meta" ) { + modif += "meta+"; + } + + if ( event.shiftKey && special !== "shift" ) { + modif += "shift+"; + } + + if ( special ) { + possible[ modif + special ] = true; + + } else { + possible[ modif + character ] = true; + possible[ modif + jQuery.hotkeys.shiftNums[ character ] ] = true; + + // "$" can be triggered as "Shift+4" or "Shift+$" or just "$" + if ( modif === "shift+" ) { + possible[ jQuery.hotkeys.shiftNums[ character ] ] = true; + } + } + + for ( var i = 0, l = keys.length; i < l; i++ ) { + if ( possible[ keys[i] ] ) { + return origHandler.apply( this, arguments ); + } + } + }; + } + + jQuery.each([ "keydown", "keyup", "keypress" ], function() { + jQuery.event.special[ this ] = { add: keyHandler }; + }); + +})( jQuery ); diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/jquery.isonscreen.js b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/jquery.isonscreen.js new file mode 100644 index 000000000..0182ebd21 --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/jquery.isonscreen.js @@ -0,0 +1,53 @@ +/* Copyright (c) 2010 + * @author Laurence Wheway + * Dual licensed under the MIT (http://www.opensource.org/licenses/mit-license.php) + * and GPL (http://www.opensource.org/licenses/gpl-license.php) licenses. + * + * @version 1.2.0 + */ +(function($) { + jQuery.extend({ + isOnScreen: function(box, container) { + //ensure numbers come in as intgers (not strings) and remove 'px' is it's there + for(var i in box){box[i] = parseFloat(box[i])}; + for(var i in container){container[i] = parseFloat(container[i])}; + + if(!container){ + container = { + left: $(window).scrollLeft(), + top: $(window).scrollTop(), + width: $(window).width(), + height: $(window).height() + } + } + + if( box.left+box.width-container.left > 0 && + box.left < container.width+container.left && + box.top+box.height-container.top > 0 && + box.top < container.height+container.top + ) return true; + return false; + } + }) + + + jQuery.fn.isOnScreen = function (container) { + for(var i in container){container[i] = parseFloat(container[i])}; + + if(!container){ + container = { + left: $(window).scrollLeft(), + top: $(window).scrollTop(), + width: $(window).width(), + height: $(window).height() + } + } + + if( $(this).offset().left+$(this).width()-container.left > 0 && + $(this).offset().left < container.width+container.left && + $(this).offset().top+$(this).height()-container.top > 0 && + $(this).offset().top < container.height+container.top + ) return true; + return false; + } +})(jQuery); diff --git a/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/jquery.min.js b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/jquery.min.js new file mode 100644 index 000000000..d1608e37f --- /dev/null +++ b/.direnv/python-3.8.20/lib/python3.8/site-packages/coverage/htmlfiles/jquery.min.js @@ -0,0 +1,4 @@ +/*! jQuery v1.11.1 | (c) 2005, 2014 jQuery Foundation, Inc. 
| jquery.org/license */ +!function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){var c=[],d=c.slice,e=c.concat,f=c.push,g=c.indexOf,h={},i=h.toString,j=h.hasOwnProperty,k={},l="1.11.1",m=function(a,b){return new m.fn.init(a,b)},n=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,o=/^-ms-/,p=/-([\da-z])/gi,q=function(a,b){return b.toUpperCase()};m.fn=m.prototype={jquery:l,constructor:m,selector:"",length:0,toArray:function(){return d.call(this)},get:function(a){return null!=a?0>a?this[a+this.length]:this[a]:d.call(this)},pushStack:function(a){var b=m.merge(this.constructor(),a);return b.prevObject=this,b.context=this.context,b},each:function(a,b){return m.each(this,a,b)},map:function(a){return this.pushStack(m.map(this,function(b,c){return a.call(b,c,b)}))},slice:function(){return this.pushStack(d.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(a){var b=this.length,c=+a+(0>a?b:0);return this.pushStack(c>=0&&b>c?[this[c]]:[])},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:c.sort,splice:c.splice},m.extend=m.fn.extend=function(){var a,b,c,d,e,f,g=arguments[0]||{},h=1,i=arguments.length,j=!1;for("boolean"==typeof g&&(j=g,g=arguments[h]||{},h++),"object"==typeof g||m.isFunction(g)||(g={}),h===i&&(g=this,h--);i>h;h++)if(null!=(e=arguments[h]))for(d in e)a=g[d],c=e[d],g!==c&&(j&&c&&(m.isPlainObject(c)||(b=m.isArray(c)))?(b?(b=!1,f=a&&m.isArray(a)?a:[]):f=a&&m.isPlainObject(a)?a:{},g[d]=m.extend(j,f,c)):void 0!==c&&(g[d]=c));return g},m.extend({expando:"jQuery"+(l+Math.random()).replace(/\D/g,""),isReady:!0,error:function(a){throw new Error(a)},noop:function(){},isFunction:function(a){return"function"===m.type(a)},isArray:Array.isArray||function(a){return"array"===m.type(a)},isWindow:function(a){return null!=a&&a==a.window},isNumeric:function(a){return!m.isArray(a)&&a-parseFloat(a)>=0},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},isPlainObject:function(a){var b;if(!a||"object"!==m.type(a)||a.nodeType||m.isWindow(a))return!1;try{if(a.constructor&&!j.call(a,"constructor")&&!j.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}if(k.ownLast)for(b in a)return j.call(a,b);for(b in a);return void 0===b||j.call(a,b)},type:function(a){return null==a?a+"":"object"==typeof a||"function"==typeof a?h[i.call(a)]||"object":typeof a},globalEval:function(b){b&&m.trim(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(o,"ms-").replace(p,q)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,b,c){var d,e=0,f=a.length,g=r(a);if(c){if(g){for(;f>e;e++)if(d=b.apply(a[e],c),d===!1)break}else for(e in a)if(d=b.apply(a[e],c),d===!1)break}else if(g){for(;f>e;e++)if(d=b.call(a[e],e,a[e]),d===!1)break}else for(e in a)if(d=b.call(a[e],e,a[e]),d===!1)break;return a},trim:function(a){return null==a?"":(a+"").replace(n,"")},makeArray:function(a,b){var c=b||[];return null!=a&&(r(Object(a))?m.merge(c,"string"==typeof a?[a]:a):f.call(c,a)),c},inArray:function(a,b,c){var d;if(b){if(g)return g.call(b,a,c);for(d=b.length,c=c?0>c?Math.max(0,d+c):c:0;d>c;c++)if(c in b&&b[c]===a)return c}return-1},merge:function(a,b){var c=+b.length,d=0,e=a.length;while(c>d)a[e++]=b[d++];if(c!==c)while(void 0!==b[d])a[e++]=b[d++];return 
a.length=e,a},grep:function(a,b,c){for(var d,e=[],f=0,g=a.length,h=!c;g>f;f++)d=!b(a[f],f),d!==h&&e.push(a[f]);return e},map:function(a,b,c){var d,f=0,g=a.length,h=r(a),i=[];if(h)for(;g>f;f++)d=b(a[f],f,c),null!=d&&i.push(d);else for(f in a)d=b(a[f],f,c),null!=d&&i.push(d);return e.apply([],i)},guid:1,proxy:function(a,b){var c,e,f;return"string"==typeof b&&(f=a[b],b=a,a=f),m.isFunction(a)?(c=d.call(arguments,2),e=function(){return a.apply(b||this,c.concat(d.call(arguments)))},e.guid=a.guid=a.guid||m.guid++,e):void 0},now:function(){return+new Date},support:k}),m.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(a,b){h["[object "+b+"]"]=b.toLowerCase()});function r(a){var b=a.length,c=m.type(a);return"function"===c||m.isWindow(a)?!1:1===a.nodeType&&b?!0:"array"===c||0===b||"number"==typeof b&&b>0&&b-1 in a}var s=function(a){var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u="sizzle"+-new Date,v=a.document,w=0,x=0,y=gb(),z=gb(),A=gb(),B=function(a,b){return a===b&&(l=!0),0},C="undefined",D=1<<31,E={}.hasOwnProperty,F=[],G=F.pop,H=F.push,I=F.push,J=F.slice,K=F.indexOf||function(a){for(var b=0,c=this.length;c>b;b++)if(this[b]===a)return b;return-1},L="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",M="[\\x20\\t\\r\\n\\f]",N="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",O=N.replace("w","w#"),P="\\["+M+"*("+N+")(?:"+M+"*([*^$|!~]?=)"+M+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+O+"))|)"+M+"*\\]",Q=":("+N+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+P+")*)|.*)\\)|)",R=new RegExp("^"+M+"+|((?:^|[^\\\\])(?:\\\\.)*)"+M+"+$","g"),S=new RegExp("^"+M+"*,"+M+"*"),T=new RegExp("^"+M+"*([>+~]|"+M+")"+M+"*"),U=new RegExp("="+M+"*([^\\]'\"]*?)"+M+"*\\]","g"),V=new RegExp(Q),W=new RegExp("^"+O+"$"),X={ID:new RegExp("^#("+N+")"),CLASS:new RegExp("^\\.("+N+")"),TAG:new RegExp("^("+N.replace("w","w*")+")"),ATTR:new RegExp("^"+P),PSEUDO:new RegExp("^"+Q),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+L+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/^(?:input|select|textarea|button)$/i,Z=/^h\d$/i,$=/^[^{]+\{\s*\[native \w/,_=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ab=/[+~]/,bb=/'|\\/g,cb=new RegExp("\\\\([\\da-f]{1,6}"+M+"?|("+M+")|.)","ig"),db=function(a,b,c){var d="0x"+b-65536;return d!==d||c?b:0>d?String.fromCharCode(d+65536):String.fromCharCode(d>>10|55296,1023&d|56320)};try{I.apply(F=J.call(v.childNodes),v.childNodes),F[v.childNodes.length].nodeType}catch(eb){I={apply:F.length?function(a,b){H.apply(a,J.call(b))}:function(a,b){var c=a.length,d=0;while(a[c++]=b[d++]);a.length=c-1}}}function fb(a,b,d,e){var f,h,j,k,l,o,r,s,w,x;if((b?b.ownerDocument||b:v)!==n&&m(b),b=b||n,d=d||[],!a||"string"!=typeof a)return d;if(1!==(k=b.nodeType)&&9!==k)return[];if(p&&!e){if(f=_.exec(a))if(j=f[1]){if(9===k){if(h=b.getElementById(j),!h||!h.parentNode)return d;if(h.id===j)return d.push(h),d}else if(b.ownerDocument&&(h=b.ownerDocument.getElementById(j))&&t(b,h)&&h.id===j)return d.push(h),d}else{if(f[2])return I.apply(d,b.getElementsByTagName(a)),d;if((j=f[3])&&c.getElementsByClassName&&b.getElementsByClassName)return 
I.apply(d,b.getElementsByClassName(j)),d}if(c.qsa&&(!q||!q.test(a))){if(s=r=u,w=b,x=9===k&&a,1===k&&"object"!==b.nodeName.toLowerCase()){o=g(a),(r=b.getAttribute("id"))?s=r.replace(bb,"\\$&"):b.setAttribute("id",s),s="[id='"+s+"'] ",l=o.length;while(l--)o[l]=s+qb(o[l]);w=ab.test(a)&&ob(b.parentNode)||b,x=o.join(",")}if(x)try{return I.apply(d,w.querySelectorAll(x)),d}catch(y){}finally{r||b.removeAttribute("id")}}}return i(a.replace(R,"$1"),b,d,e)}function gb(){var a=[];function b(c,e){return a.push(c+" ")>d.cacheLength&&delete b[a.shift()],b[c+" "]=e}return b}function hb(a){return a[u]=!0,a}function ib(a){var b=n.createElement("div");try{return!!a(b)}catch(c){return!1}finally{b.parentNode&&b.parentNode.removeChild(b),b=null}}function jb(a,b){var c=a.split("|"),e=a.length;while(e--)d.attrHandle[c[e]]=b}function kb(a,b){var c=b&&a,d=c&&1===a.nodeType&&1===b.nodeType&&(~b.sourceIndex||D)-(~a.sourceIndex||D);if(d)return d;if(c)while(c=c.nextSibling)if(c===b)return-1;return a?1:-1}function lb(a){return function(b){var c=b.nodeName.toLowerCase();return"input"===c&&b.type===a}}function mb(a){return function(b){var c=b.nodeName.toLowerCase();return("input"===c||"button"===c)&&b.type===a}}function nb(a){return hb(function(b){return b=+b,hb(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function ob(a){return a&&typeof a.getElementsByTagName!==C&&a}c=fb.support={},f=fb.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?"HTML"!==b.nodeName:!1},m=fb.setDocument=function(a){var b,e=a?a.ownerDocument||a:v,g=e.defaultView;return e!==n&&9===e.nodeType&&e.documentElement?(n=e,o=e.documentElement,p=!f(e),g&&g!==g.top&&(g.addEventListener?g.addEventListener("unload",function(){m()},!1):g.attachEvent&&g.attachEvent("onunload",function(){m()})),c.attributes=ib(function(a){return a.className="i",!a.getAttribute("className")}),c.getElementsByTagName=ib(function(a){return a.appendChild(e.createComment("")),!a.getElementsByTagName("*").length}),c.getElementsByClassName=$.test(e.getElementsByClassName)&&ib(function(a){return a.innerHTML="
    ",a.firstChild.className="i",2===a.getElementsByClassName("i").length}),c.getById=ib(function(a){return o.appendChild(a).id=u,!e.getElementsByName||!e.getElementsByName(u).length}),c.getById?(d.find.ID=function(a,b){if(typeof b.getElementById!==C&&p){var c=b.getElementById(a);return c&&c.parentNode?[c]:[]}},d.filter.ID=function(a){var b=a.replace(cb,db);return function(a){return a.getAttribute("id")===b}}):(delete d.find.ID,d.filter.ID=function(a){var b=a.replace(cb,db);return function(a){var c=typeof a.getAttributeNode!==C&&a.getAttributeNode("id");return c&&c.value===b}}),d.find.TAG=c.getElementsByTagName?function(a,b){return typeof b.getElementsByTagName!==C?b.getElementsByTagName(a):void 0}:function(a,b){var c,d=[],e=0,f=b.getElementsByTagName(a);if("*"===a){while(c=f[e++])1===c.nodeType&&d.push(c);return d}return f},d.find.CLASS=c.getElementsByClassName&&function(a,b){return typeof b.getElementsByClassName!==C&&p?b.getElementsByClassName(a):void 0},r=[],q=[],(c.qsa=$.test(e.querySelectorAll))&&(ib(function(a){a.innerHTML="",a.querySelectorAll("[msallowclip^='']").length&&q.push("[*^$]="+M+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||q.push("\\["+M+"*(?:value|"+L+")"),a.querySelectorAll(":checked").length||q.push(":checked")}),ib(function(a){var b=e.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&q.push("name"+M+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||q.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),q.push(",.*:")})),(c.matchesSelector=$.test(s=o.matches||o.webkitMatchesSelector||o.mozMatchesSelector||o.oMatchesSelector||o.msMatchesSelector))&&ib(function(a){c.disconnectedMatch=s.call(a,"div"),s.call(a,"[s!='']:x"),r.push("!=",Q)}),q=q.length&&new RegExp(q.join("|")),r=r.length&&new RegExp(r.join("|")),b=$.test(o.compareDocumentPosition),t=b||$.test(o.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},B=b?function(a,b){if(a===b)return l=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===e||a.ownerDocument===v&&t(v,a)?-1:b===e||b.ownerDocument===v&&t(v,b)?1:k?K.call(k,a)-K.call(k,b):0:4&d?-1:1)}:function(a,b){if(a===b)return l=!0,0;var c,d=0,f=a.parentNode,g=b.parentNode,h=[a],i=[b];if(!f||!g)return a===e?-1:b===e?1:f?-1:g?1:k?K.call(k,a)-K.call(k,b):0;if(f===g)return kb(a,b);c=a;while(c=c.parentNode)h.unshift(c);c=b;while(c=c.parentNode)i.unshift(c);while(h[d]===i[d])d++;return d?kb(h[d],i[d]):h[d]===v?-1:i[d]===v?1:0},e):n},fb.matches=function(a,b){return fb(a,null,null,b)},fb.matchesSelector=function(a,b){if((a.ownerDocument||a)!==n&&m(a),b=b.replace(U,"='$1']"),!(!c.matchesSelector||!p||r&&r.test(b)||q&&q.test(b)))try{var d=s.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return fb(b,n,null,[a]).length>0},fb.contains=function(a,b){return(a.ownerDocument||a)!==n&&m(a),t(a,b)},fb.attr=function(a,b){(a.ownerDocument||a)!==n&&m(a);var e=d.attrHandle[b.toLowerCase()],f=e&&E.call(d.attrHandle,b.toLowerCase())?e(a,b,!p):void 0;return void 
0!==f?f:c.attributes||!p?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},fb.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},fb.uniqueSort=function(a){var b,d=[],e=0,f=0;if(l=!c.detectDuplicates,k=!c.sortStable&&a.slice(0),a.sort(B),l){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return k=null,a},e=fb.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=fb.selectors={cacheLength:50,createPseudo:hb,match:X,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(cb,db),a[3]=(a[3]||a[4]||a[5]||"").replace(cb,db),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||fb.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&fb.error(a[0]),a},PSEUDO:function(a){var b,c=!a[6]&&a[2];return X.CHILD.test(a[0])?null:(a[3]?a[2]=a[4]||a[5]||"":c&&V.test(c)&&(b=g(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(cb,db).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=y[a+" "];return b||(b=new RegExp("(^|"+M+")"+a+"("+M+"|$)"))&&y(a,function(a){return b.test("string"==typeof a.className&&a.className||typeof a.getAttribute!==C&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=fb.attr(d,a);return null==e?"!="===b:b?(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e+" ").indexOf(c)>-1:"|="===b?e===c||e.slice(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h;if(q){if(f){while(p){l=b;while(l=l[p])if(h?l.nodeName.toLowerCase()===r:1===l.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){k=q[u]||(q[u]={}),j=k[a]||[],n=j[0]===w&&j[1],m=j[0]===w&&j[2],l=n&&q.childNodes[n];while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if(1===l.nodeType&&++m&&l===b){k[a]=[w,n,m];break}}else if(s&&(j=(b[u]||(b[u]={}))[a])&&j[0]===w)m=j[1];else while(l=++n&&l&&l[p]||(m=n=0)||o.pop())if((h?l.nodeName.toLowerCase()===r:1===l.nodeType)&&++m&&(s&&((l[u]||(l[u]={}))[a]=[w,m]),l===b))break;return m-=e,m===d||m%d===0&&m/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||fb.error("unsupported pseudo: "+a);return e[u]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?hb(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=K.call(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:hb(function(a){var b=[],c=[],d=h(a.replace(R,"$1"));return d[u]?hb(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return b[0]=a,d(b,null,f,c),!c.pop()}}),has:hb(function(a){return 
function(b){return fb(a,b).length>0}}),contains:hb(function(a){return function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:hb(function(a){return W.test(a||"")||fb.error("unsupported lang: "+a),a=a.replace(cb,db).toLowerCase(),function(b){var c;do if(c=p?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===o},focus:function(a){return a===n.activeElement&&(!n.hasFocus||n.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return Z.test(a.nodeName)},input:function(a){return Y.test(a.nodeName)},button:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:nb(function(){return[0]}),last:nb(function(a,b){return[b-1]}),eq:nb(function(a,b,c){return[0>c?c+b:c]}),even:nb(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:nb(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:nb(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:nb(function(a,b,c){for(var d=0>c?c+b:c;++db;b++)d+=a[b].value;return d}function rb(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=x++;return b.first?function(b,c,f){while(b=b[d])if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var h,i,j=[w,f];if(g){while(b=b[d])if((1===b.nodeType||e)&&a(b,c,g))return!0}else while(b=b[d])if(1===b.nodeType||e){if(i=b[u]||(b[u]={}),(h=i[d])&&h[0]===w&&h[1]===f)return j[2]=h[2];if(i[d]=j,j[2]=a(b,c,g))return!0}}}function sb(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function tb(a,b,c){for(var d=0,e=b.length;e>d;d++)fb(a,b[d],c);return c}function ub(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(!c||c(f,d,e))&&(g.push(f),j&&b.push(h));return g}function vb(a,b,c,d,e,f){return d&&!d[u]&&(d=vb(d)),e&&!e[u]&&(e=vb(e,f)),hb(function(f,g,h,i){var j,k,l,m=[],n=[],o=g.length,p=f||tb(b||"*",h.nodeType?[h]:h,[]),q=!a||!f&&b?p:ub(p,m,a,h,i),r=c?e||(f?a:o||d)?[]:g:q;if(c&&c(q,r,h,i),d){j=ub(r,n),d(j,[],h,i),k=j.length;while(k--)(l=j[k])&&(r[n[k]]=!(q[n[k]]=l))}if(f){if(e||a){if(e){j=[],k=r.length;while(k--)(l=r[k])&&j.push(q[k]=l);e(null,r=[],j,i)}k=r.length;while(k--)(l=r[k])&&(j=e?K.call(f,l):m[k])>-1&&(f[j]=!(g[j]=l))}}else r=ub(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):I.apply(g,r)})}function wb(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.relative[" "],i=g?1:0,k=rb(function(a){return a===b},h,!0),l=rb(function(a){return K.call(b,a)>-1},h,!0),m=[function(a,c,d){return!g&&(d||c!==j)||((b=c).nodeType?k(a,c,d):l(a,c,d))}];f>i;i++)if(c=d.relative[a[i].type])m=[rb(sb(m),c)];else{if(c=d.filter[a[i].type].apply(null,a[i].matches),c[u]){for(e=++i;f>e;e++)if(d.relative[a[e].type])break;return vb(i>1&&sb(m),i>1&&qb(a.slice(0,i-1).concat({value:" 
"===a[i-2].type?"*":""})).replace(R,"$1"),c,e>i&&wb(a.slice(i,e)),f>e&&wb(a=a.slice(e)),f>e&&qb(a))}m.push(c)}return sb(m)}function xb(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var l,m,o,p=0,q="0",r=f&&[],s=[],t=j,u=f||e&&d.find.TAG("*",k),v=w+=null==t?1:Math.random()||.1,x=u.length;for(k&&(j=g!==n&&g);q!==x&&null!=(l=u[q]);q++){if(e&&l){m=0;while(o=a[m++])if(o(l,g,h)){i.push(l);break}k&&(w=v)}c&&((l=!o&&l)&&p--,f&&r.push(l))}if(p+=q,c&&q!==p){m=0;while(o=b[m++])o(r,s,g,h);if(f){if(p>0)while(q--)r[q]||s[q]||(s[q]=G.call(i));s=ub(s)}I.apply(i,s),k&&!f&&s.length>0&&p+b.length>1&&fb.uniqueSort(i)}return k&&(w=v,j=t),r};return c?hb(f):f}return h=fb.compile=function(a,b){var c,d=[],e=[],f=A[a+" "];if(!f){b||(b=g(a)),c=b.length;while(c--)f=wb(b[c]),f[u]?d.push(f):e.push(f);f=A(a,xb(e,d)),f.selector=a}return f},i=fb.select=function(a,b,e,f){var i,j,k,l,m,n="function"==typeof a&&a,o=!f&&g(a=n.selector||a);if(e=e||[],1===o.length){if(j=o[0]=o[0].slice(0),j.length>2&&"ID"===(k=j[0]).type&&c.getById&&9===b.nodeType&&p&&d.relative[j[1].type]){if(b=(d.find.ID(k.matches[0].replace(cb,db),b)||[])[0],!b)return e;n&&(b=b.parentNode),a=a.slice(j.shift().value.length)}i=X.needsContext.test(a)?0:j.length;while(i--){if(k=j[i],d.relative[l=k.type])break;if((m=d.find[l])&&(f=m(k.matches[0].replace(cb,db),ab.test(j[0].type)&&ob(b.parentNode)||b))){if(j.splice(i,1),a=f.length&&qb(j),!a)return I.apply(e,f),e;break}}}return(n||h(a,o))(f,b,!p,e,ab.test(a)&&ob(b.parentNode)||b),e},c.sortStable=u.split("").sort(B).join("")===u,c.detectDuplicates=!!l,m(),c.sortDetached=ib(function(a){return 1&a.compareDocumentPosition(n.createElement("div"))}),ib(function(a){return a.innerHTML="","#"===a.firstChild.getAttribute("href")})||jb("type|href|height|width",function(a,b,c){return c?void 0:a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),c.attributes&&ib(function(a){return a.innerHTML="",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||jb("value",function(a,b,c){return c||"input"!==a.nodeName.toLowerCase()?void 0:a.defaultValue}),ib(function(a){return null==a.getAttribute("disabled")})||jb(L,function(a,b,c){var d;return c?void 0:a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),fb}(a);m.find=s,m.expr=s.selectors,m.expr[":"]=m.expr.pseudos,m.unique=s.uniqueSort,m.text=s.getText,m.isXMLDoc=s.isXML,m.contains=s.contains;var t=m.expr.match.needsContext,u=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,v=/^.[^:#\[\.,]*$/;function w(a,b,c){if(m.isFunction(b))return m.grep(a,function(a,d){return!!b.call(a,d,a)!==c});if(b.nodeType)return m.grep(a,function(a){return a===b!==c});if("string"==typeof b){if(v.test(b))return m.filter(b,a,c);b=m.filter(b,a)}return m.grep(a,function(a){return m.inArray(a,b)>=0!==c})}m.filter=function(a,b,c){var d=b[0];return c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?m.find.matchesSelector(d,a)?[d]:[]:m.find.matches(a,m.grep(b,function(a){return 1===a.nodeType}))},m.fn.extend({find:function(a){var b,c=[],d=this,e=d.length;if("string"!=typeof a)return this.pushStack(m(a).filter(function(){for(b=0;e>b;b++)if(m.contains(d[b],this))return!0}));for(b=0;e>b;b++)m.find(a,d[b],c);return c=this.pushStack(e>1?m.unique(c):c),c.selector=this.selector?this.selector+" "+a:a,c},filter:function(a){return this.pushStack(w(this,a||[],!1))},not:function(a){return this.pushStack(w(this,a||[],!0))},is:function(a){return!!w(this,"string"==typeof a&&t.test(a)?m(a):a||[],!1).length}});var 
x,y=a.document,z=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,A=m.fn.init=function(a,b){var c,d;if(!a)return this;if("string"==typeof a){if(c="<"===a.charAt(0)&&">"===a.charAt(a.length-1)&&a.length>=3?[null,a,null]:z.exec(a),!c||!c[1]&&b)return!b||b.jquery?(b||x).find(a):this.constructor(b).find(a);if(c[1]){if(b=b instanceof m?b[0]:b,m.merge(this,m.parseHTML(c[1],b&&b.nodeType?b.ownerDocument||b:y,!0)),u.test(c[1])&&m.isPlainObject(b))for(c in b)m.isFunction(this[c])?this[c](b[c]):this.attr(c,b[c]);return this}if(d=y.getElementById(c[2]),d&&d.parentNode){if(d.id!==c[2])return x.find(a);this.length=1,this[0]=d}return this.context=y,this.selector=a,this}return a.nodeType?(this.context=this[0]=a,this.length=1,this):m.isFunction(a)?"undefined"!=typeof x.ready?x.ready(a):a(m):(void 0!==a.selector&&(this.selector=a.selector,this.context=a.context),m.makeArray(a,this))};A.prototype=m.fn,x=m(y);var B=/^(?:parents|prev(?:Until|All))/,C={children:!0,contents:!0,next:!0,prev:!0};m.extend({dir:function(a,b,c){var d=[],e=a[b];while(e&&9!==e.nodeType&&(void 0===c||1!==e.nodeType||!m(e).is(c)))1===e.nodeType&&d.push(e),e=e[b];return d},sibling:function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c}}),m.fn.extend({has:function(a){var b,c=m(a,this),d=c.length;return this.filter(function(){for(b=0;d>b;b++)if(m.contains(this,c[b]))return!0})},closest:function(a,b){for(var c,d=0,e=this.length,f=[],g=t.test(a)||"string"!=typeof a?m(a,b||this.context):0;e>d;d++)for(c=this[d];c&&c!==b;c=c.parentNode)if(c.nodeType<11&&(g?g.index(c)>-1:1===c.nodeType&&m.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?m.unique(f):f)},index:function(a){return a?"string"==typeof a?m.inArray(this[0],m(a)):m.inArray(a.jquery?a[0]:a,this):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(m.unique(m.merge(this.get(),m(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}});function D(a,b){do a=a[b];while(a&&1!==a.nodeType);return a}m.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return m.dir(a,"parentNode")},parentsUntil:function(a,b,c){return m.dir(a,"parentNode",c)},next:function(a){return D(a,"nextSibling")},prev:function(a){return D(a,"previousSibling")},nextAll:function(a){return m.dir(a,"nextSibling")},prevAll:function(a){return m.dir(a,"previousSibling")},nextUntil:function(a,b,c){return m.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return m.dir(a,"previousSibling",c)},siblings:function(a){return m.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return m.sibling(a.firstChild)},contents:function(a){return m.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:m.merge([],a.childNodes)}},function(a,b){m.fn[a]=function(c,d){var e=m.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=m.filter(d,e)),this.length>1&&(C[a]||(e=m.unique(e)),B.test(a)&&(e=e.reverse())),this.pushStack(e)}});var E=/\S+/g,F={};function G(a){var b=F[a]={};return m.each(a.match(E)||[],function(a,c){b[c]=!0}),b}m.Callbacks=function(a){a="string"==typeof a?F[a]||G(a):m.extend({},a);var b,c,d,e,f,g,h=[],i=!a.once&&[],j=function(l){for(c=a.memory&&l,d=!0,f=g||0,g=0,e=h.length,b=!0;h&&e>f;f++)if(h[f].apply(l[0],l[1])===!1&&a.stopOnFalse){c=!1;break}b=!1,h&&(i?i.length&&j(i.shift()):c?h=[]:k.disable())},k={add:function(){if(h){var d=h.length;!function f(b){m.each(b,function(b,c){var 
d=m.type(c);"function"===d?a.unique&&k.has(c)||h.push(c):c&&c.length&&"string"!==d&&f(c)})}(arguments),b?e=h.length:c&&(g=d,j(c))}return this},remove:function(){return h&&m.each(arguments,function(a,c){var d;while((d=m.inArray(c,h,d))>-1)h.splice(d,1),b&&(e>=d&&e--,f>=d&&f--)}),this},has:function(a){return a?m.inArray(a,h)>-1:!(!h||!h.length)},empty:function(){return h=[],e=0,this},disable:function(){return h=i=c=void 0,this},disabled:function(){return!h},lock:function(){return i=void 0,c||k.disable(),this},locked:function(){return!i},fireWith:function(a,c){return!h||d&&!i||(c=c||[],c=[a,c.slice?c.slice():c],b?i.push(c):j(c)),this},fire:function(){return k.fireWith(this,arguments),this},fired:function(){return!!d}};return k},m.extend({Deferred:function(a){var b=[["resolve","done",m.Callbacks("once memory"),"resolved"],["reject","fail",m.Callbacks("once memory"),"rejected"],["notify","progress",m.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return m.Deferred(function(c){m.each(b,function(b,f){var g=m.isFunction(a[b])&&a[b];e[f[1]](function(){var a=g&&g.apply(this,arguments);a&&m.isFunction(a.promise)?a.promise().done(c.resolve).fail(c.reject).progress(c.notify):c[f[0]+"With"](this===d?c.promise():this,g?[a]:arguments)})}),a=null}).promise()},promise:function(a){return null!=a?m.extend(a,d):d}},e={};return d.pipe=d.then,m.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[1^a][2].disable,b[2][2].lock),e[f[0]]=function(){return e[f[0]+"With"](this===e?d:this,arguments),this},e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b=0,c=d.call(arguments),e=c.length,f=1!==e||a&&m.isFunction(a.promise)?e:0,g=1===f?a:m.Deferred(),h=function(a,b,c){return function(e){b[a]=this,c[a]=arguments.length>1?d.call(arguments):e,c===i?g.notifyWith(b,c):--f||g.resolveWith(b,c)}},i,j,k;if(e>1)for(i=new Array(e),j=new Array(e),k=new Array(e);e>b;b++)c[b]&&m.isFunction(c[b].promise)?c[b].promise().done(h(b,k,c)).fail(g.reject).progress(h(b,j,i)):--f;return f||g.resolveWith(k,c),g.promise()}});var H;m.fn.ready=function(a){return m.ready.promise().done(a),this},m.extend({isReady:!1,readyWait:1,holdReady:function(a){a?m.readyWait++:m.ready(!0)},ready:function(a){if(a===!0?!--m.readyWait:!m.isReady){if(!y.body)return setTimeout(m.ready);m.isReady=!0,a!==!0&&--m.readyWait>0||(H.resolveWith(y,[m]),m.fn.triggerHandler&&(m(y).triggerHandler("ready"),m(y).off("ready")))}}});function I(){y.addEventListener?(y.removeEventListener("DOMContentLoaded",J,!1),a.removeEventListener("load",J,!1)):(y.detachEvent("onreadystatechange",J),a.detachEvent("onload",J))}function J(){(y.addEventListener||"load"===event.type||"complete"===y.readyState)&&(I(),m.ready())}m.ready.promise=function(b){if(!H)if(H=m.Deferred(),"complete"===y.readyState)setTimeout(m.ready);else if(y.addEventListener)y.addEventListener("DOMContentLoaded",J,!1),a.addEventListener("load",J,!1);else{y.attachEvent("onreadystatechange",J),a.attachEvent("onload",J);var c=!1;try{c=null==a.frameElement&&y.documentElement}catch(d){}c&&c.doScroll&&!function e(){if(!m.isReady){try{c.doScroll("left")}catch(a){return setTimeout(e,50)}I(),m.ready()}}()}return H.promise(b)};var K="undefined",L;for(L in m(k))break;k.ownLast="0"!==L,k.inlineBlockNeedsLayout=!1,m(function(){var 
a,b,c,d;c=y.getElementsByTagName("body")[0],c&&c.style&&(b=y.createElement("div"),d=y.createElement("div"),d.style.cssText="position:absolute;border:0;width:0;height:0;top:0;left:-9999px",c.appendChild(d).appendChild(b),typeof b.style.zoom!==K&&(b.style.cssText="display:inline;margin:0;border:0;padding:1px;width:1px;zoom:1",k.inlineBlockNeedsLayout=a=3===b.offsetWidth,a&&(c.style.zoom=1)),c.removeChild(d))}),function(){var a=y.createElement("div");if(null==k.deleteExpando){k.deleteExpando=!0;try{delete a.test}catch(b){k.deleteExpando=!1}}a=null}(),m.acceptData=function(a){var b=m.noData[(a.nodeName+" ").toLowerCase()],c=+a.nodeType||1;return 1!==c&&9!==c?!1:!b||b!==!0&&a.getAttribute("classid")===b};var M=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,N=/([A-Z])/g;function O(a,b,c){if(void 0===c&&1===a.nodeType){var d="data-"+b.replace(N,"-$1").toLowerCase();if(c=a.getAttribute(d),"string"==typeof c){try{c="true"===c?!0:"false"===c?!1:"null"===c?null:+c+""===c?+c:M.test(c)?m.parseJSON(c):c}catch(e){}m.data(a,b,c)}else c=void 0}return c}function P(a){var b;for(b in a)if(("data"!==b||!m.isEmptyObject(a[b]))&&"toJSON"!==b)return!1;return!0}function Q(a,b,d,e){if(m.acceptData(a)){var f,g,h=m.expando,i=a.nodeType,j=i?m.cache:a,k=i?a[h]:a[h]&&h; +if(k&&j[k]&&(e||j[k].data)||void 0!==d||"string"!=typeof b)return k||(k=i?a[h]=c.pop()||m.guid++:h),j[k]||(j[k]=i?{}:{toJSON:m.noop}),("object"==typeof b||"function"==typeof b)&&(e?j[k]=m.extend(j[k],b):j[k].data=m.extend(j[k].data,b)),g=j[k],e||(g.data||(g.data={}),g=g.data),void 0!==d&&(g[m.camelCase(b)]=d),"string"==typeof b?(f=g[b],null==f&&(f=g[m.camelCase(b)])):f=g,f}}function R(a,b,c){if(m.acceptData(a)){var d,e,f=a.nodeType,g=f?m.cache:a,h=f?a[m.expando]:m.expando;if(g[h]){if(b&&(d=c?g[h]:g[h].data)){m.isArray(b)?b=b.concat(m.map(b,m.camelCase)):b in d?b=[b]:(b=m.camelCase(b),b=b in d?[b]:b.split(" ")),e=b.length;while(e--)delete d[b[e]];if(c?!P(d):!m.isEmptyObject(d))return}(c||(delete g[h].data,P(g[h])))&&(f?m.cleanData([a],!0):k.deleteExpando||g!=g.window?delete g[h]:g[h]=null)}}}m.extend({cache:{},noData:{"applet ":!0,"embed ":!0,"object ":"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000"},hasData:function(a){return a=a.nodeType?m.cache[a[m.expando]]:a[m.expando],!!a&&!P(a)},data:function(a,b,c){return Q(a,b,c)},removeData:function(a,b){return R(a,b)},_data:function(a,b,c){return Q(a,b,c,!0)},_removeData:function(a,b){return R(a,b,!0)}}),m.fn.extend({data:function(a,b){var c,d,e,f=this[0],g=f&&f.attributes;if(void 0===a){if(this.length&&(e=m.data(f),1===f.nodeType&&!m._data(f,"parsedAttrs"))){c=g.length;while(c--)g[c]&&(d=g[c].name,0===d.indexOf("data-")&&(d=m.camelCase(d.slice(5)),O(f,d,e[d])));m._data(f,"parsedAttrs",!0)}return e}return"object"==typeof a?this.each(function(){m.data(this,a)}):arguments.length>1?this.each(function(){m.data(this,a,b)}):f?O(f,a,m.data(f,a)):void 0},removeData:function(a){return this.each(function(){m.removeData(this,a)})}}),m.extend({queue:function(a,b,c){var d;return a?(b=(b||"fx")+"queue",d=m._data(a,b),c&&(!d||m.isArray(c)?d=m._data(a,b,m.makeArray(c)):d.push(c)),d||[]):void 0},dequeue:function(a,b){b=b||"fx";var c=m.queue(a,b),d=c.length,e=c.shift(),f=m._queueHooks(a,b),g=function(){m.dequeue(a,b)};"inprogress"===e&&(e=c.shift(),d--),e&&("fx"===b&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return m._data(a,c)||m._data(a,c,{empty:m.Callbacks("once 
memory").add(function(){m._removeData(a,b+"queue"),m._removeData(a,c)})})}}),m.fn.extend({queue:function(a,b){var c=2;return"string"!=typeof a&&(b=a,a="fx",c--),arguments.lengthh;h++)b(a[h],c,g?d:d.call(a[h],h,b(a[h],c)));return e?a:j?b.call(a):i?b(a[0],c):f},W=/^(?:checkbox|radio)$/i;!function(){var a=y.createElement("input"),b=y.createElement("div"),c=y.createDocumentFragment();if(b.innerHTML="
    a",k.leadingWhitespace=3===b.firstChild.nodeType,k.tbody=!b.getElementsByTagName("tbody").length,k.htmlSerialize=!!b.getElementsByTagName("link").length,k.html5Clone="<:nav>"!==y.createElement("nav").cloneNode(!0).outerHTML,a.type="checkbox",a.checked=!0,c.appendChild(a),k.appendChecked=a.checked,b.innerHTML="",k.noCloneChecked=!!b.cloneNode(!0).lastChild.defaultValue,c.appendChild(b),b.innerHTML="",k.checkClone=b.cloneNode(!0).cloneNode(!0).lastChild.checked,k.noCloneEvent=!0,b.attachEvent&&(b.attachEvent("onclick",function(){k.noCloneEvent=!1}),b.cloneNode(!0).click()),null==k.deleteExpando){k.deleteExpando=!0;try{delete b.test}catch(d){k.deleteExpando=!1}}}(),function(){var b,c,d=y.createElement("div");for(b in{submit:!0,change:!0,focusin:!0})c="on"+b,(k[b+"Bubbles"]=c in a)||(d.setAttribute(c,"t"),k[b+"Bubbles"]=d.attributes[c].expando===!1);d=null}();var X=/^(?:input|select|textarea)$/i,Y=/^key/,Z=/^(?:mouse|pointer|contextmenu)|click/,$=/^(?:focusinfocus|focusoutblur)$/,_=/^([^.]*)(?:\.(.+)|)$/;function ab(){return!0}function bb(){return!1}function cb(){try{return y.activeElement}catch(a){}}m.event={global:{},add:function(a,b,c,d,e){var f,g,h,i,j,k,l,n,o,p,q,r=m._data(a);if(r){c.handler&&(i=c,c=i.handler,e=i.selector),c.guid||(c.guid=m.guid++),(g=r.events)||(g=r.events={}),(k=r.handle)||(k=r.handle=function(a){return typeof m===K||a&&m.event.triggered===a.type?void 0:m.event.dispatch.apply(k.elem,arguments)},k.elem=a),b=(b||"").match(E)||[""],h=b.length;while(h--)f=_.exec(b[h])||[],o=q=f[1],p=(f[2]||"").split(".").sort(),o&&(j=m.event.special[o]||{},o=(e?j.delegateType:j.bindType)||o,j=m.event.special[o]||{},l=m.extend({type:o,origType:q,data:d,handler:c,guid:c.guid,selector:e,needsContext:e&&m.expr.match.needsContext.test(e),namespace:p.join(".")},i),(n=g[o])||(n=g[o]=[],n.delegateCount=0,j.setup&&j.setup.call(a,d,p,k)!==!1||(a.addEventListener?a.addEventListener(o,k,!1):a.attachEvent&&a.attachEvent("on"+o,k))),j.add&&(j.add.call(a,l),l.handler.guid||(l.handler.guid=c.guid)),e?n.splice(n.delegateCount++,0,l):n.push(l),m.event.global[o]=!0);a=null}},remove:function(a,b,c,d,e){var f,g,h,i,j,k,l,n,o,p,q,r=m.hasData(a)&&m._data(a);if(r&&(k=r.events)){b=(b||"").match(E)||[""],j=b.length;while(j--)if(h=_.exec(b[j])||[],o=q=h[1],p=(h[2]||"").split(".").sort(),o){l=m.event.special[o]||{},o=(d?l.delegateType:l.bindType)||o,n=k[o]||[],h=h[2]&&new RegExp("(^|\\.)"+p.join("\\.(?:.*\\.|)")+"(\\.|$)"),i=f=n.length;while(f--)g=n[f],!e&&q!==g.origType||c&&c.guid!==g.guid||h&&!h.test(g.namespace)||d&&d!==g.selector&&("**"!==d||!g.selector)||(n.splice(f,1),g.selector&&n.delegateCount--,l.remove&&l.remove.call(a,g));i&&!n.length&&(l.teardown&&l.teardown.call(a,p,r.handle)!==!1||m.removeEvent(a,o,r.handle),delete k[o])}else for(o in k)m.event.remove(a,o+b[j],c,d,!0);m.isEmptyObject(k)&&(delete r.handle,m._removeData(a,"events"))}},trigger:function(b,c,d,e){var f,g,h,i,k,l,n,o=[d||y],p=j.call(b,"type")?b.type:b,q=j.call(b,"namespace")?b.namespace.split("."):[];if(h=l=d=d||y,3!==d.nodeType&&8!==d.nodeType&&!$.test(p+m.event.triggered)&&(p.indexOf(".")>=0&&(q=p.split("."),p=q.shift(),q.sort()),g=p.indexOf(":")<0&&"on"+p,b=b[m.expando]?b:new m.Event(p,"object"==typeof b&&b),b.isTrigger=e?2:3,b.namespace=q.join("."),b.namespace_re=b.namespace?new RegExp("(^|\\.)"+q.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,b.result=void 
0,b.target||(b.target=d),c=null==c?[b]:m.makeArray(c,[b]),k=m.event.special[p]||{},e||!k.trigger||k.trigger.apply(d,c)!==!1)){if(!e&&!k.noBubble&&!m.isWindow(d)){for(i=k.delegateType||p,$.test(i+p)||(h=h.parentNode);h;h=h.parentNode)o.push(h),l=h;l===(d.ownerDocument||y)&&o.push(l.defaultView||l.parentWindow||a)}n=0;while((h=o[n++])&&!b.isPropagationStopped())b.type=n>1?i:k.bindType||p,f=(m._data(h,"events")||{})[b.type]&&m._data(h,"handle"),f&&f.apply(h,c),f=g&&h[g],f&&f.apply&&m.acceptData(h)&&(b.result=f.apply(h,c),b.result===!1&&b.preventDefault());if(b.type=p,!e&&!b.isDefaultPrevented()&&(!k._default||k._default.apply(o.pop(),c)===!1)&&m.acceptData(d)&&g&&d[p]&&!m.isWindow(d)){l=d[g],l&&(d[g]=null),m.event.triggered=p;try{d[p]()}catch(r){}m.event.triggered=void 0,l&&(d[g]=l)}return b.result}},dispatch:function(a){a=m.event.fix(a);var b,c,e,f,g,h=[],i=d.call(arguments),j=(m._data(this,"events")||{})[a.type]||[],k=m.event.special[a.type]||{};if(i[0]=a,a.delegateTarget=this,!k.preDispatch||k.preDispatch.call(this,a)!==!1){h=m.event.handlers.call(this,a,j),b=0;while((f=h[b++])&&!a.isPropagationStopped()){a.currentTarget=f.elem,g=0;while((e=f.handlers[g++])&&!a.isImmediatePropagationStopped())(!a.namespace_re||a.namespace_re.test(e.namespace))&&(a.handleObj=e,a.data=e.data,c=((m.event.special[e.origType]||{}).handle||e.handler).apply(f.elem,i),void 0!==c&&(a.result=c)===!1&&(a.preventDefault(),a.stopPropagation()))}return k.postDispatch&&k.postDispatch.call(this,a),a.result}},handlers:function(a,b){var c,d,e,f,g=[],h=b.delegateCount,i=a.target;if(h&&i.nodeType&&(!a.button||"click"!==a.type))for(;i!=this;i=i.parentNode||this)if(1===i.nodeType&&(i.disabled!==!0||"click"!==a.type)){for(e=[],f=0;h>f;f++)d=b[f],c=d.selector+" ",void 0===e[c]&&(e[c]=d.needsContext?m(c,this).index(i)>=0:m.find(c,this,null,[i]).length),e[c]&&e.push(d);e.length&&g.push({elem:i,handlers:e})}return h]","i"),hb=/^\s+/,ib=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,jb=/<([\w:]+)/,kb=/\s*$/g,rb={option:[1,""],legend:[1,"
    ","
    "],area:[1,"",""],param:[1,"",""],thead:[1,"","
    "],tr:[2,"","
    "],col:[2,"","
    "],td:[3,"","
    "],_default:k.htmlSerialize?[0,"",""]:[1,"X
    ","
    "]},sb=db(y),tb=sb.appendChild(y.createElement("div"));rb.optgroup=rb.option,rb.tbody=rb.tfoot=rb.colgroup=rb.caption=rb.thead,rb.th=rb.td;function ub(a,b){var c,d,e=0,f=typeof a.getElementsByTagName!==K?a.getElementsByTagName(b||"*"):typeof a.querySelectorAll!==K?a.querySelectorAll(b||"*"):void 0;if(!f)for(f=[],c=a.childNodes||a;null!=(d=c[e]);e++)!b||m.nodeName(d,b)?f.push(d):m.merge(f,ub(d,b));return void 0===b||b&&m.nodeName(a,b)?m.merge([a],f):f}function vb(a){W.test(a.type)&&(a.defaultChecked=a.checked)}function wb(a,b){return m.nodeName(a,"table")&&m.nodeName(11!==b.nodeType?b:b.firstChild,"tr")?a.getElementsByTagName("tbody")[0]||a.appendChild(a.ownerDocument.createElement("tbody")):a}function xb(a){return a.type=(null!==m.find.attr(a,"type"))+"/"+a.type,a}function yb(a){var b=pb.exec(a.type);return b?a.type=b[1]:a.removeAttribute("type"),a}function zb(a,b){for(var c,d=0;null!=(c=a[d]);d++)m._data(c,"globalEval",!b||m._data(b[d],"globalEval"))}function Ab(a,b){if(1===b.nodeType&&m.hasData(a)){var c,d,e,f=m._data(a),g=m._data(b,f),h=f.events;if(h){delete g.handle,g.events={};for(c in h)for(d=0,e=h[c].length;e>d;d++)m.event.add(b,c,h[c][d])}g.data&&(g.data=m.extend({},g.data))}}function Bb(a,b){var c,d,e;if(1===b.nodeType){if(c=b.nodeName.toLowerCase(),!k.noCloneEvent&&b[m.expando]){e=m._data(b);for(d in e.events)m.removeEvent(b,d,e.handle);b.removeAttribute(m.expando)}"script"===c&&b.text!==a.text?(xb(b).text=a.text,yb(b)):"object"===c?(b.parentNode&&(b.outerHTML=a.outerHTML),k.html5Clone&&a.innerHTML&&!m.trim(b.innerHTML)&&(b.innerHTML=a.innerHTML)):"input"===c&&W.test(a.type)?(b.defaultChecked=b.checked=a.checked,b.value!==a.value&&(b.value=a.value)):"option"===c?b.defaultSelected=b.selected=a.defaultSelected:("input"===c||"textarea"===c)&&(b.defaultValue=a.defaultValue)}}m.extend({clone:function(a,b,c){var d,e,f,g,h,i=m.contains(a.ownerDocument,a);if(k.html5Clone||m.isXMLDoc(a)||!gb.test("<"+a.nodeName+">")?f=a.cloneNode(!0):(tb.innerHTML=a.outerHTML,tb.removeChild(f=tb.firstChild)),!(k.noCloneEvent&&k.noCloneChecked||1!==a.nodeType&&11!==a.nodeType||m.isXMLDoc(a)))for(d=ub(f),h=ub(a),g=0;null!=(e=h[g]);++g)d[g]&&Bb(e,d[g]);if(b)if(c)for(h=h||ub(a),d=d||ub(f),g=0;null!=(e=h[g]);g++)Ab(e,d[g]);else Ab(a,f);return d=ub(f,"script"),d.length>0&&zb(d,!i&&ub(a,"script")),d=h=e=null,f},buildFragment:function(a,b,c,d){for(var e,f,g,h,i,j,l,n=a.length,o=db(b),p=[],q=0;n>q;q++)if(f=a[q],f||0===f)if("object"===m.type(f))m.merge(p,f.nodeType?[f]:f);else if(lb.test(f)){h=h||o.appendChild(b.createElement("div")),i=(jb.exec(f)||["",""])[1].toLowerCase(),l=rb[i]||rb._default,h.innerHTML=l[1]+f.replace(ib,"<$1>")+l[2],e=l[0];while(e--)h=h.lastChild;if(!k.leadingWhitespace&&hb.test(f)&&p.push(b.createTextNode(hb.exec(f)[0])),!k.tbody){f="table"!==i||kb.test(f)?""!==l[1]||kb.test(f)?0:h:h.firstChild,e=f&&f.childNodes.length;while(e--)m.nodeName(j=f.childNodes[e],"tbody")&&!j.childNodes.length&&f.removeChild(j)}m.merge(p,h.childNodes),h.textContent="";while(h.firstChild)h.removeChild(h.firstChild);h=o.lastChild}else p.push(b.createTextNode(f));h&&o.removeChild(h),k.appendChecked||m.grep(ub(p,"input"),vb),q=0;while(f=p[q++])if((!d||-1===m.inArray(f,d))&&(g=m.contains(f.ownerDocument,f),h=ub(o.appendChild(f),"script"),g&&zb(h),c)){e=0;while(f=h[e++])ob.test(f.type||"")&&c.push(f)}return h=null,o},cleanData:function(a,b){for(var d,e,f,g,h=0,i=m.expando,j=m.cache,l=k.deleteExpando,n=m.event.special;null!=(d=a[h]);h++)if((b||m.acceptData(d))&&(f=d[i],g=f&&j[f])){if(g.events)for(e in 
g.events)n[e]?m.event.remove(d,e):m.removeEvent(d,e,g.handle);j[f]&&(delete j[f],l?delete d[i]:typeof d.removeAttribute!==K?d.removeAttribute(i):d[i]=null,c.push(f))}}}),m.fn.extend({text:function(a){return V(this,function(a){return void 0===a?m.text(this):this.empty().append((this[0]&&this[0].ownerDocument||y).createTextNode(a))},null,a,arguments.length)},append:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=wb(this,a);b.appendChild(a)}})},prepend:function(){return this.domManip(arguments,function(a){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var b=wb(this,a);b.insertBefore(a,b.firstChild)}})},before:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this)})},after:function(){return this.domManip(arguments,function(a){this.parentNode&&this.parentNode.insertBefore(a,this.nextSibling)})},remove:function(a,b){for(var c,d=a?m.filter(a,this):this,e=0;null!=(c=d[e]);e++)b||1!==c.nodeType||m.cleanData(ub(c)),c.parentNode&&(b&&m.contains(c.ownerDocument,c)&&zb(ub(c,"script")),c.parentNode.removeChild(c));return this},empty:function(){for(var a,b=0;null!=(a=this[b]);b++){1===a.nodeType&&m.cleanData(ub(a,!1));while(a.firstChild)a.removeChild(a.firstChild);a.options&&m.nodeName(a,"select")&&(a.options.length=0)}return this},clone:function(a,b){return a=null==a?!1:a,b=null==b?a:b,this.map(function(){return m.clone(this,a,b)})},html:function(a){return V(this,function(a){var b=this[0]||{},c=0,d=this.length;if(void 0===a)return 1===b.nodeType?b.innerHTML.replace(fb,""):void 0;if(!("string"!=typeof a||mb.test(a)||!k.htmlSerialize&&gb.test(a)||!k.leadingWhitespace&&hb.test(a)||rb[(jb.exec(a)||["",""])[1].toLowerCase()])){a=a.replace(ib,"<$1>");try{for(;d>c;c++)b=this[c]||{},1===b.nodeType&&(m.cleanData(ub(b,!1)),b.innerHTML=a);b=0}catch(e){}}b&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(){var a=arguments[0];return this.domManip(arguments,function(b){a=this.parentNode,m.cleanData(ub(this)),a&&a.replaceChild(b,this)}),a&&(a.length||a.nodeType)?this:this.remove()},detach:function(a){return this.remove(a,!0)},domManip:function(a,b){a=e.apply([],a);var c,d,f,g,h,i,j=0,l=this.length,n=this,o=l-1,p=a[0],q=m.isFunction(p);if(q||l>1&&"string"==typeof p&&!k.checkClone&&nb.test(p))return this.each(function(c){var d=n.eq(c);q&&(a[0]=p.call(this,c,d.html())),d.domManip(a,b)});if(l&&(i=m.buildFragment(a,this[0].ownerDocument,!1,this),c=i.firstChild,1===i.childNodes.length&&(i=c),c)){for(g=m.map(ub(i,"script"),xb),f=g.length;l>j;j++)d=i,j!==o&&(d=m.clone(d,!0,!0),f&&m.merge(g,ub(d,"script"))),b.call(this[j],d,j);if(f)for(h=g[g.length-1].ownerDocument,m.map(g,yb),j=0;f>j;j++)d=g[j],ob.test(d.type||"")&&!m._data(d,"globalEval")&&m.contains(h,d)&&(d.src?m._evalUrl&&m._evalUrl(d.src):m.globalEval((d.text||d.textContent||d.innerHTML||"").replace(qb,"")));i=c=null}return this}}),m.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){m.fn[a]=function(a){for(var c,d=0,e=[],g=m(a),h=g.length-1;h>=d;d++)c=d===h?this:this.clone(!0),m(g[d])[b](c),f.apply(e,c.get());return this.pushStack(e)}});var Cb,Db={};function Eb(b,c){var d,e=m(c.createElement(b)).appendTo(c.body),f=a.getDefaultComputedStyle&&(d=a.getDefaultComputedStyle(e[0]))?d.display:m.css(e[0],"display");return e.detach(),f}function Fb(a){var b=y,c=Db[a];return c||(c=Eb(a,b),"none"!==c&&c||(Cb=(Cb||m("